Compare commits
56 Commits
Author | SHA1 | Date
---|---|---
Roberto Cirillo | 6334bb4224 |
Roberto Cirillo | d1f61f1693 |
Roberto Cirillo | a1ae32f437 |
Roberto Cirillo | cde6155c81 |
lucio.lelii | 2b719a7633 |
lucio.lelii | d42448591d |
lucio.lelii | d9a6eb21be |
user1 | 279535c13a |
Giancarlo Panichi | 0a1de08b27 |
Giancarlo Panichi | a6e006439e |
Giancarlo Panichi | 806effbd6f |
Giancarlo Panichi | 6621bb20d9 |
Giancarlo Panichi | 45ecff015d |
Giancarlo Panichi | a24ecf41cb |
roberto cirillo | 577a564b4d |
roberto cirillo | cc1cece20e |
roberto cirillo | c573360e2a |
roberto cirillo | fdffbd6063 |
Giancarlo Panichi | 4d6318df92 |
Giancarlo Panichi | 34083f1357 |
Giancarlo Panichi | a5223ecb43 |
Giancarlo Panichi | 08a56c02dd |
Giancarlo Panichi | c2acc48494 |
Giancarlo Panichi | 9360f1eaa5 |
Giancarlo Panichi | a11cb3647f |
Giancarlo Panichi | ea88169e41 |
Giancarlo Panichi | 6b8d28873b |
Giancarlo Panichi | f16dbd2f71 |
Giancarlo Panichi | 81ddc263d1 |
Giancarlo Panichi | 88bc383f3c |
Giancarlo Panichi | 8d0d481e14 |
Giancarlo Panichi | 9891b206a5 |
Giancarlo Panichi | 790cff989c |
Giancarlo Panichi | 4d653662ce |
Giancarlo Panichi | 020c621a34 |
Lucio Lelii | f47693f27a |
Lucio Lelii | ed556a9960 |
Lucio Lelii | 9d6794ffde |
Lucio Lelii | 0adfa5b959 |
Lucio Lelii | fb6c980623 |
Giancarlo Panichi | fc5e616101 |
Giancarlo Panichi | d3080c4052 |
Giancarlo Panichi | 34d131b900 |
Lucio Lelii | 60ccac1784 |
Giancarlo Panichi | 26ad1e8cc9 |
Lucio Lelii | bf7e31697d |
Lucio Lelii | 3d509ae807 |
Lucio Lelii | 9013721e12 |
Lucio Lelii | 67fbb1f724 |
Lucio Lelii | fdda6ce838 |
Lucio Lelii | f6b3253459 |
Lucio Lelii | 8e7edbb075 |
Lucio Lelii | 30749c37e3 |
Lucio Lelii | 908e7e57f4 |
Lucio Lelii | 607f49125c |
Lucio Lelii | c6676795ce |

@@ -14,17 +14,20 @@
 <classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
   <attributes>
     <attribute name="maven.pomderived" value="true"/>
+    <attribute name="org.eclipse.jst.component.nondependency" value=""/>
   </attributes>
 </classpathentry>
 <classpathentry kind="src" output="target/test-classes" path="src/test/java">
   <attributes>
     <attribute name="optional" value="true"/>
     <attribute name="maven.pomderived" value="true"/>
+    <attribute name="test" value="true"/>
   </attributes>
 </classpathentry>
 <classpathentry excluding="**" kind="src" output="target/test-classes" path="src/test/resources">
   <attributes>
     <attribute name="maven.pomderived" value="true"/>
+    <attribute name="test" value="true"/>
   </attributes>
 </classpathentry>
 <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.8">

@@ -0,0 +1 @@
+/target/

.project (13 changed lines)

@@ -5,11 +5,21 @@
 <projects>
 </projects>
 <buildSpec>
+  <buildCommand>
+    <name>org.eclipse.wst.common.project.facet.core.builder</name>
+    <arguments>
+    </arguments>
+  </buildCommand>
   <buildCommand>
     <name>org.eclipse.jdt.core.javabuilder</name>
     <arguments>
     </arguments>
   </buildCommand>
+  <buildCommand>
+    <name>org.eclipse.wst.validation.validationbuilder</name>
+    <arguments>
+    </arguments>
+  </buildCommand>
   <buildCommand>
     <name>org.eclipse.m2e.core.maven2Builder</name>
     <arguments>

@@ -17,7 +27,10 @@
   </buildCommand>
 </buildSpec>
 <natures>
+  <nature>org.eclipse.jem.workbench.JavaEMFNature</nature>
+  <nature>org.eclipse.wst.common.modulecore.ModuleCoreNature</nature>
   <nature>org.eclipse.m2e.core.maven2Nature</nature>
   <nature>org.eclipse.jdt.core.javanature</nature>
+  <nature>org.eclipse.wst.common.project.facet.core.nature</nature>
 </natures>
 </projectDescription>

@@ -7,6 +7,9 @@ org.eclipse.jdt.core.compiler.debug.lineNumber=generate
 org.eclipse.jdt.core.compiler.debug.localVariable=generate
 org.eclipse.jdt.core.compiler.debug.sourceFile=generate
 org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
+org.eclipse.jdt.core.compiler.problem.enablePreviewFeatures=disabled
 org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
 org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
+org.eclipse.jdt.core.compiler.problem.reportPreviewFeatures=ignore
+org.eclipse.jdt.core.compiler.release=disabled
 org.eclipse.jdt.core.compiler.source=1.8

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?><project-modules id="moduleCoreId" project-version="1.5.0">
  <wb-module deploy-name="dataminer">
    <wb-resource deploy-path="/" source-path="/src/main/java"/>
    <wb-resource deploy-path="/" source-path="/src/main/resources"/>
  </wb-module>
</project-modules>

@@ -0,0 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<faceted-project>
  <installed facet="java" version="1.8"/>
  <installed facet="jst.utility" version="1.0"/>
</faceted-project>

@@ -0,0 +1,2 @@
disabled=06target
eclipse.preferences.version=1

@@ -0,0 +1,71 @@
# Changelog

## [v1.7.0] - 2020-11-20

- import range modified to resolve old repositories invalid url

## [v1.6.0] - 2020-05-12

### Fixes

- Added storagehub retry in InputsManager class, getLocalFile method [#19253]

## [v1.5.9] - 2019-11-20

### Fixes

- Fixed Content-Type support for files in the results of computations [#18096]

## [v1.5.8] - 2019-10-01

### Fixes

- Fixed https link for output parameter [#17659]

## [v1.5.7] - 2019-03-01

### Features

- Updated https support [#13024]

## [v1.5.2] - 2017-12-13

### Fixes

- added the right extension on output file
- lock file created on execution

## [v1.5.1] - 2017-09-14

### Features

- added accounting on algorithm execution

## [v1.5.0] - 2017-07-31

### Features

- service interface classes moved to wps project

## [v1.1.0] - 2016-10-03

- First Release

This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

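Note on the v1.6.0 fix above: this compare view does not include the body of `InputsManager.getLocalFile`, so the following is only a minimal sketch of the kind of bounded retry that changelog entry describes. The constants and the `downloadFromStorageHub` helper are hypothetical names, not the project's API.

```java
import java.io.File;

public class RetrySketch {

    // Hypothetical tuning constants for the sketch.
    private static final int MAX_RETRIES = 3;
    private static final long RETRY_DELAY_MS = 5000;

    public File getLocalFile(String url) throws Exception {
        Exception last = null;
        for (int attempt = 1; attempt <= MAX_RETRIES; attempt++) {
            try {
                return downloadFromStorageHub(url); // may fail transiently
            } catch (Exception e) {
                last = e;
                if (attempt < MAX_RETRIES) {
                    Thread.sleep(RETRY_DELAY_MS); // back off before the next attempt
                }
            }
        }
        throw new Exception("download failed after " + MAX_RETRIES + " attempts", last);
    }

    private File downloadFromStorageHub(String url) throws Exception {
        // Placeholder for the actual StorageHub client call (not shown in this diff).
        throw new UnsupportedOperationException("not implemented in this sketch");
    }
}
```
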
@@ -0,0 +1,26 @@
# Acknowledgments

The projects leading to this software have received funding from a series of European Union programmes including:

- the Sixth Framework Programme for Research and Technological Development
  - [DILIGENT](https://cordis.europa.eu/project/id/004260) (grant no. 004260).
- the Seventh Framework Programme for research, technological development and demonstration
  - [D4Science](https://cordis.europa.eu/project/id/212488) (grant no. 212488);
  - [D4Science-II](https://cordis.europa.eu/project/id/239019) (grant no. 239019);
  - [ENVRI](https://cordis.europa.eu/project/id/283465) (grant no. 283465);
  - [iMarine](https://cordis.europa.eu/project/id/283644) (grant no. 283644);
  - [EUBrazilOpenBio](https://cordis.europa.eu/project/id/288754) (grant no. 288754).
- the H2020 research and innovation programme
  - [SoBigData](https://cordis.europa.eu/project/id/654024) (grant no. 654024);
  - [PARTHENOS](https://cordis.europa.eu/project/id/654119) (grant no. 654119);
  - [EGI-Engage](https://cordis.europa.eu/project/id/654142) (grant no. 654142);
  - [ENVRI PLUS](https://cordis.europa.eu/project/id/654182) (grant no. 654182);
  - [BlueBRIDGE](https://cordis.europa.eu/project/id/675680) (grant no. 675680);
  - [PerformFISH](https://cordis.europa.eu/project/id/727610) (grant no. 727610);
  - [AGINFRA PLUS](https://cordis.europa.eu/project/id/731001) (grant no. 731001);
  - [DESIRA](https://cordis.europa.eu/project/id/818194) (grant no. 818194);
  - [ARIADNEplus](https://cordis.europa.eu/project/id/823914) (grant no. 823914);
  - [RISIS 2](https://cordis.europa.eu/project/id/824091) (grant no. 824091);
  - [EOSC-Pillar](https://cordis.europa.eu/project/id/857650) (grant no. 857650);
  - [Blue Cloud](https://cordis.europa.eu/project/id/862409) (grant no. 862409);
  - [SoBigData-PlusPlus](https://cordis.europa.eu/project/id/871042) (grant no. 871042);

@@ -0,0 +1,311 @@
# European Union Public Licence V.1.1

## *EUPL © the European Community 2007*

This **European Union Public Licence** (the **"EUPL"**) applies to the Work or Software (as defined below) which is provided under the terms of this Licence. Any use of the Work, other than as authorised under this Licence is prohibited (to the extent such use is covered by a right of the copyright holder of the Work).

The Original Work is provided under the terms of this Licence when the Licensor (as defined below) has placed the following notice immediately following the copyright notice for the Original Work:

**Licensed under the EUPL V.1.1**

or has expressed by any other mean his willingness to license under the EUPL.

## 1. Definitions

In this Licence, the following terms have the following meaning:

- The Licence: this Licence.
- The Original Work or the Software: the software distributed and/or communicated by the Licensor under this Licence, available as Source Code and also as Executable Code as the case may be.
- Derivative Works: the works or software that could be created by the Licensee, based upon the Original Work or modifications thereof. This Licence does not define the extent of modification or dependence on the Original Work required in order to classify a work as a Derivative Work; this extent is determined by copyright law applicable in the country mentioned in Article 15.
- The Work: the Original Work and/or its Derivative Works.
- The Source Code: the human-readable form of the Work which is the most convenient for people to study and modify.
- The Executable Code: any code which has generally been compiled and which is meant to be interpreted by a computer as a program.
- The Licensor: the natural or legal person that distributes and/or communicates the Work under the Licence.
- Contributor(s): any natural or legal person who modifies the Work under the Licence, or otherwise contributes to the creation of a Derivative Work.
- The Licensee or "You": any natural or legal person who makes any usage of the Software under the terms of the Licence.
- Distribution and/or Communication: any act of selling, giving, lending, renting, distributing, communicating, transmitting, or otherwise making available, on-line or off-line, copies of the Work or providing access to its essential functionalities at the disposal of any other natural or legal person.

## 2. Scope of the rights granted by the Licence

The Licensor hereby grants You a world-wide, royalty-free, non-exclusive, sub-licensable licence to do the following, for the duration of copyright vested in the Original Work: use the Work in any circumstance and for all usage, reproduce the Work, modify the Original Work, and make Derivative Works based upon the Work, communicate to the public, including the right to make available or display the Work or copies thereof to the public and perform publicly, as the case may be, the Work, distribute the Work or copies thereof, lend and rent the Work or copies thereof, sub-license rights in the Work or copies thereof.

Those rights can be exercised on any media, supports and formats, whether now known or later invented, as far as the applicable law permits so.

In the countries where moral rights apply, the Licensor waives his right to exercise his moral right to the extent allowed by law in order to make effective the licence of the economic rights here above listed.

The Licensor grants to the Licensee royalty-free, non exclusive usage rights to any patents held by the Licensor, to the extent necessary to make use of the rights granted on the Work under this Licence.

## 3. Communication of the Source Code

The Licensor may provide the Work either in its Source Code form, or as Executable Code. If the Work is provided as Executable Code, the Licensor provides in addition a machine-readable copy of the Source Code of the Work along with each copy of the Work that the Licensor distributes or indicates, in a notice following the copyright notice attached to the Work, a repository where the Source Code is easily and freely accessible for as long as the Licensor continues to distribute and/or communicate the Work.

## 4. Limitations on copyright

Nothing in this Licence is intended to deprive the Licensee of the benefits from any exception or limitation to the exclusive rights of the rights owners in the Original Work or Software, of the exhaustion of those rights or of other applicable limitations thereto.

## 5. Obligations of the Licensee

The grant of the rights mentioned above is subject to some restrictions and obligations imposed on the Licensee. Those obligations are the following:

Attribution right: the Licensee shall keep intact all copyright, patent or trademarks notices and all notices that refer to the Licence and to the disclaimer of warranties. The Licensee must include a copy of such notices and a copy of the Licence with every copy of the Work he/she distributes and/or communicates. The Licensee must cause any Derivative Work to carry prominent notices stating that the Work has been modified and the date of modification.

Copyleft clause: If the Licensee distributes and/or communicates copies of the Original Works or Derivative Works based upon the Original Work, this Distribution and/or Communication will be done under the terms of this Licence or of a later version of this Licence unless the Original Work is expressly distributed only under this version of the Licence. The Licensee (becoming Licensor) cannot offer or impose any additional terms or conditions on the Work or Derivative Work that alter or restrict the terms of the Licence.

Compatibility clause: If the Licensee Distributes and/or Communicates Derivative Works or copies thereof based upon both the Original Work and another work licensed under a Compatible Licence, this Distribution and/or Communication can be done under the terms of this Compatible Licence. For the sake of this clause, "Compatible Licence" refers to the licences listed in the appendix attached to this Licence. Should the Licensee's obligations under the Compatible Licence conflict with his/her obligations under this Licence, the obligations of the Compatible Licence shall prevail.

Provision of Source Code: When distributing and/or communicating copies of the Work, the Licensee will provide a machine-readable copy of the Source Code or indicate a repository where this Source will be easily and freely available for as long as the Licensee continues to distribute and/or communicate the Work.

Legal Protection: This Licence does not grant permission to use the trade names, trademarks, service marks, or names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the copyright notice.

## 6. Chain of Authorship

The original Licensor warrants that the copyright in the Original Work granted hereunder is owned by him/her or licensed to him/her and that he/she has the power and authority to grant the Licence.

Each Contributor warrants that the copyright in the modifications he/she brings to the Work are owned by him/her or licensed to him/her and that he/she has the power and authority to grant the Licence.

Each time You accept the Licence, the original Licensor and subsequent Contributors grant You a licence to their contributions to the Work, under the terms of this Licence.

## 7. Disclaimer of Warranty

The Work is a work in progress, which is continuously improved by numerous contributors. It is not a finished work and may therefore contain defects or "bugs" inherent to this type of software development.

For the above reason, the Work is provided under the Licence on an "as is" basis and without warranties of any kind concerning the Work, including without limitation merchantability, fitness for a particular purpose, absence of defects or errors, accuracy, non-infringement of intellectual property rights other than copyright as stated in Article 6 of this Licence.

This disclaimer of warranty is an essential part of the Licence and a condition for the grant of any rights to the Work.

## 8. Disclaimer of Liability

Except in the cases of wilful misconduct or damages directly caused to natural persons, the Licensor will in no event be liable for any direct or indirect, material or moral, damages of any kind, arising out of the Licence or of the use of the Work, including without limitation, damages for loss of goodwill, work stoppage, computer failure or malfunction, loss of data or any commercial damage, even if the Licensor has been advised of the possibility of such damage. However, the Licensor will be liable under statutory product liability laws as far such laws apply to the Work.

## 9. Additional agreements

While distributing the Original Work or Derivative Works, You may choose to conclude an additional agreement to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or services consistent with this Licence. However, in accepting such obligations, You may act only on your own behalf and on your sole responsibility, not on behalf of the original Licensor or any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against such Contributor by the fact You have accepted any such warranty or additional liability.

## 10. Acceptance of the Licence

The provisions of this Licence can be accepted by clicking on an icon "I agree" placed under the bottom of a window displaying the text of this Licence or by affirming consent in any other similar way, in accordance with the rules of applicable law. Clicking on that icon indicates your clear and irrevocable acceptance of this Licence and all of its terms and conditions.

Similarly, you irrevocably accept this Licence and all of its terms and conditions by exercising any rights granted to You by Article 2 of this Licence, such as the use of the Work, the creation by You of a Derivative Work or the Distribution and/or Communication by You of the Work or copies thereof.

## 11. Information to the public

In case of any Distribution and/or Communication of the Work by means of electronic communication by You (for example, by offering to download the Work from a remote location) the distribution channel or media (for example, a website) must at least provide to the public the information requested by the applicable law regarding the Licensor, the Licence and the way it may be accessible, concluded, stored and reproduced by the Licensee.

## 12. Termination of the Licence

The Licence and the rights granted hereunder will terminate automatically upon any breach by the Licensee of the terms of the Licence.

Such a termination will not terminate the licences of any person who has received the Work from the Licensee under the Licence, provided such persons remain in full compliance with the Licence.

## 13. Miscellaneous

Without prejudice of Article 9 above, the Licence represents the complete agreement between the Parties as to the Work licensed hereunder.

If any provision of the Licence is invalid or unenforceable under applicable law, this will not affect the validity or enforceability of the Licence as a whole. Such provision will be construed and/or reformed so as necessary to make it valid and enforceable.

The European Commission may publish other linguistic versions and/or new versions of this Licence, so far this is required and reasonable, without reducing the scope of the rights granted by the Licence. New versions of the Licence will be published with a unique version number.

All linguistic versions of this Licence, approved by the European Commission, have identical value. Parties can take advantage of the linguistic version of their choice.

## 14. Jurisdiction

Any litigation resulting from the interpretation of this License, arising between the European Commission, as a Licensor, and any Licensee, will be subject to the jurisdiction of the Court of Justice of the European Communities, as laid down in article 238 of the Treaty establishing the European Community.

Any litigation arising between Parties, other than the European Commission, and resulting from the interpretation of this License, will be subject to the exclusive jurisdiction of the competent court where the Licensor resides or conducts its primary business.

## 15. Applicable Law

This Licence shall be governed by the law of the European Union country where the Licensor resides or has his registered office.

This licence shall be governed by the Belgian law if:

- a litigation arises between the European Commission, as a Licensor, and any Licensee;
- the Licensor, other than the European Commission, has no residence or registered office inside a European Union country.

---

## Appendix

**"Compatible Licences"** according to article 5 EUPL are:

- GNU General Public License (GNU GPL) v. 2
- Open Software License (OSL) v. 2.1, v. 3.0
- Common Public License v. 1.0
- Eclipse Public License v. 1.0
- Cecill v. 2.0

@@ -0,0 +1,48 @@
# DataMiner

DataMiner is a library for integrating the 52North WPS into the D4Science Infrastructure.

## Structure of the project

* The source code is present in the src folder.

## Built With

* [OpenJDK](https://openjdk.java.net/) - The JDK used
* [Maven](https://maven.apache.org/) - Dependency Management

## Documentation

* Use of this library is described on the [Wiki](https://wiki.gcube-system.org/gcube/DataMiner_Installation).

## Change log

See [Releases](https://code-repo.d4science.org/gCubeSystem/dataminer/releases).

## Authors

* **Gianpaolo Coro** ([ORCID]()) - [ISTI-CNR Infrascience Group](http://nemis.isti.cnr.it/groups/infrascience)
* **Lucio Lelii** ([ORCID]()) - [ISTI-CNR Infrascience Group](http://nemis.isti.cnr.it/groups/infrascience)
* **Giancarlo Panichi** ([ORCID](http://orcid.org/0000-0001-8375-6644)) - [ISTI-CNR Infrascience Group](http://nemis.isti.cnr.it/groups/infrascience)

## License

This project is licensed under the EUPL V.1.1 License - see the [LICENSE.md](LICENSE.md) file for details.

## About the gCube Framework

This software is part of the [gCubeFramework](https://www.gcube-system.org/ "gCubeFramework"): an open-source software toolkit used for building and operating Hybrid Data Infrastructures enabling the dynamic deployment of Virtual Research Environments by favouring the realisation of reuse oriented policies.

The projects leading to this software have received funding from a series of European Union programmes including:

- the Sixth Framework Programme for Research and Technological Development
  - DILIGENT (grant no. 004260);
- the Seventh Framework Programme for research, technological development and demonstration
  - D4Science (grant no. 212488), D4Science-II (grant no. 239019), ENVRI (grant no. 283465), EUBrazilOpenBio (grant no. 288754), iMarine (grant no. 283644);
- the H2020 research and innovation programme
  - BlueBRIDGE (grant no. 675680), EGIEngage (grant no. 654142), ENVRIplus (grant no. 654182), Parthenos (grant no. 654119), SoBigData (grant no. 654024), DESIRA (grant no. 818194), ARIADNEplus (grant no. 823914), RISIS2 (grant no. 824091), PerformFish (grant no. 727610), AGINFRAplus (grant no. 731001);

@@ -1 +0,0 @@
${gcube.license}

@@ -1,69 +0,0 @@
The gCube System - ${name}
--------------------------------------------------

${description}

${gcube.description}

${gcube.funding}


Version
--------------------------------------------------

${version} (${buildDate})

Please see the file named "changelog.xml" in this directory for the release notes.


Authors
--------------------------------------------------

* Gianpaolo Coro (gianpaolo.coro-AT-isti.cnr.it),
  Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo" CNR, Pisa IT


Maintainers
-----------

* Gianpaolo Coro (gianpaolo.coro-AT-isti.cnr.it),
  Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo" CNR, Pisa IT


Download information
--------------------------------------------------

Source code is available from SVN:
  ${scm.url}

Binaries can be downloaded from the gCube website:
  ${gcube.website}


Installation
--------------------------------------------------

Installation documentation is available on-line in the gCube Wiki:
  https://wiki.gcube-system.org/gcube/DataMiner_Installation


Documentation
--------------------------------------------------

Documentation is available on-line in the gCube Wiki:
  https://wiki.gcube-system.org/gcube/DataMiner_Installation


Support
--------------------------------------------------

Bugs and support requests can be reported in the gCube issue tracking tool:
  ${gcube.issueTracking}


Licensing
--------------------------------------------------

This software is licensed under the terms you may find in the file named "LICENSE" in this directory.

@@ -1,15 +0,0 @@
<ReleaseNotes>
  <Changeset component="${groupId}.${artifactId}.1-5-2" date="2017-12-13">
    <Change>added the right extension on output file</Change>
    <Change>lock file created on execution</Change>
  </Changeset>
  <Changeset component="${groupId}.${artifactId}.1-5-1" date="2017-09-14">
    <Change>added accounting on algorithm execution</Change>
  </Changeset>
  <Changeset component="${groupId}.${artifactId}.1-5-0" date="2017-07-31">
    <Change>service interface classes moved to wps project</Change>
  </Changeset>
  <Changeset component="${groupId}.${artifactId}.1-1-0" date="2016-10-03">
    <Change>First Release</Change>
  </Changeset>
</ReleaseNotes>

@@ -1,32 +0,0 @@
<assembly
  xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
  xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
  <id>servicearchive</id>
  <formats>
    <format>tar.gz</format>
  </formats>
  <baseDirectory>/</baseDirectory>
  <fileSets>
    <fileSet>
      <directory>${distroDirectory}</directory>
      <outputDirectory>/</outputDirectory>
      <useDefaultExcludes>true</useDefaultExcludes>
      <includes>
        <include>README</include>
        <include>LICENSE</include>
        <include>changelog.xml</include>
        <include>profile.xml</include>
      </includes>
      <fileMode>755</fileMode>
      <filtered>true</filtered>
    </fileSet>
  </fileSets>
  <files>
    <file>
      <source>target/${build.finalName}.${project.packaging}</source>
      <outputDirectory>/${artifactId}</outputDirectory>
    </file>
  </files>
</assembly>

@@ -1,30 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<Resource>
  <ID></ID>
  <Type>Service</Type>
  <Profile>
    <Description>${project.description}</Description>
    <Class>DataAnalysis</Class>
    <Name>${project.name}</Name>
    <Version>1.0.0</Version>
    <Packages>
      <Software>
        <Name>${project.name}</Name>
        <Description>${project.description}</Description>
        <Version>${version}</Version>
        <MavenCoordinates>
          <groupId>${project.groupId}</groupId>
          <artifactId>${project.artifactId}</artifactId>
          <version>${project.version}</version>
        </MavenCoordinates>
        <Type>Service</Type>
        <Files>
          <File>${project.build.finalName}.${project.packaging}</File>
        </Files>
      </Software>
    </Packages>
  </Profile>
</Resource>

pom.xml (113 changed lines)

@@ -4,16 +4,19 @@
 <parent>
   <artifactId>maven-parent</artifactId>
   <groupId>org.gcube.tools</groupId>
-  <version>1.0.0</version>
+  <version>1.1.0</version>
   <relativePath />
 </parent>
 <groupId>org.gcube.dataanalysis</groupId>
 <artifactId>dataminer</artifactId>
-<version>1.5.3-SNAPSHOT</version>
+<version>1.7.0</version>
 <name>dataminer</name>
 <description>An e-Infrastructure service providing state-of-the art DataMining algorithms and ecological modelling approaches under the Web Processing Service (WPS) standard.</description>

 <scm>
-  <url>https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/DataMiner</url>
+  <connection>scm:git:https://code-repo.d4science.org/gCubeSystem/${project.artifactId}.git</connection>
+  <developerConnection>scm:git:https://code-repo.d4science.org/gCubeSystem/${project.artifactId}.git</developerConnection>
+  <url>https://code-repo.d4science.org/gCubeSystem/${project.artifactId}</url>
 </scm>

 <developers>

@@ -26,13 +29,40 @@
     <role>developer</role>
   </roles>
 </developer>
+<developer>
+  <name>Lucio Lelii</name>
+  <email>lucio.lelii@isti.cnr.it</email>
+  <organization>CNR Pisa, Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo"</organization>
+  <roles>
+    <role>architect</role>
+    <role>developer</role>
+  </roles>
+</developer>
 </developers>

 <properties>
   <webappDirectory>${project.build.directory}/${project.build.finalName}</webappDirectory>
   <distroDirectory>distro</distroDirectory>
   <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
   <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
 </properties>

+<dependencyManagement>
+  <!-- Old solution <dependencies> <dependency> <groupId>org.gcube.distribution</groupId>
+    <artifactId>maven-smartgears-bom</artifactId> <version>2.1.0</version> <type>pom</type>
+    <scope>import</scope> </dependency> </dependencies> -->
+  <dependencies>
+    <dependency>
+      <groupId>org.gcube.distribution</groupId>
+      <artifactId>gcube-bom</artifactId>
+      <version>2.0.0</version>
+      <type>pom</type>
+      <scope>import</scope>
+    </dependency>
+  </dependencies>
+</dependencyManagement>
+
 <dependencies>
   <!-- https://mvnrepository.com/artifact/org.reflections/reflections-maven -->
   <dependency>

@@ -61,27 +91,22 @@
   <dependency>
     <groupId>org.gcube.dataanalysis</groupId>
     <artifactId>52n-wps-algorithm-gcube</artifactId>
-    <version>[3.6.1-SNAPSHOT,3.7.0-SNAPSHOT)</version>
+    <version>[3.6.1,3.7.0)</version>
   </dependency>
   <dependency>
     <groupId>org.gcube.dataanalysis</groupId>
     <artifactId>52n-wps-server-gcube</artifactId>
-    <version>[3.6.1-SNAPSHOT, 3.7.0-SNAPSHOT)</version>
+    <version>[3.6.1, 3.7.0)</version>
-  </dependency>
-  <dependency>
-    <groupId>org.gcube.dataanalysis</groupId>
-    <artifactId>ecological-engine</artifactId>
-    <version>[1.8.5-SNAPSHOT,2.0.0-SNAPSHOT)</version>
   </dependency>
   <dependency>
     <groupId>org.gcube.dataanalysis</groupId>
     <artifactId>ecological-engine-wps-extension</artifactId>
-    <version>[1.0.2-SNAPSHOT,2.0.0-SNAPSHOT)</version>
+    <version>[1.0.5,2.0.0-SNAPSHOT)</version>
   </dependency>
   <dependency>
     <groupId>org.gcube.dataanalysis</groupId>
     <artifactId>ecological-engine-geospatial-extensions</artifactId>
-    <version>[1.3.2-SNAPSHOT,2.0.0-SNAPSHOT)</version>
+    <version>[1.5.2-SNAPSHOT,2.0.0-SNAPSHOT)</version>
     <exclusions>
       <exclusion>
         <artifactId>log4j</artifactId>

@@ -92,12 +117,22 @@
   <dependency>
     <groupId>org.gcube.dataanalysis</groupId>
     <artifactId>ecological-engine-external-algorithms</artifactId>
-    <version>[1.1.5-SNAPSHOT,2.0.0-SNAPSHOT)</version>
+    <version>[1.2.2-SNAPSHOT,2.0.0-SNAPSHOT)</version>
   </dependency>
   <dependency>
     <groupId>org.gcube.dataanalysis</groupId>
     <artifactId>ecological-engine-smart-executor</artifactId>
-    <version>[1.0.0-SNAPSHOT,2.0.0-SNAPSHOT)</version>
+    <version>[1.6.5-SNAPSHOT,2.0.0-SNAPSHOT)</version>
+    <exclusions>
+      <exclusion>
+        <groupId>org.glassfish.jersey.core</groupId>
+        <artifactId>jersey-client</artifactId>
+      </exclusion>
+      <exclusion>
+        <groupId>org.glassfish.jersey.media</groupId>
+        <artifactId>jersey-media-json-jackson</artifactId>
+      </exclusion>
+    </exclusions>
   </dependency>
   <dependency>
     <groupId>org.slf4j</groupId>

@@ -116,24 +151,23 @@
     <version>4.11</version>
     <scope>test</scope>
   </dependency>
-  <!-- <dependency> <groupId>org.gcube.common</groupId> <artifactId>common-authorization</artifactId>
-    </dependency> <dependency> <groupId>org.gcube.core</groupId> <artifactId>common-scope</artifactId>
-    </dependency> -->
   <dependency>
     <groupId>javassist</groupId>
     <artifactId>javassist</artifactId>
     <version>3.12.1.GA</version>
   </dependency>
-  <dependency>
-    <groupId>org.gcube.common</groupId>
-    <artifactId>home-library-jcr</artifactId>
-    <version>[2.0.0-SNAPSHOT,3.0.0-SNAPSHOT)</version>
-  </dependency>
-
   <dependency>
     <groupId>org.gcube.common</groupId>
-    <artifactId>home-library</artifactId>
-    <version>[2.0.0-SNAPSHOT,3.0.0-SNAPSHOT)</version>
+    <artifactId>storagehub-client-library</artifactId>
+    <version>[1.0.0, 2.0.0-SNAPSHOT)</version>
+  </dependency>
+  <dependency>
+    <groupId>org.gcube.common</groupId>
+    <artifactId>storagehub-model</artifactId>
+    <version>[1.0.0, 2.0.0-SNAPSHOT)</version>
   </dependency>

   <dependency>

@@ -145,7 +179,6 @@
   <dependency>
     <groupId>org.gcube.accounting</groupId>
     <artifactId>accounting-lib</artifactId>
-    <version>[3.0.0-SNAPSHOT,4.0.0-SNAPSHOT)</version>
     <scope>provided</scope>
   </dependency>

@@ -154,44 +187,14 @@
 <build>
   <plugins>
-    <plugin>
-      <artifactId>maven-compiler-plugin</artifactId>
-      <version>3.1</version>
-      <configuration>
-        <source>1.8</source>
-        <target>1.8</target>
-      </configuration>
-    </plugin>
     <plugin>
       <groupId>org.apache.maven.plugins</groupId>
       <artifactId>maven-surefire-plugin</artifactId>
-      <version>2.18.1</version>
       <configuration>
         <skipTests>true</skipTests>
       </configuration>
     </plugin>
-    <plugin>
-      <groupId>org.apache.maven.plugins</groupId>
-      <artifactId>maven-assembly-plugin</artifactId>
-      <configuration>
-        <descriptors>
-          <descriptor>${distroDirectory}/descriptor.xml</descriptor>
-        </descriptors>
-      </configuration>
-      <executions>
-        <execution>
-          <id>servicearchive</id>
-          <phase>install</phase>
-          <goals>
-            <goal>single</goal>
-          </goals>
-        </execution>
-      </executions>
-    </plugin>
   </plugins>
 </build>
 <repositories>

@@ -30,7 +30,7 @@ public class InfrastructureDialoguer {
 	public DatabaseInfo getDatabaseInfo(String resourceName) throws Exception{
 		DatabaseInfo dbi = new DatabaseInfo();

-		LOGGER.debug("Searching for Database "+resourceName+" in scope "+scope);
+		LOGGER.debug("Searching for Database {} in scope {}", resourceName, scope);
 		SimpleQuery query = queryFor(ServiceEndpoint.class);
 		// query.addCondition("$resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'StatisticalManagerDataBase' ");
 		// query.addCondition("$resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq '"+resourceName+"' ");

@@ -51,7 +51,7 @@ public class InfrastructureDialoguer {
 			dbi.driver = property.value();
 		}

-		LOGGER.debug("Found Database : "+dbi);
+		LOGGER.debug("Found Database : {}",dbi);
 	}

 	if (dbi.url == null)

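The two logging changes above replace string concatenation with SLF4J's parameterized form, which defers message formatting until the level check passes, so a disabled DEBUG logger costs almost nothing. A minimal, self-contained illustration of the difference (the class name and sample values are hypothetical, not taken from the project):

```java
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingStyleDemo {

    private static final Logger LOGGER = LoggerFactory.getLogger(LoggingStyleDemo.class);

    public static void main(String[] args) {
        String resourceName = "StatisticalManagerDataBase";
        String scope = "/gcube/devsec";

        // Concatenation builds the full message string even when DEBUG is disabled:
        LOGGER.debug("Searching for Database " + resourceName + " in scope " + scope);

        // The parameterized form only formats the message if DEBUG is enabled:
        LOGGER.debug("Searching for Database {} in scope {}", resourceName, scope);
    }
}
```
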
@ -62,6 +62,8 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
|
||||||
|
|
||||||
private TokenManager tokenm = null;
|
private TokenManager tokenm = null;
|
||||||
|
|
||||||
|
private EnvironmentVariableManager env = null;
|
||||||
|
|
||||||
// inputs and outputs
|
// inputs and outputs
|
||||||
public LinkedHashMap<String, Object> inputs = new LinkedHashMap<String, Object>();
|
public LinkedHashMap<String, Object> inputs = new LinkedHashMap<String, Object>();
|
||||||
public LinkedHashMap<String, Object> outputs = new LinkedHashMap<String, Object>();
|
public LinkedHashMap<String, Object> outputs = new LinkedHashMap<String, Object>();
|
||||||
|
@ -246,14 +248,14 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
|
||||||
|
|
||||||
float previousStatus = -3;
|
float previousStatus = -3;
|
||||||
String host = WPSConfig.getInstance().getWPSConfig().getServer().getHostname();
|
String host = WPSConfig.getInstance().getWPSConfig().getServer().getHostname();
|
||||||
public void updateStatus(float status) {
|
public void updateStatus(float status, boolean canWrite) {
|
||||||
if (agent != null) {
|
if (agent != null) {
|
||||||
if (status != previousStatus) {
|
if (status != previousStatus) {
|
||||||
LOGGER.debug("STATUS update to: {} ", status );
|
LOGGER.debug("STATUS update to: {} ", status );
|
||||||
previousStatus = status;
|
previousStatus = status;
|
||||||
super.update(new Integer((int) status));
|
super.update(new Integer((int) status));
|
||||||
try {
|
try {
|
||||||
updateComputationOnWS(status, null);
|
if (canWrite) updateComputationOnWS(status, null);
|
||||||
} catch (Exception e) {
|
} catch (Exception e) {
|
||||||
LOGGER.warn("error updating compution on WS");
|
LOGGER.warn("error updating compution on WS");
|
||||||
}
|
}
|
||||||
|
@ -262,6 +264,10 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public void setEnvironmentVariableManager(EnvironmentVariableManager env) {
|
||||||
|
this.env = env;
|
||||||
|
}
|
||||||
|
|
||||||
public void updateComputationOnWS(float status, String exception) {
|
public void updateComputationOnWS(float status, String exception) {
|
||||||
updateComputationOnWS(status, exception, null, null);
|
updateComputationOnWS(status, exception, null, null);
|
||||||
}
|
}
|
||||||
|
@ -290,7 +296,7 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
|
||||||
currentComputation.setStatus("" + status);
|
currentComputation.setStatus("" + status);
|
||||||
if (exception != null && exception.length() > 0)
|
if (exception != null && exception.length() > 0)
|
||||||
currentComputation.setException(exception);
|
currentComputation.setException(exception);
|
||||||
|
LOGGER.debug("RunDataspaceManager: [inputData="+inputData+", generatedData="+generatedData+"]");
|
||||||
RunDataspaceManager rundm = new RunDataspaceManager(inputData,generatedData);
|
RunDataspaceManager rundm = new RunDataspaceManager(inputData,generatedData);
|
||||||
rundm.run();
|
rundm.run();
|
||||||
|
|
||||||
|
@ -301,6 +307,8 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@Execute
|
@Execute
|
||||||
public void run() throws Exception {
|
public void run() throws Exception {
|
||||||
if (observer!=null)
|
if (observer!=null)
|
||||||
|
@ -324,8 +332,11 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
|
||||||
} else
|
} else
|
||||||
LOGGER.info("Wps External ID not set");
|
LOGGER.info("Wps External ID not set");
|
||||||
InputsManager inputsManager = null;
|
InputsManager inputsManager = null;
|
||||||
ConfigurationManager configManager = new ConfigurationManager(); // initializes parameters from file
|
ConfigurationManager configManager = new ConfigurationManager(this.env); // initializes parameters from web.xml
|
||||||
manageUserToken();
|
manageUserToken();
|
||||||
|
|
||||||
|
boolean canWriteOnShub = checkWriteAuthorization(tokenm.getUserName());
|
||||||
|
|
||||||
Path dir = Paths.get(System.getProperty("java.io.tmpdir"), "dmlocks");
|
Path dir = Paths.get(System.getProperty("java.io.tmpdir"), "dmlocks");
|
||||||
if (!Files.exists(dir))
|
if (!Files.exists(dir))
|
||||||
dir = Files.createDirectory(dir);
|
dir = Files.createDirectory(dir);
|
||||||
|
@ -380,7 +391,9 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
|
||||||
time("Ecological Engine Algorithm selection");
|
time("Ecological Engine Algorithm selection");
|
||||||
// adding service parameters to the configuration
|
// adding service parameters to the configuration
|
||||||
LOGGER.info("5 - Adding Service parameters to the configuration");
|
LOGGER.info("5 - Adding Service parameters to the configuration");
|
||||||
inputsManager.addInputServiceParameters(getInputParameters(algorithm), infrastructureDialoguer);
|
List<StatisticalType> dataminerInputParameters = getInputParameters(algorithm);
|
||||||
|
LOGGER.debug("Dataminer Algo Default InputParameters: "+dataminerInputParameters);
|
||||||
|
inputsManager.addInputServiceParameters(dataminerInputParameters, infrastructureDialoguer);
|
||||||
time("Service parameters added to the algorithm");
|
time("Service parameters added to the algorithm");
|
||||||
// merging wps with ecological engine parameters - modifies the
|
// merging wps with ecological engine parameters - modifies the
|
||||||
// config
|
// config
|
||||||
|
@ -388,7 +401,7 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
|
||||||
LOGGER.debug("Operator class is " + this.getClass().getCanonicalName());
|
LOGGER.debug("Operator class is " + this.getClass().getCanonicalName());
|
||||||
// build computation Data
|
// build computation Data
|
||||||
currentComputation = new ComputationData(config.getTaskID(), config.getAgent(), "", "", startTime, "-", "0", config.getTaskID(), configManager.getUsername(), config.getGcubeScope(), this.getClass().getCanonicalName());
|
currentComputation = new ComputationData(config.getTaskID(), config.getAgent(), "", "", startTime, "-", "0", config.getTaskID(), configManager.getUsername(), config.getGcubeScope(), this.getClass().getCanonicalName());
|
||||||
inputsManager.mergeWpsAndEcologicalInputs(supportDatabaseInfo);
|
inputsManager.mergeWpsAndEcologicalInputs(supportDatabaseInfo, dataminerInputParameters);
|
||||||
generatedInputTables = inputsManager.getGeneratedTables();
|
generatedInputTables = inputsManager.getGeneratedTables();
|
||||||
generatedFiles = inputsManager.getGeneratedInputFiles();
|
generatedFiles = inputsManager.getGeneratedInputFiles();
|
||||||
time("Setup and download of input parameters with tables creation");
|
time("Setup and download of input parameters with tables creation");
|
||||||
|
@ -412,11 +425,11 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
|
||||||
LOGGER.info("9 - Running the computation and updater");
|
LOGGER.info("9 - Running the computation and updater");
|
||||||
|
|
||||||
LOGGER.info("Initializing the WPS status of the computation");
|
LOGGER.info("Initializing the WPS status of the computation");
|
||||||
updateStatus(0);
|
updateStatus(0, canWriteOnShub);
|
||||||
LOGGER.info("Initializing the computation");
|
LOGGER.info("Initializing the computation");
|
||||||
agent.init();
|
agent.init();
|
||||||
LOGGER.info("Updating status");
|
LOGGER.info("Updating status");
|
||||||
runStatusUpdater();
|
runStatusUpdater(canWriteOnShub);
|
||||||
LOGGER.info("Running the computation");
|
LOGGER.info("Running the computation");
|
||||||
agent.compute();
|
agent.compute();
|
||||||
LOGGER.info("The computation has finished. Retrieving output");
|
LOGGER.info("The computation has finished. Retrieving output");
|
||||||
|
@@ -440,13 +453,13 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
 
 outputmanager.shutdown();
 
-// delete all temporary tables
-LOGGER.info("12 - Deleting possible generated temporary tables");
-LOGGER.debug("Final Computation Output: " + outputs);
+LOGGER.debug("12 - Final Computation Output");
+LOGGER.debug("Outputs: "+ outputs);
 
 endTime = new java.text.SimpleDateFormat("dd/MM/yyyy HH:mm:ss").format(System.currentTimeMillis());
 if (!isCancelled()) {
-saveComputationOnWS(inputsManager.getProvenanceData(), outputmanager.getProvenanceData(), agent, generatedFiles);
+LOGGER.debug("Save Computation Data");
+if (canWriteOnShub) saveComputationOnWS(inputsManager.getProvenanceData(), outputmanager.getProvenanceData(), agent, generatedFiles);
 } else {
 LOGGER.debug("Computation interrupted - no update");
 throw new Exception("Computation cancelled");
@@ -461,9 +474,9 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
 exitstatus = -1;
 
 if (inputsManager != null)
-updateComputationOnWS(exitstatus, e.getMessage(), inputsManager.getProvenanceData(), generatedFiles);
+if (canWriteOnShub) updateComputationOnWS(exitstatus, e.getMessage(), inputsManager.getProvenanceData(), generatedFiles);
 else
-updateComputationOnWS(exitstatus, e.getMessage());
+if (canWriteOnShub) updateComputationOnWS(exitstatus, e.getMessage());
 if (isCancelled())
 throw new Exception("Computation cancelled");
 else
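Note on the hunk above: the new canWriteOnShub guards are inserted without braces, and by Java's dangling-else rule the else now binds to the inner if (canWriteOnShub) rather than to if (inputsManager != null). A braced sketch that keeps the original pairing (illustrative only, not part of this changeset):

    // Braces keep the else attached to the inputsManager check,
    // so the 2-argument overload is only used when inputsManager is null.
    if (inputsManager != null) {
        if (canWriteOnShub)
            updateComputationOnWS(exitstatus, e.getMessage(), inputsManager.getProvenanceData(), generatedFiles);
    } else {
        if (canWriteOnShub)
            updateComputationOnWS(exitstatus, e.getMessage());
    }
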
@@ -490,6 +503,18 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
 
 }
 
+private boolean checkWriteAuthorization(String username) {
+if (env!=null && env.getShubUsersExcluded()!=null) {
+if (env.getShubUsersExcluded().isEmpty()) {
+return false;
+}
+if (env.getShubUsersExcluded().contains(username)) {
+return false;
+}
+}
+return true;
+}
+
 private void accountAlgorithmExecution(long start, long end, OperationResult result) {
 try{
 JobUsageRecord jobUsageRecord = new JobUsageRecord();
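For reference, the intended semantics of the exclusion list consulted by checkWriteAuthorization, shown as a small illustrative sketch (the constructor arguments are made-up values, not project defaults):

    // null list  -> every user may write on StorageHub
    // empty list -> no user may write
    // non-empty  -> only the listed users are blocked
    EnvironmentVariableManager env =
            new EnvironmentVariableManager(4, true, false, java.util.Arrays.asList("guest"));
    // checkWriteAuthorization("guest") -> false (explicitly excluded)
    // checkWriteAuthorization("alice") -> true  (not in the list)
    // with Collections.<String>emptyList() both calls would return false
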
@@ -513,11 +538,17 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
 
 public class StatusUpdater implements Runnable {
 
+private boolean canWrite = true;
+
+public StatusUpdater(boolean canWrite) {
+this.canWrite = canWrite;
+}
+
 @Override
 public void run() {
 while (agent != null && !isCancelled() && agent.getStatus() < 100) {
 try {
-updateStatus(agent.getStatus());
+updateStatus(agent.getStatus(), canWrite);
 Thread.sleep(10000);
 } catch (InterruptedException e) {}
 }
@@ -525,8 +556,8 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
 }
 }
 
-private void runStatusUpdater() {
-StatusUpdater updater = new StatusUpdater();
+private void runStatusUpdater(boolean canWrite) {
+StatusUpdater updater = new StatusUpdater(canWrite);
 
 Thread t = new Thread(updater);
 t.start();
@@ -534,6 +565,11 @@ public class AbstractEcologicalEngineMapper extends AbstractAnnotatedAlgorithm i
 }
 
 private void saveComputationOnWS(List<StoredData> inputData, List<StoredData> outputData, ComputationalAgent agent, List<File> generatedFiles) {
+LOGGER.debug("Save Computation On WS");
+LOGGER.debug("InputData: "+inputData);
+LOGGER.debug("OutputData: "+outputData);
+LOGGER.debug("Agent: "+agent);
+LOGGER.debug("Generated files: "+generatedFiles);
 LOGGER.debug("Provenance manager started for operator " + this.getClass().getCanonicalName());
 
 ComputationData computation = new ComputationData(config.getTaskID(), config.getAgent(), agent.getDescription(), agent.getInfrastructure().name(), startTime, endTime, "100", config.getTaskID(), config.getParam(ConfigurationManager.serviceUserNameParameterVariable), config.getGcubeScope(), this.getClass().getCanonicalName());

@@ -29,6 +29,9 @@ public class ConfigurationManager {
 private static Boolean useStorage = null;
 static boolean simulationMode = false;
 
+
+EnvironmentVariableManager env = null;
+
 public static synchronized Integer getMaxComputations(){
 return maxComputations;
 }
@@ -41,7 +44,12 @@ public class ConfigurationManager {
 return simulationMode;
 }
 
+@Deprecated
 public void getInitializationProperties() {
+
+}
+
+private void inizializePropertiesUsingTemplateFile() {
 try {
 if (maxComputations == null) {
 Properties options = new Properties();
@@ -49,6 +57,7 @@ public class ConfigurationManager {
 options.load(is);
 is.close();
 maxComputations = Integer.parseInt(options.getProperty("maxcomputations"));
+logger.info("setting max computation to {}",maxComputations);
 useStorage = Boolean.parseBoolean(options.getProperty("saveond4sstorage"));
 simulationMode=Boolean.parseBoolean(options.getProperty("simulationMode"));
 }
@@ -70,8 +79,13 @@ public class ConfigurationManager {
 return username;
 }
 
-public ConfigurationManager() {
-getInitializationProperties();
+public ConfigurationManager(EnvironmentVariableManager env) {
+if (env == null) inizializePropertiesUsingTemplateFile();
+else {
+maxComputations = env.getMaxComputation();
+useStorage = env.isSaveOnStorage();
+simulationMode = env.isSimulationMode();
+}
 }
 
 public AlgorithmConfiguration getConfig() {
@@ -88,35 +102,33 @@ public class ConfigurationManager {
 config.setAlgorithmClassLoader(Thread.currentThread().getContextClassLoader());
 String webperspath = WPSConfig.getConfigDir() + "../persistence/";
 // selecting persistence path
-// String persistencePath = File.createTempFile("wpsstatcheck", ".sm").getParent() + "/../cfg/";
-
-//TODO: REMOVE this shit (the persistence must be the persistence dir of the webapp)
 String persistencePath = WPSConfig.getConfigDir() + "../ecocfg/";
 String configPath = persistencePath;
 if (!new File(configPath).isDirectory()) {
 configPath = "./cfg/";
 persistencePath = "./";
 }
-logger.debug("Taking configuration from " + (new File(configPath).getAbsolutePath()) + " and persistence in " + persistencePath);
+logger.debug("Taking configuration from {}", configPath);
+//+ " and persistence in " + persistencePath);
 // setting configuration and logger
-config.setPersistencePath(persistencePath);
+config.setPersistencePath(configPath);
 config.setConfigPath(configPath);
 config.setNumberOfResources(1);
 // setting application paths
 String webapp = WPSConfig.getInstance().getWPSConfig().getServer().getWebappPath();
 String host = WPSConfig.getInstance().getWPSConfig().getServer().getHostname();
 String port = WPSConfig.getInstance().getWPSConfig().getServer().getHostport();
-logger.debug("Host: " + host + " Port: " + port + " Webapp: " + webapp + " ");
-logger.debug("Web persistence path: " + webperspath);
+logger.debug("Host: {} Port: {} Webapp: {} ", host, port, webapp );
+logger.debug("Web persistence path:{} ", webperspath);
 
 String webPath = "http://" + host + ":" + port + "/" + webapp + "/persistence/";
 
 // logger.debug("Env Vars: \n"+System.getenv());
-logger.debug("Web app path: " + webPath);
+logger.debug("Web app path: {} ", webPath);
 
 // retrieving scope
 scope = (String) inputs.get(scopeParameter);
-logger.debug("Retrieved scope: " + scope);
+logger.debug("Retrieved scope: {} ", scope);
 if (scope == null)
 throw new Exception("Error: scope parameter (scope) not set! This violates e-Infrastructure security policies");
 if (!scope.startsWith("/"))
@@ -125,8 +137,8 @@ public class ConfigurationManager {
 username = (String) inputs.get(usernameParameter);
 token = (String) inputs.get(tokenParameter);
 
-logger.debug("User name used by the client: " + username);
-logger.debug("User token used by the client: " + token);
+logger.debug("User name used by the client:{} ", username);
+logger.debug("User token used by the client:{} ", token);
 
 if (username == null || username.trim().length() == 0)
 throw new Exception("Error: user name parameter (user.name) not set! This violates e-Infrastructure security policies");
@@ -139,7 +151,7 @@ public class ConfigurationManager {
 config.setGcubeToken(token);
 // DONE get username from request
 config.setParam(serviceUserNameParameterVariable, username);
-config.setParam(processingSessionVariable, "" + UUID.randomUUID());
+config.setParam(processingSessionVariable, UUID.randomUUID().toString());
 config.setParam(webpathVariable, webPath);
 config.setParam(webPersistencePathVariable, webperspath);
 

@@ -0,0 +1,38 @@
+package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping;
+
+import java.util.List;
+
+public class EnvironmentVariableManager {
+
+public EnvironmentVariableManager(int maxComputation, boolean saveOnStorage, boolean simulationMode, List<String> shubUsersExcluded) {
+super();
+this.maxComputation = maxComputation;
+this.saveOnStorage = saveOnStorage;
+this.simulationMode = simulationMode;
+this.shubUsersExcluded = shubUsersExcluded;
+}
+
+private int maxComputation;
+private boolean saveOnStorage;
+private boolean simulationMode;
+
+//null: all users will write on SHub
+//empty: no one will write on Shub
+//filled: users reported will not write on Shub
+private List<String> shubUsersExcluded;
+
+public int getMaxComputation() {
+return maxComputation;
+}
+public boolean isSaveOnStorage() {
+return saveOnStorage;
+}
+public boolean isSimulationMode() {
+return simulationMode;
+}
+
+public List<String> getShubUsersExcluded() {
+return shubUsersExcluded;
+}
+
+}
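A short usage sketch tying this new class to the reworked ConfigurationManager constructor above (the argument values are illustrative): passing null preserves the old template-file behaviour, while a non-null manager injects the settings directly and skips file I/O.

    // Old behaviour: properties are read from the template file.
    ConfigurationManager fromTemplate = new ConfigurationManager(null);

    // New behaviour: values injected from the environment, no file access.
    EnvironmentVariableManager env = new EnvironmentVariableManager(
            2,      // maxComputation (illustrative)
            true,   // saveOnStorage
            false,  // simulationMode
            null);  // shubUsersExcluded: null = everyone may write on StorageHub
    ConfigurationManager fromEnv = new ConfigurationManager(env);
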
@@ -5,6 +5,7 @@ import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.FileReader;
+import java.io.IOException;
 import java.io.InputStream;
 import java.net.HttpURLConnection;
 import java.net.URL;
@@ -43,6 +44,8 @@ public class InputsManager {
 
 private static final Logger LOGGER = LoggerFactory.getLogger(InputsManager.class);
 
+private static final long SHUB_RETRY_MILLIS = 2000;
+
 LinkedHashMap<String, Object> inputs;
 List<String> generatedTables;
 List<File> generatedFiles;
@@ -51,11 +54,11 @@ public class InputsManager {
 String computationId;
 
 List<StoredData> provenanceData = new ArrayList<StoredData>();
 
 public List<StoredData> getProvenanceData() {
 return provenanceData;
 }
 
 
 public static String inputsSeparator = "\\|";
 
 public AlgorithmConfiguration getConfig() {
@@ -92,30 +95,35 @@ public class InputsManager {
 config.setParam("DatabaseURL", supportDatabaseInfo.url);
 }
 
-public void mergeWpsAndEcologicalInputs(DatabaseInfo supportDatabaseInfo) throws Exception {
+public void mergeWpsAndEcologicalInputs(DatabaseInfo supportDatabaseInfo,
+List<StatisticalType> dataminerInputParameters) throws Exception {
+LOGGER.debug("Merge WPS And Ecological Inputs");
 // browse input parameters from WPS
 for (String inputName : inputs.keySet()) {
 Object input = inputs.get(inputName);
-LOGGER.debug("Managing Input Parameter with Name "+ inputName);
+LOGGER.debug("Managing Input Parameter with Name " + inputName);
 // case of simple input
 if (input instanceof String) {
-LOGGER.debug("Simple Input: "+ input);
+LOGGER.debug("Simple Input: " + input);
 // manage lists
 String inputAlgoOrig = ((String) input).trim();
-String inputAlgo = ((String) input).trim().replaceAll(inputsSeparator, AlgorithmConfiguration.listSeparator);
+String inputAlgo = ((String) input).trim().replaceAll(inputsSeparator,
+AlgorithmConfiguration.listSeparator);
 LOGGER.debug("Simple Input Transformed: " + inputAlgo);
 config.setParam(inputName, inputAlgo);
 
-saveInputData(inputName,inputName,inputAlgoOrig);
+saveInputData(inputName, inputName, inputAlgoOrig);
 }
 // case of Complex Input
 else if (input instanceof GenericFileData) {
 
-LOGGER.debug("Complex Input: " + input);
+LOGGER.debug("Complex Input");
 // retrieve payload
 GenericFileData files = ((GenericFileData) input);
+LOGGER.debug("GenericFileData: [fileExtension=" + files.getFileExtension() + ", mimeType="
++ files.getMimeType() + "]");
+
-List<File> localfiles = getLocalFiles(files,inputName);
+List<File> localfiles = getLocalFiles(files, inputName, dataminerInputParameters);
 String inputtables = "";
 int nfiles = localfiles.size();
 StringBuffer sb = new StringBuffer();
@@ -128,10 +136,11 @@ public class InputsManager {
 
 if (inputTableTemplates.get(inputName) != null) {
 LOGGER.debug("Creating table: " + tableName);
-createTable(tableName, tableFile, config, supportDatabaseInfo, inputTableTemplates.get(inputName));
+createTable(tableName, tableFile, config, supportDatabaseInfo,
+inputTableTemplates.get(inputName));
 generatedTables.add(tableName);
 }
-//case of non-table input file, e.g. FFANN
+// case of non-table input file, e.g. FFANN
 else
 tableName = tableFile.getAbsolutePath();
 if (i > 0)
@@ -140,13 +149,13 @@ public class InputsManager {
 inputtables += tableName;
 
 saveInputData(tableFile.getName(), inputName, tableFile.getAbsolutePath());
-if (i>0)
+if (i > 0)
 sb.append("|");
 
 sb.append(tableFile.getName());
 }
 sb.append("|");
-if (nfiles>0)
+if (nfiles > 0)
 saveInputData(inputName, inputName, sb.toString());
 
 // the only possible complex input is a table - check the WPS
@@ -157,7 +166,7 @@ public class InputsManager {
 
 }
 
-public boolean isXML(String fileContent){
+public boolean isXML(String fileContent) {
 
 if (fileContent.startsWith("<"))
 return true;
@@ -165,7 +174,7 @@ public class InputsManager {
 return false;
 }
 
-public String readOneLine(String filename){
+public String readOneLine(String filename) {
 
 try {
 BufferedReader in = new BufferedReader(new FileReader(new File(filename)));
@@ -173,7 +182,7 @@ public class InputsManager {
 String vud = "";
 
 while ((line = in.readLine()) != null) {
-if (line.trim().length()>0){
+if (line.trim().length() > 0) {
 vud = line.trim();
 break;
 }
@@ -186,61 +195,82 @@ public class InputsManager {
 }
 }
 
-public String inputNameFromHttpHeader(String url) throws Exception{
+public String inputNameFromHttpHeader(String url) throws Exception {
+LOGGER.debug("Search filename in http header from: " + url);
 URL obj = new URL(url);
 URLConnection conn = obj.openConnection();
-String filename=null;
+String filename = null;
 // get all headers
 Map<String, List<String>> map = conn.getHeaderFields();
-LOGGER.debug("Getting file name from http header");
 for (Map.Entry<String, List<String>> entry : map.entrySet()) {
 String value = entry.getValue().toString();
-if (value.toLowerCase().contains("filename=")){
-LOGGER.debug("Searching in http header: found file name in header value "+value);
-filename=value.substring(value.indexOf("=")+1);
-filename=filename.replace("\"", "").replace("]", "");
-LOGGER.debug("Searching in http header: retrieved file name "+filename);
+LOGGER.debug("Header value: " + value);
+if (value.toLowerCase().contains("filename")) {
+LOGGER.debug("Searching in http header: found file name in header value {}", value);
+filename = value.substring(value.indexOf("=") + 1);
+filename = filename.replace("\"", "").replace("]", "");
+LOGGER.debug("Searching in http header: retrieved file name {}", filename);
 break;
 }
 }
+LOGGER.debug("Filename retrieved from http header: " + filename);
 return filename;
 }
 
-public List<File> getLocalFiles(GenericFileData files,String inputName) throws Exception {
+public List<File> getLocalFiles(GenericFileData files, String inputName,
+List<StatisticalType> dataminerInputParameters) throws Exception {
+LOGGER.debug("GetLocalFiles: [files: " + files + ", inputName: " + inputName + "]");
 // download input
 List<File> filesList = new ArrayList<File>();
 File f = files.getBaseFile(false);
-LOGGER.debug("Retrieving file content as a URL link: " + f.getAbsolutePath());
-//TODO DO NOT READ FILE INTO MEMORY
+LOGGER.debug("Retrieving local files: " + f.getAbsolutePath());
+// TODO DO NOT READ FILE INTO MEMORY
 // read file content
 String fileLink = readOneLine(f.getAbsolutePath());
-LOGGER.debug("File link: " + fileLink.substring(0,Math.min(fileLink.length(),10)) + "...");
+LOGGER.debug("Check File is link: {} ...", fileLink.substring(0, Math.min(fileLink.length(), 10)));
 String fileName = "";
 // case of a http link
-if (fileLink!=null && fileLink.toLowerCase().startsWith("http:") || fileLink.toLowerCase().startsWith("https:")) {
+if (fileLink != null
+&& (fileLink.toLowerCase().startsWith("http:") || fileLink.toLowerCase().startsWith("https:"))) {
 // manage the case of multiple files
+LOGGER.debug("Complex Input payload is link");
+
+LOGGER.debug("Retrieving files from url: " + fileLink);
 String[] remotefiles = fileLink.split(inputsSeparator);
 for (String subfilelink : remotefiles) {
 subfilelink = subfilelink.trim();
-LOGGER.debug("Managing link: " + subfilelink);
+LOGGER.debug("Managing link: {}", subfilelink);
 if (subfilelink.length() == 0)
 continue;
 InputStream is = null;
 HttpURLConnection urlConnection = null;
 URL url = new URL(subfilelink);
-urlConnection = (HttpURLConnection) url.openConnection();
-is = new BufferedInputStream(urlConnection.getInputStream());
+try {
+urlConnection = (HttpURLConnection) url.openConnection();
+is = new BufferedInputStream(urlConnection.getInputStream());
+}catch(IOException e) {
+LOGGER.warn("download from storagehub failed. Retry ongoing...");
+LOGGER.debug("waiting "+SHUB_RETRY_MILLIS+" millis ");
+Thread.sleep(SHUB_RETRY_MILLIS);
+urlConnection = (HttpURLConnection) url.openConnection();
+is = new BufferedInputStream(urlConnection.getInputStream());
+LOGGER.debug(" retry success ");
+}
 // retrieve payload: for test purpose only
 String fileNameTemp = inputNameFromHttpHeader(subfilelink);
 
-if (fileNameTemp==null)
-fileName = inputName+"_[" + computationId + "]";
-else
-fileName = fileNameTemp+ "_[" + computationId + "]."+FilenameUtils.getExtension(fileNameTemp);
+LOGGER.debug("the fileNameTemp is {}", fileNameTemp);
+if (fileNameTemp != null && !fileNameTemp.isEmpty()) {
+fileName = String.format("%s_(%s).%s", inputName, computationId,
+FilenameUtils.getExtension(fileNameTemp));
+} else {
+fileName = String.format("%s_(%s).%s", inputName, computationId,
+FilenameUtils.getExtension(inputName));
+
+}
+LOGGER.debug("the name of the generated file is {}", fileName);
+
-LOGGER.debug("Retrieving remote input in file: " + fileName);
-LOGGER.debug("Creating local temp file: " + fileName);
 File of = new File(config.getPersistencePath(), fileName);
 FileOutputStream fos = new FileOutputStream(of);
 IOUtils.copy(is, fos);
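The retry added in this hunk attempts the download exactly twice, with a fixed SHUB_RETRY_MILLIS pause between attempts. The same idea as a small helper, sketched here for clarity (hypothetical, not part of the changeset; it drops the explicit HttpURLConnection handle that the real code keeps so it can call disconnect() later):

    // Try once; on IOException wait SHUB_RETRY_MILLIS and try a second time,
    // letting the second failure propagate to the caller.
    private InputStream openWithSingleRetry(URL url) throws IOException, InterruptedException {
        try {
            return new BufferedInputStream(url.openConnection().getInputStream());
        } catch (IOException e) {
            LOGGER.warn("download from storagehub failed. Retry ongoing...");
            Thread.sleep(SHUB_RETRY_MILLIS);
            return new BufferedInputStream(url.openConnection().getInputStream());
        }
    }
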
@@ -249,32 +279,57 @@ public class InputsManager {
 fos.close();
 urlConnection.disconnect();
 filesList.add(of);
-LOGGER.debug("Created local file: " + of.getAbsolutePath());
+LOGGER.debug("Created local file: {}", of.getAbsolutePath());
 }
 } else {
-LOGGER.debug("Complex Input payload is the filelink");
+LOGGER.debug("Complex Input payload is file");
 fileName = f.getName();
-LOGGER.debug("Retriving local input from file: " + fileName);
 
-if (isXML(fileLink))
-{
+LOGGER.debug("Retrieving local input from file: {}", fileName);
+
+String fileExt = null;
+
+if (isXML(fileLink)) {
 String xmlFile = f.getAbsolutePath();
-String csvFile = xmlFile+".csv";
-LOGGER.debug("Transforming XML file into a csv: " + csvFile);
+String csvFile = xmlFile + ".csv";
+LOGGER.debug("Transforming XML file into a csv: {} ", csvFile);
 GML2CSV.parseGML(xmlFile, csvFile);
-LOGGER.debug("GML Parsed: " + readOneLine(csvFile)+"[..]");
+LOGGER.debug("GML Parsed: {} [..]", readOneLine(csvFile));
 f = new File(csvFile);
+fileExt = "csv";
+} else {
+LOGGER.debug("The file is a csv: {}", f.getAbsolutePath());
+fileExt = FilenameUtils.getExtension(fileName);
 }
-else{
-LOGGER.debug("The file is a csv: " + f.getAbsolutePath());
+LOGGER.debug("Retrieve default extension");
+String fileDefaultValue = null;
+for (StatisticalType defaultInputParameter : dataminerInputParameters) {
+if (defaultInputParameter.getName().compareTo(inputName) == 0) {
+fileDefaultValue = defaultInputParameter.getDefaultValue();
+break;
+}
 }
-String absFile = new File(f.getParent(),inputName+ "_[" + computationId + "].csv").getAbsolutePath();
-LOGGER.debug("Renaming to: "+absFile);
+LOGGER.debug("Parameter default value retrieved: " + fileDefaultValue);
+
+if (fileDefaultValue != null && !fileDefaultValue.isEmpty()) {
+int lastPointIndex = fileDefaultValue.lastIndexOf(".");
+if (lastPointIndex > -1 && lastPointIndex < (fileDefaultValue.length() - 1)) {
+fileExt = fileDefaultValue.substring(lastPointIndex + 1);
+LOGGER.debug("Default Extension retrieved: " + fileExt);
+}
+}
+
+LOGGER.debug("Use extension: " + fileExt);
+
+String absFile = new File(f.getParent(), String.format("%s_(%s).%s", inputName, computationId, fileExt))
+.getAbsolutePath();
+LOGGER.debug("Renaming to: " + absFile);
 System.gc();
 boolean renamed = f.renameTo(new File(absFile));
 if (renamed)
 f = new File(absFile);
-LOGGER.debug("The file has been renamed as : " + f.getAbsolutePath()+" - "+renamed);
+LOGGER.debug("The file has been renamed as : {} - {}", f.getAbsolutePath(), renamed);
 filesList.add(f);
 
 }
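The manual lastIndexOf scan introduced above duplicates what the commons-io FilenameUtils helper already used elsewhere in this class provides. A hedged equivalent for the common case (sketch only; getExtension returns the empty string when there is no dot):

    // Same default-extension lookup via commons-io:
    if (fileDefaultValue != null) {
        String ext = FilenameUtils.getExtension(fileDefaultValue);
        if (!ext.isEmpty()) {
            fileExt = ext;
        }
    }
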
@@ -282,7 +337,8 @@ public class InputsManager {
 return filesList;
 }
 
-public void createTable(String tableName, File tableFile, AlgorithmConfiguration config, DatabaseInfo supportDatabaseInfo, String inputTableTemplate) throws Exception {
+public void createTable(String tableName, File tableFile, AlgorithmConfiguration config,
+DatabaseInfo supportDatabaseInfo, String inputTableTemplate) throws Exception {
 
 // creating table
 LOGGER.debug("Complex Input size after download: " + tableFile.length());
@@ -308,11 +364,13 @@ public class InputsManager {
 TableTemplatesMapper mapper = new TableTemplatesMapper();
 String createstatement = mapper.generateCreateStatement(tableName, templatename, tableStructure);
 LOGGER.debug("Creating table: " + tableName);
-DatabaseUtils.createBigTable(true, tableName, supportDatabaseInfo.driver, supportDatabaseInfo.username, supportDatabaseInfo.password, supportDatabaseInfo.url, createstatement, dbConnection);
-DatabaseUtils.createRemoteTableFromFile(tableFile.getAbsolutePath(), tableName, ",", true, supportDatabaseInfo.username, supportDatabaseInfo.password, supportDatabaseInfo.url);
+DatabaseUtils.createBigTable(true, tableName, supportDatabaseInfo.driver, supportDatabaseInfo.username,
+supportDatabaseInfo.password, supportDatabaseInfo.url, createstatement, dbConnection);
+DatabaseUtils.createRemoteTableFromFile(tableFile.getAbsolutePath(), tableName, ",", true,
+supportDatabaseInfo.username, supportDatabaseInfo.password, supportDatabaseInfo.url);
 
 } catch (Exception e) {
-LOGGER.error("Error in database transaction " ,e);
+LOGGER.error("Error in database transaction ", e);
 throw new Exception("Error in creating the table for " + tableName + ": " + e.getLocalizedMessage());
 } finally {
 DatabaseUtils.closeDBConnection(dbConnection);
@@ -368,7 +426,8 @@ public class InputsManager {
 return structure.toString();
 }
 
-public void addInputServiceParameters(List<StatisticalType> agentInputs, InfrastructureDialoguer infrastructureDialoguer) throws Exception {
+public void addInputServiceParameters(List<StatisticalType> agentInputs,
+InfrastructureDialoguer infrastructureDialoguer) throws Exception {
 
 // check and fullfil additional parameters
 DatabaseInfo dbinfo = null;
@@ -376,10 +435,10 @@ public class InputsManager {
 
 for (StatisticalType type : agentInputs) {
 if (type instanceof PrimitiveType) {
-if (((PrimitiveType) type).getType()==PrimitiveTypes.CONSTANT){
-String constant = ""+((PrimitiveType) type).getDefaultValue();
+if (((PrimitiveType) type).getType() == PrimitiveTypes.CONSTANT) {
+String constant = "" + ((PrimitiveType) type).getDefaultValue();
 config.setParam(type.getName(), constant);
-LOGGER.debug("Constant parameter: "+constant);
+LOGGER.debug("Constant parameter: " + constant);
 }
 }
 if (type instanceof ServiceType) {
@@ -392,10 +451,10 @@ public class InputsManager {
 String value = "";
 if (sp == ServiceParameters.RANDOMSTRING)
 value = "stat" + UUID.randomUUID().toString().replace("-", "");
-else if (sp == ServiceParameters.USERNAME){
+else if (sp == ServiceParameters.USERNAME) {
 value = (String) inputs.get(ConfigurationManager.usernameParameter);
 
-LOGGER.debug("User name used by the client: "+value);
+LOGGER.debug("User name used by the client: " + value);
 }
 LOGGER.debug("ServiceType Adding: (" + name + "," + value + ")");
 config.setParam(name, value);
@@ -436,8 +495,8 @@ public class InputsManager {
 
 }
 
-private void saveInputData(String name, String description, String payload){
+private void saveInputData(String name, String description, String payload) {
+LOGGER.debug("SaveInputData [name="+name+", description="+description+", payload="+payload+"]");
 String id = name;
 DataProvenance provenance = DataProvenance.IMPORTED;
 String creationDate = new java.text.SimpleDateFormat("dd/MM/yyyy HH:mm:ss").format(System.currentTimeMillis());
@@ -445,18 +504,17 @@ public class InputsManager {
 
 String type = "text/plain";
 
-if (payload != null && (new File (payload).exists())) {
+if (payload != null && (new File(payload).exists())) {
 if (payload.toLowerCase().endsWith(".csv") || payload.toLowerCase().endsWith(".txt")) {
 type = "text/csv";
 } else
 type = "application/d4science";
 }
 
-StoredData data = new StoredData(name, description, id, provenance, creationDate, operator, computationId, type,payload,config.getGcubeScope());
+StoredData data = new StoredData(name, description, id, provenance, creationDate, operator, computationId, type,
+payload, config.getGcubeScope());
 
 provenanceData.add(data);
 }
 
 
 
 }

@@ -46,12 +46,13 @@ public class OutputsManager {
 return generatedTables;
 }
 
-public OutputsManager(AlgorithmConfiguration config,String computationsession) {
+public OutputsManager(AlgorithmConfiguration config, String computationsession) {
 this.config = config;
-this.computationsession=computationsession;
+this.computationsession = computationsession;
 }
 
-public LinkedHashMap<String, Object> createOutput(StatisticalType prioroutput, StatisticalType posterioroutput) throws Exception {
+public LinkedHashMap<String, Object> createOutput(StatisticalType prioroutput, StatisticalType posterioroutput)
+throws Exception {
 
 LinkedHashMap<String, Object> outputs = new LinkedHashMap<String, Object>();
 
@@ -66,9 +67,9 @@ public class OutputsManager {
 StatisticalTypeToWPSType postconverter = new StatisticalTypeToWPSType();
 postconverter.convert2WPSType(posterioroutput, false, config);
 generatedFiles.addAll(postconverter.getGeneratedFiles());
-LOGGER.debug("Generated Files "+generatedFiles);
+LOGGER.debug("Generated Files " + generatedFiles);
 generatedTables.addAll(postconverter.getGeneratedTables());
-LOGGER.debug("Generated Tables "+generatedFiles);
+LOGGER.debug("Generated Tables " + generatedFiles);
 
 LinkedHashMap<String, IOWPSInformation> postOutput = postconverter.outputSet;
 
@@ -102,21 +103,23 @@ public class OutputsManager {
 if (ConfigurationManager.useStorage()) {
 if (postInfo.getLocalMachineContent() != null) {
 // return the url from storage manager
-String storageurl = uploadFileOnStorage(postInfo.getLocalMachineContent(), postInfo.getMimetype());
+String storageurl = uploadFileOnStorage(postInfo.getLocalMachineContent(),
+postInfo.getMimetype());
 postInfo.setContent(storageurl);
 }
 }
 /*
-else if (postInfo.getLocalMachineContent() != null) {
-String url = "<wps:Reference mimeType=\""+postInfo.getMimetype()+"\" xlink:href=\""+postInfo.getContent()+"\" method=\"GET\"/>";
-LOGGER.debug("Reference URL: " + url);
-outputs.put(okey, url);
-}
-else*/
+* else if (postInfo.getLocalMachineContent() != null) { String
+* url = "<wps:Reference mimeType=\""+postInfo.getMimetype()
+* +"\" xlink:href=\""+postInfo.getContent()
+* +"\" method=\"GET\"/>"; LOGGER.debug("Reference URL: " +
+* url); outputs.put(okey, url); } else
+*/
 if (info != null) {
 LOGGER.debug("Found a corresponding output: " + okey);
 outputs.put(okey, postInfo.getContent());
-//add link to the file also among the non deterministic output
+// add link to the file also among the non deterministic
+// output
 if (postInfo.getLocalMachineContent() != null) {
 ndoutput.put(okey, postInfo);
 }
@@ -133,19 +136,19 @@ public class OutputsManager {
 XmlObject ndxml = generateNonDeterministicOutput(ndoutput);
 outputs.put("non_deterministic_output", ndxml);
 
-//safety check for declared output, i.e. a priori output
-for (String pkey:priorOutput.keySet()){
-if (outputs.get(pkey)==null){
-LOGGER.debug("Safety check: adding empty string for " + pkey+ " of type "+priorOutput.get(pkey).getClassname());
+// safety check for declared output, i.e. a priori output
+for (String pkey : priorOutput.keySet()) {
+if (outputs.get(pkey) == null) {
+LOGGER.debug("Safety check: adding empty string for " + pkey + " of type "
++ priorOutput.get(pkey).getClassname());
 outputs.put(pkey, "");
 }
 }
+LOGGER.debug("OutputsManager outputs " + outputs);
 return outputs;
 }
 
-private void saveProvenanceData(IOWPSInformation info){
+private void saveProvenanceData(IOWPSInformation info) {
 String name = info.getName();
 String id = info.getName();
 DataProvenance provenance = DataProvenance.COMPUTED;
@@ -153,59 +156,97 @@ public class OutputsManager {
 String operator = config.getAgent();
 String computationId = computationsession;
 String type = info.getMimetype();
-/* if (info.getLocalMachineContent() != null) {
-type = StoredType.DATA;
-}
+/*
+* if (info.getLocalMachineContent() != null) { type = StoredType.DATA;
+* }
 */
 String payload = info.getContent();
 
-StoredData data = new StoredData(name, info.getAbstractStr(),id, provenance, creationDate, operator, computationId, type,payload,config.getGcubeScope());
+StoredData data = new StoredData(name, info.getAbstractStr(), id, provenance, creationDate, operator,
+computationId, type, payload, config.getGcubeScope());
 
 provenanceData.add(data);
 }
 
 private void prepareForStoring() {
 LOGGER.debug("Preparing storage client");
-//String scope = config.getGcubeScope();
-//ScopeProvider.instance.set(scope);
+// String scope = config.getGcubeScope();
+// ScopeProvider.instance.set(scope);
 String serviceClass = "WPS";
 String serviceName = "wps.synch";
 String owner = config.getParam(ConfigurationManager.serviceUserNameParameterVariable);
-storageclient = new StorageClient(serviceClass, serviceName, owner, AccessType.SHARED, MemoryType.VOLATILE).getClient();
+storageclient = new StorageClient(serviceClass, serviceName, owner, AccessType.SHARED, MemoryType.VOLATILE)
+.getClient();
 LOGGER.debug("Storage client ready");
 }
 
 
 private String uploadFileOnStorage(String localfile, String mimetype) throws Exception {
 LOGGER.debug("Storing->Start uploading on storage the following file: " + localfile);
 File localFile = new File(localfile);
-String remotef = "/wps_synch_output/" +config.getAgent()+"/"+computationsession+"/"+ localFile.getName();
-storageclient.put(true).LFile(localfile).RFile(remotef);
-String url = storageclient.getHttpUrl().RFile(remotef);
+String remotef = "/wps_synch_output/" + config.getAgent() + "/" + computationsession + "/"
++ localFile.getName();
+String contentType=retrieveContentType(localfile);
+LOGGER.debug("Retrieved Content-Type: "+contentType);
+if(contentType==null||contentType.isEmpty()){
+storageclient.put(true).LFile(localfile).RFile(remotef);
+} else {
+storageclient.put(true,contentType).LFile(localfile).RFile(remotef);
+}
+String url = storageclient.getHttpsUrl().RFile(remotef);
 
 /*
-if (config.getGcubeScope().startsWith("/gcube"))
-url = "http://data-d.d4science.org/uri-resolver/smp?smp-uri=" + url + "&fileName=" + localFile.getName() + "&contentType=" + mimetype;
-else
-url = "http://data.d4science.org/uri-resolver/smp?smp-uri=" + url+ "&fileName=" + localFile.getName() + "&contentType=" + mimetype;
-*/
+* if (config.getGcubeScope().startsWith("/gcube")) url =
+* "http://data-d.d4science.org/uri-resolver/smp?smp-uri=" + url +
+* "&fileName=" + localFile.getName() + "&contentType=" + mimetype; else
+* url = "http://data.d4science.org/uri-resolver/smp?smp-uri=" + url+
+* "&fileName=" + localFile.getName() + "&contentType=" + mimetype;
+*/
 LOGGER.info("Storing->Uploading finished - URL: " + url);
 return url;
 
 }
 
+private String retrieveContentType(String fileName) {
+String contentType=null;
+if (fileName != null && !fileName.isEmpty()) {
+String fileNameLowerCase = fileName.toLowerCase();
+if (fileNameLowerCase.endsWith(".html") || fileNameLowerCase.endsWith(".htm")) {
+contentType="text/html";
+} else {
+if (fileNameLowerCase.endsWith(".pdf")) {
+contentType="application/pdf";
+} else {
+if (fileNameLowerCase.endsWith(".log") || fileNameLowerCase.endsWith(".txt")) {
+contentType="text/plain";
+} else {
+if (fileNameLowerCase.endsWith(".json")) {
+contentType="application/json";
+} else {
+}
+}
+}
+}
+}
+return contentType;
+}
+
 public String cleanTagString(String tag) {
 return tag.replace(" ", "_").replaceAll("[\\]\\[!\"#$%&'()*+,\\./:;<=>?@\\^`{|}~-]", "");
 }
 
-public XmlObject generateNonDeterministicOutputPlain(LinkedHashMap<String, IOWPSInformation> ndoutput) throws Exception {
-String XMLString = "<gml:featureMember xmlns:gml=\"http://www.opengis.net/gml\" xmlns:d4science=\"http://www.d4science.org\">\n" + " <d4science:output fid=\"outputcollection\">\n";
+public XmlObject generateNonDeterministicOutputPlain(LinkedHashMap<String, IOWPSInformation> ndoutput)
+throws Exception {
+String XMLString = "<gml:featureMember xmlns:gml=\"http://www.opengis.net/gml\" xmlns:d4science=\"http://www.d4science.org\">\n"
++ " <d4science:output fid=\"outputcollection\">\n";
 for (String key : ndoutput.keySet()) {
 IOWPSInformation info = ndoutput.get(key);
 String payload = info.getContent();
 String mimetype = info.getMimetype();
-XMLString += " <d4science:k_" + cleanTagString(key) + ">" + " <d4science:Data><![CDATA[" + payload + "]]></d4science:Data>\n" + " <d4science:Description><![CDATA[" + (info.getAbstractStr() != null ? info.getAbstractStr() : "") + "]]></d4science:Description>\n" + " <d4science:MimeType>" + mimetype + "</d4science:MimeType>\n" + " </d4science:k_" + cleanTagString(key) + ">\n";
+XMLString += " <d4science:k_" + cleanTagString(key) + ">" + " <d4science:Data><![CDATA["
++ payload + "]]></d4science:Data>\n" + " <d4science:Description><![CDATA["
++ (info.getAbstractStr() != null ? info.getAbstractStr() : "") + "]]></d4science:Description>\n"
++ " <d4science:MimeType>" + mimetype + "</d4science:MimeType>\n" + " </d4science:k_"
++ cleanTagString(key) + ">\n";
 }
 XMLString += " </d4science:output>\n" + "</gml:featureMember>\n";
 
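The new retrieveContentType method above resolves content types through nested if/else blocks that grow with every extension. A table-driven sketch of the same lookup (hypothetical refactoring covering the same mappings; it assumes java.util.Map/HashMap imports and commons-io's FilenameUtils on the classpath):

    // Hypothetical map-based equivalent of retrieveContentType:
    private static final Map<String, String> CONTENT_TYPES = new HashMap<String, String>();
    static {
        CONTENT_TYPES.put("html", "text/html");
        CONTENT_TYPES.put("htm", "text/html");
        CONTENT_TYPES.put("pdf", "application/pdf");
        CONTENT_TYPES.put("log", "text/plain");
        CONTENT_TYPES.put("txt", "text/plain");
        CONTENT_TYPES.put("json", "application/json");
    }

    private String retrieveContentType(String fileName) {
        if (fileName == null || fileName.isEmpty())
            return null;
        String ext = FilenameUtils.getExtension(fileName.toLowerCase());
        return CONTENT_TYPES.get(ext); // null for unknown extensions, as before
    }
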
@@ -219,16 +260,21 @@ public class OutputsManager {
 return xmlData;
 }
 
-public XmlObject generateNonDeterministicOutputCollection(LinkedHashMap<String, IOWPSInformation> ndoutput) throws Exception {
-String XMLString = "<ogr:FeatureCollection xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://ogr.maptools.org/ result_8751.xsd\" xmlns:ogr=\"http://ogr.maptools.org/\" xmlns:gml=\"http://www.opengis.net/gml\" xmlns:d4science=\"http://www.d4science.org\">" +
-"\n<gml:featureMember>\n" + " <ogr:Result fid=\"F0\">\n" +
-" <d4science:output fid=\"outputcollection\">\n";
+public XmlObject generateNonDeterministicOutputCollection(LinkedHashMap<String, IOWPSInformation> ndoutput)
+throws Exception {
+String XMLString = "<ogr:FeatureCollection xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://ogr.maptools.org/ result_8751.xsd\" xmlns:ogr=\"http://ogr.maptools.org/\" xmlns:gml=\"http://www.opengis.net/gml\" xmlns:d4science=\"http://www.d4science.org\">"
++ "\n<gml:featureMember>\n" + " <ogr:Result fid=\"F0\">\n"
++ " <d4science:output fid=\"outputcollection\">\n";
+
 for (String key : ndoutput.keySet()) {
 IOWPSInformation info = ndoutput.get(key);
 String payload = info.getContent();
 String mimetype = info.getMimetype();
-XMLString += " <d4science:k_" + cleanTagString(key) + ">" + " <d4science:Data><![CDATA[" + payload + "]]></d4science:Data>\n" + " <d4science:Description><![CDATA[" + (info.getAbstractStr() != null ? info.getAbstractStr() : "") + "]]></d4science:Description>\n" + " <d4science:MimeType>" + mimetype + "</d4science:MimeType>\n" + " </d4science:k_" + cleanTagString(key) + ">\n";
+XMLString += " <d4science:k_" + cleanTagString(key) + ">" + " <d4science:Data><![CDATA["
++ payload + "]]></d4science:Data>\n" + " <d4science:Description><![CDATA["
++ (info.getAbstractStr() != null ? info.getAbstractStr() : "") + "]]></d4science:Description>\n"
++ " <d4science:MimeType>" + mimetype + "</d4science:MimeType>\n" + " </d4science:k_"
++ cleanTagString(key) + ">\n";
 }
 XMLString += " </d4science:output>\n" + " </ogr:Result>\n</gml:featureMember>\n</ogr:FeatureCollection>";
 
@@ -244,11 +290,11 @@ public class OutputsManager {
 
 public XmlObject generateNonDeterministicOutput(LinkedHashMap<String, IOWPSInformation> ndoutput) throws Exception {
 
-if (ndoutput.size()==0)
+if (ndoutput.size() == 0)
 return null;
 
-String XMLString = "<ogr:FeatureCollection xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://ogr.maptools.org/ result_8751.xsd\" xmlns:ogr=\"http://ogr.maptools.org/\" xmlns:gml=\"http://www.opengis.net/gml\" xmlns:d4science=\"http://www.d4science.org\">" +
-"\n<gml:featureMember>\n";
+String XMLString = "<ogr:FeatureCollection xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://ogr.maptools.org/ result_8751.xsd\" xmlns:ogr=\"http://ogr.maptools.org/\" xmlns:gml=\"http://www.opengis.net/gml\" xmlns:d4science=\"http://www.d4science.org\">"
++ "\n<gml:featureMember>\n";
 int count = 0;
 for (String key : ndoutput.keySet()) {
 IOWPSInformation info = ndoutput.get(key);
@@ -256,25 +302,31 @@ public class OutputsManager {
 String mimetype = info.getMimetype();
 String abstractStr = info.getAbstractStr();
 
-LOGGER.debug("IOWPS Information: " + "name "+info.getName()+","
-+"abstr "+info.getAbstractStr()+","
-+"content "+info.getContent()+","
-+"def "+info.getDefaultVal()+",");
+LOGGER.debug("IOWPS Information [name=" + info.getName() + ", abstr=" + info.getAbstractStr() + ", content="
++ info.getContent() + ", def=" + info.getDefaultVal() + "]");
 
-if ((abstractStr==null || abstractStr.trim().length()==0) && (payload!= null && payload.trim().length()>0))
+if ((abstractStr == null || abstractStr.trim().length() == 0)
+&& (payload != null && payload.trim().length() > 0))
 abstractStr = info.getName();
 else if (abstractStr == null)
 abstractStr = "";
 
-//geospatialized
-// XMLString += " <ogr:Result fid=\"F" + count+ "\">" + "<ogr:geometryProperty><gml:Point><gml:coordinates>0,0</gml:coordinates></gml:Point></ogr:geometryProperty>"+ " <d4science:Data><![CDATA[" + payload + "]]></d4science:Data>\n" + " <d4science:Description><![CDATA[" + (info.getAbstractStr() != null ? info.getAbstractStr() : "") + "]]></d4science:Description>\n" + " <d4science:MimeType>" + mimetype + "</d4science:MimeType>\n" + " </ogr:Result>\n";
-XMLString += " <ogr:Result fid=\"F" + count+ "\">" + " <d4science:Data><![CDATA[" + payload + "]]></d4science:Data>\n" + " <d4science:Description><![CDATA[" + abstractStr + "]]></d4science:Description>\n" + " <d4science:MimeType>" + mimetype + "</d4science:MimeType>\n" + " </ogr:Result>\n";
+// geospatialized
+// XMLString += " <ogr:Result fid=\"F" + count+ "\">" +
+// "<ogr:geometryProperty><gml:Point><gml:coordinates>0,0</gml:coordinates></gml:Point></ogr:geometryProperty>"+
+// " <d4science:Data><![CDATA[" + payload + "]]></d4science:Data>\n"
+// + " <d4science:Description><![CDATA[" + (info.getAbstractStr() !=
+// null ? info.getAbstractStr() : "") +
+// "]]></d4science:Description>\n" + " <d4science:MimeType>" +
+// mimetype + "</d4science:MimeType>\n" + " </ogr:Result>\n";
+XMLString += " <ogr:Result fid=\"F" + count + "\">" + " <d4science:Data><![CDATA[" + payload
++ "]]></d4science:Data>\n" + " <d4science:Description><![CDATA[" + abstractStr
++ "]]></d4science:Description>\n" + " <d4science:MimeType>" + mimetype
++ "</d4science:MimeType>\n" + " </ogr:Result>\n";
 count++;
 }
 XMLString += " </gml:featureMember>\n</ogr:FeatureCollection>";
 
 
 
 LOGGER.debug("Non deterministic output: " + XMLString);
 
 XmlObject xmlData = XmlObject.Factory.newInstance();
@@ -285,10 +337,10 @@ public class OutputsManager {
 		return xmlData;
 	}

-	public void shutdown(){
-		try{
+	public void shutdown() {
+		try {
 			storageclient.close();
-		}catch(Exception e){
+		} catch (Exception e) {

 		}
 	}

@@ -13,16 +13,15 @@ import java.util.List;
 import java.util.Map;
 import java.util.UUID;

-import org.gcube.common.homelibrary.home.Home;
-import org.gcube.common.homelibrary.home.HomeLibrary;
-import org.gcube.common.homelibrary.home.HomeManager;
-import org.gcube.common.homelibrary.home.HomeManagerFactory;
-import org.gcube.common.homelibrary.home.User;
-import org.gcube.common.homelibrary.home.workspace.Workspace;
-import org.gcube.common.homelibrary.home.workspace.WorkspaceFolder;
-import org.gcube.common.homelibrary.home.workspace.WorkspaceItem;
-import org.gcube.common.homelibrary.home.workspace.folder.FolderItem;
-import org.gcube.common.homelibrary.util.WorkspaceUtil;
+import org.gcube.common.authorization.library.provider.AuthorizationProvider;
+import org.gcube.common.storagehub.client.dsl.FileContainer;
+import org.gcube.common.storagehub.client.dsl.FolderContainer;
+import org.gcube.common.storagehub.client.dsl.ItemContainer;
+import org.gcube.common.storagehub.client.dsl.StorageHubClient;
+import org.gcube.common.storagehub.model.Metadata;
+import org.gcube.common.storagehub.model.exceptions.ItemLockedException;
+import org.gcube.common.storagehub.model.items.GCubeItem;
+import org.gcube.common.storagehub.model.items.Item;
 import org.gcube.contentmanagement.lexicalmatcher.utils.FileTools;
 import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
 import org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.AbstractEcologicalEngineMapper;
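Note: this import hunk is the heart of the change set: the deprecated Home Library workspace API (org.gcube.common.homelibrary.*) is replaced by the StorageHub client DSL (org.gcube.common.storagehub.*). A minimal sketch of the new access pattern, built only from calls that appear in this diff; it assumes the storagehub client library is on the classpath and that a valid gCube token/scope is already set for the calling thread, and "SomeFolder" is an illustrative name:

	StorageHubClient shc = new StorageHubClient();
	FolderContainer root = shc.getWSRoot();
	// lookups are name-based and return containers rather than workspace items
	List<ItemContainer<? extends Item>> hits = root.findByName("SomeFolder").getContainers();
	FolderContainer folder = hits.isEmpty()
			? root.newFolder("SomeFolder", "created on first use") // create lazily
			: (FolderContainer) hits.get(0);                        // reuse the existing folder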
@@ -64,12 +63,20 @@ public class DataspaceManager implements Runnable {
 	public static String operator = "operator_name";
 	public static String payload = "payload";

-	public DataspaceManager(AlgorithmConfiguration config, ComputationData computation, List<StoredData> inputData, List<StoredData> outputData, List<File> generatedFiles) {
+	private String statusComputationName;
+	private static final String STATUS_POSTFIX="-STATUS";
+
+	public DataspaceManager(AlgorithmConfiguration config, ComputationData computation, List<StoredData> inputData,
+			List<StoredData> outputData, List<File> generatedFiles) {
 		this.config = config;
 		this.computation = computation;
 		this.inputData = inputData;
 		this.outputData = outputData;
 		this.generatedFiles = generatedFiles;
+		this.statusComputationName = this.computation.id+STATUS_POSTFIX;
+
+		LOGGER.debug("DataspaceManager [config=" + config + ", computation=" + computation + ", inputData=" + inputData
+				+ ", outputData=" + outputData + ", generatedFiles=" + generatedFiles + "]");
 	}

 	public void run() {
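Note: the new statusComputationName field encodes a naming convention: the workspace item that tracks a computation's status is the computation id plus the "-STATUS" suffix. For example (hypothetical id), a computation with id "CMSY2_abc123" is tracked by an item named "CMSY2_abc123-STATUS"; deleteRunningComputationData() further down looks items up by this derived name.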
@@ -88,78 +95,101 @@ public class DataspaceManager implements Runnable {

 	}

-	public void createFoldersNetwork(Workspace ws, WorkspaceFolder root) throws Exception {
+	public FolderContainer createFoldersNetwork() throws Exception {

 		LOGGER.debug("Dataspace->Creating folders for DataMiner");

-		// manage folders: create the folders network
-		if (!ws.exists(dataminerFolder, root.getId())) {
-			LOGGER.debug("Dataspace->Creating DataMiner main folder");
-			root.createFolder(dataminerFolder, "A folder collecting DataMiner experiments data and computation information");
-			((WorkspaceFolder) root.find(dataminerFolder)).setSystemFolder(true);
-		}
-		WorkspaceFolder dataminerFolderWS = (WorkspaceFolder) root.find(dataminerFolder);
+		StorageHubClient shc = new StorageHubClient();

-		if (!ws.exists(importedDataFolder, dataminerFolderWS.getId())) {
+		FolderContainer root = shc.getWSRoot();
+
+		List<ItemContainer<? extends Item>> dataminerItems = root.findByName(dataminerFolder).getContainers();
+
+		FolderContainer dataminerFolderContainer;
+
+		// manage folders: create the folders network
+		if (dataminerItems.isEmpty()) {
+			LOGGER.debug("Dataspace->Creating DataMiner main folder");
+			dataminerFolderContainer = root.newFolder(dataminerFolder,
+					"A folder collecting DataMiner experiments data and computation information");
+			// ((WorkspaceFolder)
+			// root.find(dataminerFolder)).setSystemFolder(true);
+		} else if (dataminerItems.size() > 1)
+			throw new Exception("found more than one dataminer folder (impossible!!!)");
+		else
+			dataminerFolderContainer = (FolderContainer) dataminerItems.get(0);
+
+		if (dataminerFolderContainer.findByName(importedDataFolder).getContainers().isEmpty()) {
 			LOGGER.debug("Dataspace->Creating DataMiner imported data folder");
-			dataminerFolderWS.createFolder(importedDataFolder, "A folder collecting DataMiner imported data");
+			dataminerFolderContainer.newFolder(importedDataFolder, "A folder collecting DataMiner imported data");
 		}
-		if (!ws.exists(computedDataFolder, dataminerFolderWS.getId())) {
+
+		if (dataminerFolderContainer.findByName(computedDataFolder).getContainers().isEmpty()) {
 			LOGGER.debug("Dataspace->Creating DataMiner computed data folder");
-			dataminerFolderWS.createFolder(computedDataFolder, "A folder collecting DataMiner computed data");
+			dataminerFolderContainer.newFolder(computedDataFolder, "A folder collecting DataMiner computed data");
 		}
-		if (!ws.exists(computationsFolder, dataminerFolderWS.getId())) {
+		if (dataminerFolderContainer.findByName(computationsFolder).getContainers().isEmpty()) {
 			LOGGER.debug("Dataspace->Creating DataMiner computations folder");
-			dataminerFolderWS.createFolder(computationsFolder, "A folder collecting DataMiner computations information");
+			dataminerFolderContainer.newFolder(computationsFolder,
+					"A folder collecting DataMiner computations information");
 		}

+		return dataminerFolderContainer;
 	}
-	public String uploadData(StoredData data, WorkspaceFolder wsFolder) throws Exception {
-		return uploadData(data, wsFolder, true);
+
+	public String uploadData(StoredData data, FolderContainer destinationFolder) throws Exception {
+		return uploadData(data, destinationFolder, true);
 	}
-	public String uploadData(StoredData data, WorkspaceFolder wsFolder, boolean changename) throws Exception {
+
+	public String uploadData(StoredData data, FolderContainer destinationFolder, boolean changename) throws Exception {
 		LOGGER.debug("Dataspace->Analysing " + data);
-		// String filenameonwsString = WorkspaceUtil.getUniqueName(data.name, wsFolder);
-		String filenameonwsString = data.name ;
+		// String filenameonwsString = WorkspaceUtil.getUniqueName(data.name,
+		// wsFolder);
+		String filenameonwsString = data.name;
 		if (changename)
-			filenameonwsString = data.name + "_[" + data.computationId + "]"+getExtension(data.payload, data.type);// ("_"+UUID.randomUUID()).replace("-", "");
+			filenameonwsString = String.format("%s_(%s)%s", data.name, data.computationId,
+					getExtension(data.payload));

 		InputStream in = null;
 		String url = "";
 		try {
-			long size = 0;
-			if (data.type.equals("text/csv")||data.type.equals("application/d4science")||data.type.equals("image/png")) {
+			//long size = 0;
+			if (data.type.equals("text/csv") || data.type.equals("application/d4science")
+					|| data.type.equals("image/png")) {

 				if (new File(data.payload).exists() || !data.payload.startsWith("http")) {
-					LOGGER.debug("Dataspace->Uploading file " + data.payload);
+					LOGGER.debug("Dataspace->Uploading file {}", data.payload);
 					in = new FileInputStream(new File(data.payload));
-					size = new File(data.payload).length();
+					//size = new File(data.payload).length();
 				} else {
-					LOGGER.debug("Dataspace->Uploading via URL " + data.payload);
+					LOGGER.debug("Dataspace->Uploading via URL {}", data.payload);
 					int tries = 10;
-					for (int i=0;i<tries;i++){
+					for (int i = 0; i < tries; i++) {
 						try {
 							URL urlc = new URL(data.payload);
+							url = urlc.toString();
 							HttpURLConnection urlConnection = (HttpURLConnection) urlc.openConnection();
 							urlConnection.setConnectTimeout(10000);
 							urlConnection.setReadTimeout(10000);
 							in = new BufferedInputStream(urlConnection.getInputStream());
-						}catch(Exception ee){
-							LOGGER.warn("Dataspace->Retrying connection to {} number {} ",data.payload,(i+1),ee);
-							in =null;
+						} catch (Exception ee) {
+							LOGGER.warn("Dataspace->Retrying connection to {} number {} ", data.payload, (i + 1), ee);
+							in = null;
 						}
-						if (in!=null)
+						if (in != null)
 							break;
 						else
 							Thread.sleep(10000);
 					}

 				}
-				if (in==null)
-					throw new Exception("Impossible to open stream from "+data.payload);
+				if (in == null)
+					throw new Exception("Impossible to open stream from " + data.payload);

-				// LOGGER.debug("Dataspace->final file name on ws " + data.name+" description "+data.description);
+				// LOGGER.debug("Dataspace->final file name on ws " +
+				// data.name+" description "+data.description);
 				LOGGER.debug("Dataspace->WS OP saving the following file on the WS " + filenameonwsString);
-				LinkedHashMap<String, String> properties = new LinkedHashMap<String, String>();
+				Map<String, Object> properties = new LinkedHashMap<String, Object>();

 				properties.put(computation_id, data.computationId);
 				properties.put(hostname, WPSConfig.getInstance().getWPSConfig().getServer().getHostname());
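Note: the URL branch above opens the remote payload with up to 10 attempts, 10 seconds apart, before giving up. A condensed sketch of that retry pattern, detached from the surrounding method (payloadUrl is an illustrative variable name):

	InputStream in = null;
	for (int i = 0; i < 10; i++) {
		try {
			HttpURLConnection conn = (HttpURLConnection) new URL(payloadUrl).openConnection();
			conn.setConnectTimeout(10000);
			conn.setReadTimeout(10000);
			in = new BufferedInputStream(conn.getInputStream());
		} catch (Exception e) {
			in = null; // reset and retry
		}
		if (in != null)
			break;
		Thread.sleep(10000); // fixed pause between attempts
	}
	if (in == null)
		throw new Exception("Impossible to open stream from " + payloadUrl);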
@@ -172,238 +202,230 @@ public class DataspaceManager implements Runnable {
 				properties.put(data_type, data.type);
 				properties.put(payload, url);

-				FolderItem fileItem = WorkspaceUtil.createExternalFile(wsFolder, filenameonwsString, data.description, in,properties,data.type,size);
-				//fileItem.getProperties().addProperties(properties);
+				FileContainer fileContainer = destinationFolder.uploadFile(in, filenameonwsString, data.description);
+				fileContainer.setMetadata(new Metadata(properties));

 				LOGGER.debug("Dataspace->WS OP file saved on the WS " + filenameonwsString);

-				url = fileItem.getPublicLink(false);
+				url = fileContainer.getPublicLink().toString();
 				LOGGER.debug("Dataspace->WS OP url produced for the file " + url);

 				data.payload = url;
 				try {
 					in.close();
 				} catch (Exception e) {
-					LOGGER.debug("Dataspace->Error creating file " + e.getMessage());
-					//LOGGER.debug(e);
+					LOGGER.debug("Dataspace->Error creating file {}", e.getMessage());
+					// LOGGER.debug(e);
 				}
-				LOGGER.debug("Dataspace->File created " + filenameonwsString);
+				LOGGER.debug("Dataspace->File created {}", filenameonwsString);
 			} else {
-				LOGGER.debug("Dataspace->String parameter " + data.payload);
+				LOGGER.debug("Dataspace->String parameter {}", data.payload);
 				url = data.payload;
 			}
 		} catch (Throwable e) {
-			LOGGER.error("Dataspace->Could not retrieve input payload {} ",data.payload,e);
-			//LOGGER.debug(e);
+			LOGGER.error("Dataspace->Could not retrieve input payload {} ", data.payload, e);
+			// LOGGER.debug(e);
 			url = "payload was not made available for this dataset";
 			data.payload = url;
 		}
 		return url;
 	}

-	public List<String> uploadInputData(List<StoredData> inputData, WorkspaceFolder dataminerFolder) throws Exception {
-		LOGGER.debug("Dataspace->uploading input data; Number of data: " + inputData.size());
-		WorkspaceItem folderItem = dataminerFolder.find(importedDataFolder);
+	public List<String> uploadInputData(List<StoredData> inputData, FolderContainer dataminerFolder) throws Exception {
+		LOGGER.debug("Dataspace->uploading input data; Number of data: {}", inputData.size());
+		FolderContainer destinationFolder = (FolderContainer) dataminerFolder.findByName(importedDataFolder)
+				.getContainers().get(0);
 		List<String> urls = new ArrayList<String>();
-		if (folderItem != null && folderItem.isFolder()) {
-			WorkspaceFolder destinationFolder = (WorkspaceFolder) folderItem;
-			for (StoredData input : inputData) {
-				WorkspaceItem item = null;
-
-				if (input.type.equals("text/csv")||input.type.equals("application/d4science")||input.type.equals("image/png"))
-					item = destinationFolder.find(input.name);
-
-				if (item==null){
-					String url = uploadData(input, destinationFolder,false);
-					LOGGER.debug("Dataspace->returning property "+url);
-					urls.add(url);
-				}
-				else{
-					LOGGER.debug("Dataspace->Input item "+input.name+" is already available in the input folder");
-					String url = item.getPublicLink(false);
-					LOGGER.debug("Dataspace->returning WS url "+url);
-					urls.add(url);
-				}
-			}
-		} else
-			LOGGER.debug("Dataspace->folder is not valid");
+		for (StoredData input : inputData) {
+			List<ItemContainer<? extends Item>> items = null;
+
+			if (input.type.equals("text/csv") || input.type.equals("application/d4science")
+					|| input.type.equals("image/png"))
+				items = destinationFolder.findByName(input.name).getContainers();
+
+			if (items == null || items.isEmpty()) {
+				String url = uploadData(input, destinationFolder, false);
+				LOGGER.debug("Dataspace->returning property {}", url);
+				urls.add(url);
+			} else {
+				FileContainer item = (FileContainer) items.get(0);
+				LOGGER.debug("Dataspace->Input item {} is already available in the input folder", input.name);
+				String url = item.getPublicLink().toString();
+				LOGGER.debug("Dataspace->returning WS url {}", url);
+				urls.add(url);
+			}
+		}

 		LOGGER.debug("Dataspace->finished uploading input data");
 		return urls;
 	}

-	public List<String> uploadOutputData(List<StoredData> outputData, WorkspaceFolder dataminerFolder) throws Exception {
+	public List<String> uploadOutputData(List<StoredData> outputData, FolderContainer dataminerFolder)
+			throws Exception {
 		LOGGER.debug("Dataspace->uploading output data; Number of data: " + outputData.size());
-		WorkspaceItem folderItem = dataminerFolder.find(computedDataFolder);
+		FolderContainer destinationFolder = (FolderContainer) dataminerFolder.findByName(computedDataFolder)
+				.getContainers().get(0);
 		List<String> urls = new ArrayList<String>();
-		if (folderItem != null && folderItem.isFolder()) {
-			WorkspaceFolder destinationFolder = (WorkspaceFolder) folderItem;
-			for (StoredData output : outputData) {
-				String url = uploadData(output, destinationFolder);
-				urls.add(url);
-			}
-		} else
-			LOGGER.debug("Dataspace->folder is not valid");
+		for (StoredData output : outputData) {
+			String url = uploadData(output, destinationFolder);
+			urls.add(url);
+		}
 		LOGGER.debug("Dataspace->finished uploading output data");
 		return urls;
 	}

-	public void uploadComputationData(ComputationData computation, List<StoredData> inputData, List<StoredData> outputData, WorkspaceFolder dataminerFolder, Workspace ws) throws Exception {
+	public void uploadComputationData(ComputationData computation, List<StoredData> inputData,
+			List<StoredData> outputData, FolderContainer dataminerFolder) throws Exception {
 		LOGGER.debug("Dataspace->uploading computation data");
-		WorkspaceItem folderItem = dataminerFolder.find(computationsFolder);
-		if (folderItem != null && folderItem.isFolder()) {
+		FolderContainer computationContainer = (FolderContainer) dataminerFolder.findByName(computationsFolder)
+				.getContainers().get(0);
 		// create a folder in here
 		LOGGER.debug("Dataspace->Creating computation folder " + computation.id);
-		WorkspaceFolder cfolder = ((WorkspaceFolder) folderItem);
 		String cfoldername = computation.id;
-		WorkspaceFolder newcomputationFolder = null;
-		try{
-			newcomputationFolder = cfolder.createFolder(cfoldername, computation.operatorDescription);
-		}catch(java.lang.ClassCastException e){
+		FolderContainer newcomputationFolder = null;
+		try {
+			newcomputationFolder = computationContainer.newFolder(cfoldername, computation.operatorDescription);
+		} catch (java.lang.ClassCastException e) {
 			LOGGER.debug("Dataspace->concurrency exception - deleting remaining item");
 			deleteRunningComputationData();
-			newcomputationFolder = cfolder.createFolder(cfoldername, computation.operatorDescription);
+			newcomputationFolder = computationContainer.newFolder(cfoldername, computation.operatorDescription);
 		}

-		//String itemType = "COMPUTATION";
+		// String itemType = "COMPUTATION";

 		// create IO folders
 		LOGGER.debug("Dataspace->creating IO folders under " + cfoldername);
-		newcomputationFolder.createFolder(importedDataFolder, importedDataFolder);
-		newcomputationFolder.createFolder(computedDataFolder, computedDataFolder);
+		newcomputationFolder.newFolder(importedDataFolder, importedDataFolder);
+		newcomputationFolder.newFolder(computedDataFolder, computedDataFolder);

 		// copy IO in those folders
 		LOGGER.debug("Dataspace->*****uploading inputs in IO folder*****");
 		List<String> inputurls = uploadInputData(inputData, newcomputationFolder);
 		LOGGER.debug("Dataspace->*****uploading outputs in IO folder*****");
 		List<String> outputurls = uploadOutputData(outputData, newcomputationFolder);

 		LOGGER.debug("Dataspace->*****adding properties to the folder*****");

 		LOGGER.debug("Dataspace->creating Folder Properties");

 		// write a computation item for the computation
-		LinkedHashMap<String, String> properties = new LinkedHashMap<String, String>();
+		Map<String, Object> properties = new LinkedHashMap<String, Object>();
 		properties.put(computation_id, computation.id);

 		properties.put(hostname, WPSConfig.getInstance().getWPSConfig().getServer().getHostname());

 		properties.put(vre, computation.vre);

 		properties.put(operator_name, config.getAgent());

 		properties.put(operator_id, computation.operatorId);

 		properties.put(operator_description, computation.operatorDescription);

 		properties.put(start_date, computation.startDate);

 		properties.put(end_date, computation.endDate);

 		properties.put(status, getStatus(computation.status));

 		properties.put(execution_platform, computation.infrastructure);

 		int ninput = inputurls.size();
 		int noutput = outputurls.size();

 		LOGGER.debug("Dataspace->Adding input properties for " + ninput + " inputs");
 		for (int i = 1; i <= ninput; i++) {
 			StoredData input = inputData.get(i - 1);
-			if (input.payload.contains("|")){
-				String payload = input .payload;
-				LOGGER.debug("Dataspace->Managing complex input "+input.name+" : "+payload);
-				//delete the names that are not useful
-
-				for (StoredData subinput:inputData){
-					if (input.description.equals(subinput.description)){
-						payload = payload.replace(subinput.name,subinput.payload);
-						subinput.name=null;
-					}
-				}
+			if (input.payload.contains("|")) {
+				String payload = input.payload;
+				LOGGER.debug("Dataspace->Managing complex input {} : {}", input.name, payload);
+				// delete the names that are not useful
+
+				for (StoredData subinput : inputData) {
+					if (input.description.equals(subinput.description)) {
+						payload = payload.replace(subinput.name, subinput.payload);
+						subinput.name = null;
+					}
+				}

 				input.name = null;

-				//delete last pipe character
+				// delete last pipe character
 				if (payload.endsWith("|"))
-					payload = payload.substring(0,payload.length()-1);
-				LOGGER.debug("Dataspace->Complex input after processing "+payload);
+					payload = payload.substring(0, payload.length() - 1);
+				LOGGER.debug("Dataspace->Complex input after processing " + payload);
 				properties.put("input" + i + "_" + input.description, payload);
-				input.payload=payload;
+				input.payload = payload;

 			}
 		}

 		for (int i = 1; i <= ninput; i++) {
 			StoredData input = inputData.get(i - 1);
-			if (input.name!=null){
-				properties.put("input" + i + "_" + input.name, inputurls.get(i - 1));
+			if (input.name != null) {
+				properties.put(String.format("input%d_%s", i, input.name), inputurls.get(i - 1));

 			}
 		}

 		LOGGER.debug("Dataspace->Adding output properties for " + noutput + " outputs");
 		for (int i = 1; i <= noutput; i++) {
-			properties.put("output" + i + "_" + outputData.get(i - 1).name, outputurls.get(i - 1));
+			properties.put(String.format("output%d_%s", i, outputData.get(i - 1).name), outputurls.get(i - 1));

 		}

-		LOGGER.debug("Dataspace->Properties of the folder: " + properties);
+		LOGGER.debug("Dataspace->Properties of the folder: {} ", properties);

-		LOGGER.debug("Dataspace->Saving properties to ProvO XML file " + noutput + " outputs");
+		LOGGER.debug("Dataspace->Saving properties to ProvO XML file {} outputs", noutput);

 		/*
-		 * XStream xstream = new XStream(); String xmlproperties = xstream.toXML(properties);
+		 * XStream xstream = new XStream(); String xmlproperties =
+		 * xstream.toXML(properties);
 		 */
 		try {
 			String xmlproperties = ProvOGenerator.toProvO(computation, inputData, outputData);

 			File xmltosave = new File(config.getPersistencePath(), "prov_o_" + UUID.randomUUID());
 			FileTools.saveString(xmltosave.getAbsolutePath(), xmlproperties, true, "UTF-8");
-			InputStream sis = new FileInputStream(xmltosave);
-			WorkspaceUtil.createExternalFile(newcomputationFolder, computation.id + ".xml", computation.operatorDescription, sis,null,"text/xml",xmltosave.length());
-			sis.close();
+			try (InputStream sis = new FileInputStream(xmltosave)) {
+				newcomputationFolder.uploadFile(sis, computation.id + ".xml", computation.operatorDescription);
+			}
 			xmltosave.delete();
 		} catch (Exception e) {
-			LOGGER.error("Dataspace->Failed creating ProvO XML file ",e);
+			LOGGER.error("Dataspace->Failed creating ProvO XML file ", e);
 		}
-		//List<String> scopes = new ArrayList<String>();
-		//scopes.add(config.getGcubeScope());
-		//ws.createGcubeItem(computation.id, computation.operatorDescription, scopes, computation.user, itemType, properties, newcomputationFolder.getId());
-		newcomputationFolder.getProperties().addProperties(properties);
-		}
+		/*
+		 * List<String> scopes = new ArrayList<String>();
+		 * scopes.add(config.getGcubeScope());
+		 * ws.createGcubeItem(computation.id, computation.operatorDescription,
+		 * scopes, computation.user, itemType, properties,
+		 * newcomputationFolder.getId());
+		 */
+		newcomputationFolder.setMetadata(new Metadata(properties));

 		LOGGER.debug("Dataspace->finished uploading computation data");
 	}

-	public String buildCompositePayload(List<StoredData> inputData,String payload, String inputName){
+	public String buildCompositePayload(List<StoredData> inputData, String payload, String inputName) {

-		for (StoredData input:inputData){
-			if (inputName.equals(input.description)){
-				payload = payload.replace(input.name,input.payload);
+		for (StoredData input : inputData) {
+			if (inputName.equals(input.description)) {
+				payload = payload.replace(input.name, input.payload);
 			}
 		}
 		return payload;
 	}

-	public void writeProvenance(ComputationData computation, List<StoredData> inputData, List<StoredData> outputData) throws Exception {
+	public void writeProvenance(ComputationData computation, List<StoredData> inputData, List<StoredData> outputData)
+			throws Exception {
 		LOGGER.debug("Dataspace->connecting to Workspace");
-		HomeManagerFactory factory = HomeLibrary.getHomeManagerFactory();
-		HomeManager manager = factory.getHomeManager();
-		LOGGER.debug("Dataspace->getting user");
-		User user = manager.createUser(computation.user);
-		Home home = manager.getHome(user);
-		LOGGER.debug("Dataspace->getting root folder");
-		Workspace ws = home.getWorkspace();
-		WorkspaceFolder root = ws.getRoot();
 		LOGGER.debug("Dataspace->create folders network");
-		createFoldersNetwork(ws, root);
-		WorkspaceFolder dataminerItem = (WorkspaceFolder) root.find(dataminerFolder);
+		FolderContainer dataminerFolder = createFoldersNetwork();
 		LOGGER.debug("Dataspace->****uploading input files****");
-		uploadInputData(inputData, dataminerItem);
+		uploadInputData(inputData, dataminerFolder);
 		LOGGER.debug("Dataspace->****uploading output files****");
-		uploadOutputData(outputData, dataminerItem);
+		uploadOutputData(outputData, dataminerFolder);
 		LOGGER.debug("Dataspace->****uploading computation files****");
-		uploadComputationData(computation, inputData, outputData, dataminerItem, ws);
+		uploadComputationData(computation, inputData, outputData, dataminerFolder);
 		LOGGER.debug("Dataspace->provenance management finished");
 		LOGGER.debug("Dataspace->deleting generated files");
 		AbstractEcologicalEngineMapper.deleteGeneratedFiles(generatedFiles);
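Note: with StorageHub, properties are no longer attached through the workspace item (the old getProperties().addProperties(...) call); they are wrapped in a Metadata object and set on the container, and the map type widens from LinkedHashMap<String, String> to Map<String, Object>. A minimal sketch, with illustrative keys and values:

	Map<String, Object> properties = new LinkedHashMap<String, Object>();
	properties.put("computation_id", "abc123"); // illustrative value
	properties.put("status", "completed");
	newcomputationFolder.setMetadata(new Metadata(properties));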
@@ -414,29 +436,20 @@ public class DataspaceManager implements Runnable {
 		try {
 			deleteRunningComputationData();
 		} catch (Exception e) {
-			LOGGER.debug("Dataspace->impossible to delete running computation : {} ",e.getMessage());
+			LOGGER.debug("Dataspace->impossible to delete running computation : {} ", e.getMessage());
 		}
-		// LOGGER.debug("Dataspace->updating computation status");
-		// LOGGER.debug("Dataspace->connecting to Workspace");
-		HomeManagerFactory factory = HomeLibrary.getHomeManagerFactory();
-		HomeManager manager = factory.getHomeManager();
-		// LOGGER.debug("Dataspace->getting user");
-		User user = manager.createUser(computation.user);
-		Home home = manager.getHome(user);
-		// LOGGER.debug("Dataspace->getting root folder");
-		Workspace ws = home.getWorkspace();
-		WorkspaceFolder root = ws.getRoot();
 		// LOGGER.debug("Dataspace->create folders network");
-		createFoldersNetwork(ws, root);
-		WorkspaceFolder dataminerFolderWS = (WorkspaceFolder) root.find(dataminerFolder);
-		WorkspaceItem computationsFolderItem = dataminerFolderWS.find(computationsFolder);
-		// LOGGER.debug("Dataspace->Creating computation item " + computation.id+" with status"+computation.status);
+		FolderContainer folderContainer = createFoldersNetwork();
+		FolderContainer computationsContainer = (FolderContainer) folderContainer.findByName(computationsFolder)
+				.getContainers().get(0);
+		// LOGGER.debug("Dataspace->Creating computation item " +
+		// computation.id+" with status"+computation.status);
 		String itemType = "COMPUTATION";

 		// write a computation item for the computation
-		LinkedHashMap<String, String> properties = new LinkedHashMap<String, String>();
+		Map<String, Object> properties = new LinkedHashMap<String, Object>();
 		properties.put(computation_id, computation.id);
 		properties.put(hostname, WPSConfig.getInstance().getWPSConfig().getServer().getHostname());
 		properties.put(vre, computation.vre);
@@ -452,24 +465,34 @@ public class DataspaceManager implements Runnable {

 		List<String> scopes = new ArrayList<String>();
 		scopes.add(config.getGcubeScope());
-		ws.createGcubeItem(computation.id, computation.operatorDescription, scopes, computation.user, itemType, properties, computationsFolderItem.getId());
+
+		//TODO: update gcubeItem not recreate it...
+		GCubeItem gcubeItem = new GCubeItem();
+		gcubeItem.setName(this.statusComputationName);
+		gcubeItem.setDescription(computation.operatorDescription);
+		gcubeItem.setScopes(scopes.toArray(new String[scopes.size()]));
+		gcubeItem.setItemType(itemType);
+		gcubeItem.setMetadata(new Metadata(properties));
+		gcubeItem.setCreator(AuthorizationProvider.instance.get().getClient().getId());
+
+		computationsContainer.newGcubeItem(gcubeItem);
+
 		LOGGER.debug("Dataspace->finished uploading computation data");
 	}

-	public String getStatus(String status){
+	public String getStatus(String status) {
 		double statusD = 0;
-		try{
+		try {
 			statusD = Double.parseDouble(status);
-		}catch(Exception e){
+		} catch (Exception e) {
 			return status;
 		}

-		if (statusD==100)
+		if (statusD == 100)
 			return "completed";
-		else if (statusD==-2)
+		else if (statusD == -2)
 			return "error";
-		else if (statusD==-1)
+		else if (statusD == -1)
 			return "cancelled";
 		else
 			return status;
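Note: getStatus maps the internal numeric status codes to human-readable labels and returns anything else unchanged:

	getStatus("100");  // -> "completed"
	getStatus("-2");   // -> "error"
	getStatus("-1");   // -> "cancelled"
	getStatus("42.5"); // -> "42.5" (not a terminal code, returned as-is)
	getStatus("n/a");  // -> "n/a"  (unparsable, returned as-is)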
@@ -479,66 +502,76 @@ public class DataspaceManager implements Runnable {

 		LOGGER.debug("Dataspace->deleting computation item");
 		LOGGER.debug("Dataspace->connecting to Workspace");
-		HomeManagerFactory factory = HomeLibrary.getHomeManagerFactory();
-		HomeManager manager = factory.getHomeManager();
-		LOGGER.debug("Dataspace->getting user");
-		User user = manager.createUser(computation.user);
-		Home home = manager.getHome(user);
-		LOGGER.debug("Dataspace->getting root folder");
-		Workspace ws = home.getWorkspace();
-		WorkspaceFolder root = ws.getRoot();
-		WorkspaceFolder dataminerFolderWS = (WorkspaceFolder) root.find(dataminerFolder);
-		WorkspaceItem computationsFolderItem = dataminerFolderWS.find(computationsFolder);
+		StorageHubClient shc = new StorageHubClient();
+		FolderContainer dataminerContainer = (FolderContainer) shc.getWSRoot().findByName(dataminerFolder)
+				.getContainers().get(0);
+		FolderContainer computationContainer = (FolderContainer) dataminerContainer.findByName(computationsFolder)
+				.getContainers().get(0);
 		LOGGER.debug("Dataspace->removing computation data");
-		WorkspaceFolder computationsFolderWs = ((WorkspaceFolder) computationsFolderItem);
-		WorkspaceItem wi = computationsFolderWs.find(computation.id);
-		if (wi!=null){
-			LOGGER.debug("Dataspace->Found "+computation.id+" under "+computationsFolderWs.getName()+" - removing");
-			wi.remove();
-		}
-		else
-			LOGGER.debug("Dataspace->Warning Could not find "+computation.id+" under "+computationsFolderWs.getName());
-
-		int maxtries = 3;
-		int i =1;
-		while (ws.exists(computation.id,computationsFolderWs.getId()) && i<maxtries){
-			LOGGER.debug("Dataspace->computation data still exist... retrying "+i);
-			Thread.sleep(1000);
-			computationsFolderWs.find(computation.id).remove();
-			i++;
-		}
-
-		LOGGER.debug("Dataspace->finished removing computation data - success "+!ws.exists(computation.id,computationsFolderWs.getId()));
+		List<ItemContainer<? extends Item>> wi = computationContainer.findByName(this.statusComputationName).getContainers();
+		if (!wi.isEmpty()) {
+			for (ItemContainer<? extends Item> container : wi) {
+				boolean retry = false;
+				do {
+					try {
+						container.forceDelete();
+						retry = false;
+					}catch (ItemLockedException e) {
+						LOGGER.warn("item locked, retrying");
+						Thread.sleep(1000);
+						retry = true;
+					}
+				}while (retry);
+			}
+		} else
+			LOGGER.debug("Dataspace->Warning Could not find {} under {}", this.statusComputationName,
+					computationContainer.get().getName());
+
+		/*
+		 * TODO: ASK GIANPAOLO int maxtries = 3; int i =1; while
+		 * (ws.exists(computation.id,computationsFolderWs.getId()) &&
+		 * i<maxtries){
+		 * LOGGER.debug("Dataspace->computation data still exist... retrying "+i
+		 * ); Thread.sleep(1000);
+		 * computationsFolderWs.find(computation.id).remove(); i++; }
+		 */
+
+		LOGGER.debug("Dataspace->finished removing computation data ");
 	}

-	public static String getExtension(String payload, String type){
-		String extension = "";
-		if (type.toLowerCase().equals("text/plain")){}
-		else if (payload.toLowerCase().startsWith("http")){
+	// TODO
+	public static String getExtension(String payload) {
+		LOGGER.debug("DataSpace->Get Extension from: " + payload);
+		String extension="";
+
+		if (payload.toLowerCase().startsWith("http")) {
 			try {
-				URL obj= new URL(payload);
+				URL obj = new URL(payload);
 				URLConnection conn = obj.openConnection();
 				// get all headers
 				Map<String, List<String>> map = conn.getHeaderFields();
 				for (Map.Entry<String, List<String>> entry : map.entrySet()) {
 					String value = entry.getValue().toString();
-					if (value.toLowerCase().contains("filename=")){
-						System.out.println("DataSpace->Searching in http header: found "+value);
-						extension = value.substring(value.lastIndexOf("."),value.lastIndexOf("\""));
+					LOGGER.debug("Header value: " + value);
+					if (value.toLowerCase().contains("filename")) {
+						LOGGER.debug("DataSpace->Searching in http header: found " + value);
+						extension = value.substring(value.lastIndexOf("."), value.lastIndexOf("\""));
+						break;
 					}
 				}
 				conn.getInputStream().close();
 			} catch (Exception e) {
-				System.out.println("DataSpace->Error in the payload http link "+e.getMessage());
+				LOGGER.warn("DataSpace->Error in the payload http link ", e);
 			}
-		}
-		else {
+		} else {
 			File paylFile = new File(payload);
-			if (paylFile.exists()){
+			if (paylFile.exists()) {
 				String paylname = paylFile.getName();
 				extension = paylname.substring(paylname.lastIndexOf("."));
 			}
 		}
+		LOGGER.debug("DataSpace->Extension retrieved: " + extension);
 		return extension;
 	}

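Note: deletion now targets the "<computation id>-STATUS" item and retries forceDelete() in a do/while loop whenever StorageHub reports an ItemLockedException, sleeping 1 second between attempts; the old fixed three-try loop survives only in the commented-out "ASK GIANPAOLO" block. getExtension also loses its type parameter: the extension now comes either from a header value containing "filename" in the HTTP response, or from the local file name. Illustrative behaviour, with hypothetical inputs (the local path must exist for the file branch to apply):

	getExtension("/tmp/result_8751.csv");       // -> ".csv" (from the file name)
	getExtension("http://host/download?id=1");  // -> extension parsed from the header
	                                            //    value containing "filename", else ""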
@@ -2,7 +2,6 @@ package org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mapping.datasp

 import java.io.StringReader;
 import java.io.StringWriter;
-import java.util.ArrayList;
 import java.util.List;

 import javax.xml.parsers.DocumentBuilder;
@@ -1,3 +1,3 @@
-maxcomputations=4
+maxcomputations=1
 saveond4sstorage=true
 simulationMode=false
@@ -5,7 +5,7 @@
 	xsi:schemaLocation="http://www.opengis.net/wps/1.0.0 http://schemas.opengis.net/wps/1.0.0/wpsGetCapabilities_response.xsd"
 	updateSequence="1">
 	<ows:ServiceIdentification>
-		<ows:Title>D4Science WPS Service for synchronous executions</ows:Title>
+		<ows:Title>D4Science DataMiner Service</ows:Title>
 		<ows:Abstract>Service based on the 52°North implementation of WPS 1.0.0</ows:Abstract>
 		<ows:Keywords>
 			<ows:Keyword>WPS</ows:Keyword>
@@ -19,8 +19,8 @@
 		<ows:AccessConstraints>NONE</ows:AccessConstraints>
 	</ows:ServiceIdentification>
 	<ows:ServiceProvider>
-		<ows:ProviderName>National Research Council of Italy</ows:ProviderName>
-		<ows:ProviderSite xlink:href="www.d4science.org" />
+		<ows:ProviderName>D4Science</ows:ProviderName>
+		<ows:ProviderSite xlink:href="https://www.d4science.org" />
 		<ows:ServiceContact>
 			<ows:IndividualName>Gianpaolo Coro</ows:IndividualName>
 			<ows:PositionName>Researcher</ows:PositionName>
@@ -34,7 +34,7 @@
 					<ows:AdministrativeArea>Istituto di Scienza e Tecnologie dell'Informazione A. Faedo</ows:AdministrativeArea>
 					<ows:PostalCode>56124</ows:PostalCode>
 					<ows:Country>Italy</ows:Country>
-					<ows:ElectronicMailAddress>gianpaolo.coro@isti.cnr.it</ows:ElectronicMailAddress>
+					<ows:ElectronicMailAddress>dataminer-managers@d4science.org</ows:ElectronicMailAddress>
 				</ows:Address>
 			</ows:ContactInfo>
 		</ows:ServiceContact>
@@ -43,31 +43,29 @@
 		<ows:Operation name="GetCapabilities">
 			<ows:DCP>
 				<ows:HTTP>
-					<ows:Get xlink:href="http://#HOST#:#PORT#/wps/WebProcessingService?" />
-					<ows:Post xlink:href="http://#HOST#:#PORT#/wps/WebProcessingService?" />
+					<ows:Get xlink:href="#PROTOCOL#://#HOST#:#PORT#/wps/WebProcessingService?" />
+					<ows:Post xlink:href="#PROTOCOL#://#HOST#:#PORT#/wps/WebProcessingService?" />
 				</ows:HTTP>
 			</ows:DCP>
 		</ows:Operation>
 		<ows:Operation name="DescribeProcess">
 			<ows:DCP>
 				<ows:HTTP>
-					<ows:Get xlink:href="http://#HOST#:#PORT#/wps/WebProcessingService?" />
-					<ows:Post xlink:href="http://#HOST#:#PORT#/wps/WebProcessingService?" />
+					<ows:Get xlink:href="#PROTOCOL#://#HOST#:#PORT#/wps/WebProcessingService?" />
+					<ows:Post xlink:href="#PROTOCOL#://#HOST#:#PORT#/wps/WebProcessingService?" />
 				</ows:HTTP>
 			</ows:DCP>
 		</ows:Operation>
 		<ows:Operation name="Execute">
 			<ows:DCP>
 				<ows:HTTP>
-					<ows:Get xlink:href="http://#HOST#:#PORT#/wps/WebProcessingService?" />
-					<ows:Post xlink:href="http://#HOST#:#PORT#/wps/WebProcessingService?" />
+					<ows:Get xlink:href="#PROTOCOL#://#HOST#:#PORT#/wps/WebProcessingService?" />
+					<ows:Post xlink:href="#PROTOCOL#://#HOST#:#PORT#/wps/WebProcessingService?" />
 				</ows:HTTP>
 			</ows:DCP>
 		</ows:Operation>
 	</ows:OperationsMetadata>
-	<wps:ProcessOfferings>
-		#PROCESSES#
-	</wps:ProcessOfferings>
+	<wps:ProcessOfferings>#PROCESSES#</wps:ProcessOfferings>
 	<wps:Languages>
 		<wps:Default>
 			<ows:Language>en-GB</ows:Language>
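Note: the capabilities template now carries a #PROTOCOL# placeholder next to #HOST# and #PORT#, so the advertised endpoints can be rewritten for HTTPS deployments as well as plain HTTP. Presumably the placeholders are substituted when the template is served; with illustrative values:

	#PROTOCOL#://#HOST#:#PORT#/wps/WebProcessingService?
	-> https://dataminer.example.org:443/wps/WebProcessingService?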
@@ -17,21 +17,21 @@ import org.junit.Test;

 public class AlgorithmTest {

-	List<String> executeOnly = Arrays.asList();
+	List<String> executeOnly = Arrays.asList("#BIONYM", "#AQUAMAPS_SUITABLE", "#AQUAMAPS_SUITABLE 21 sp", "#BIONYM1024","#CMSY2");

 	@Test
 	public void executeAlgorithmsFromFile() throws Exception{

 		String env = "dev";

-		Properties prop = new Properties();
+		/*Properties prop = new Properties();
 		prop.load(AlgorithmTest.class.getResourceAsStream("/test_params.properties"));
+		*/

 		String protocol = "http";
-		String hostname = prop.getProperty(env+".host");
-		String token = prop.getProperty(env+".token");
-		String layerID = prop.getProperty(env+".layer");
+		String hostname = "dataminer-genericworkers.d4science.org";
+		String token = "257800d8-24bf-4bae-83cd-ea99369e7dd6-843339462";
+		String layerID = "08ee0d70-4d8b-4f42-8b06-d709482bca95";

 		Iterator<String> uris = getUrisIterator();

@@ -43,7 +43,7 @@ public class AlgorithmTest {
 			if (nextLine.startsWith("#"))
 				algorithmName = nextLine;
 			else{
-				if (!(executeOnly.isEmpty() || executeOnly.contains(algorithmName))) continue;
+				if (executeOnly.contains(algorithmName)) continue;
 				String callUrl = nextLine.replace("{PROTOCOL}", protocol).replace("{HOST}", hostname).replace("{TOKEN}", token).replace("{LAYERID}", layerID);
 				try{
 					long start = System.currentTimeMillis();
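Note: the filter semantics flip in this last hunk. The old check skipped a call only when executeOnly was non-empty and did not contain the current algorithm name (an allow-list); the new check skips exactly the listed names, so the freshly populated list acts as a skip-list despite the field's name. Sketch of the effect with the list defined above:

	// executeOnly = ["#BIONYM", "#AQUAMAPS_SUITABLE", ...]
	// a call line under the "#BIONYM" header -> skipped
	// a call line under any unlisted header  -> executed

The hard-coded hostname, token and layer id replace the values previously read from test_params.properties.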