From c964cc69ad6f5b42c2f84668f0ee0260026e169f Mon Sep 17 00:00:00 2001
From: Giancarlo Panichi
Date: Mon, 2 May 2016 17:58:14 +0000
Subject: [PATCH] 2521: Explore the possibility to port the StatMan interface onto Dataminer

https://support.d4science.org/issues/2521

Updated TableListParameter Support

git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/portlets/user/data-miner-manager@128416 82a268e6-3cf1-43bd-a215-b396298e98cf
---
 log.txt                                        | 771 ++++++++++++++++++
 .../client/DataMinerManagerController.java     |   1 -
 .../client/events/ComputationReadyEvent.java   |  20 +-
 ...StartComputationExecutionRequestEvent.java  |  26 +-
 .../ComputationExecutionPanel.java             |  40 +-
 .../client/experiments/ComputationPanel.java   |   5 +-
 .../ComputationParametersPanel.java            |  22 +-
 .../client/experiments/WorkflowPanel.java      |  47 +-
 .../client/parametersfield/BooleanFld.java     |   2 +-
 .../client/parametersfield/ColumnFld.java      |   2 +-
 .../client/parametersfield/ColumnListFld.java  |   2 +-
 .../client/parametersfield/DoubleFld.java      |   2 +-
 .../client/parametersfield/EnumFld.java        |   2 +-
 .../client/parametersfield/FloatFld.java       |   2 +-
 .../client/parametersfield/IntFld.java         |   2 +-
 .../client/parametersfield/ListIntFld.java     |   2 +-
 .../parametersfield/OperatorFieldWidget.java   |  28 +-
 .../client/parametersfield/StringFld.java      |   2 +-
 .../parametersfield/TabularListFld.java        | 399 +++++++--
 .../client/rpc/DataMinerPortletService.java    |   3 +-
 .../rpc/DataMinerPortletServiceAsync.java      |   4 +-
 .../server/DataMinerManagerServiceImpl.java    |  10 +-
 .../server/smservice/SClient.java              |   4 +-
 .../server/smservice/SClient4WPS.java          |   9 +-
 .../server/smservice/wps/WPS2SM.java           |  67 +-
 .../dataminermanager/shared/Constants.java     |  13 +-
 .../dataminermanager/dataminermanager.gwt.xml  |   8 +-
 27 files changed, 1240 insertions(+), 255 deletions(-)
 create mode 100644 log.txt

diff --git a/log.txt b/log.txt
new file mode 100644
index 0000000..4e7bbc4
--- /dev/null
+++ b/log.txt
@@ -0,0 +1,771 @@
+0 [qtp684204552-34] DEBUG org.gcube.application.framework.accesslogger.library.impl.AccessLogger - Creating a message handling object in order to handle the message queue
+1 [qtp684204552-34] DEBUG org.gcube.application.framework.accesslogger.library.impl.AccessLogger - Constructing a new access logger. Create a new file if it does not exist for the current date
+86 [qtp684204552-34] INFO org.gcube.portlets.user.dataminermanager.server.DataMinerManagerServiceImpl - DataMinerManager started!
+128 [qtp684204552-34] INFO org.gcube.portlets.user.dataminermanager.server.util.SessionUtil - no user found in session, use test user
+152 [qtp684204552-34] DEBUG org.gcube.application.framework.core.session.ASLSession - Scope is null, returning null
+152 [qtp684204552-34] DEBUG org.gcube.application.framework.core.session.ASLSession - Getting security token: null in thread 34
+153 [qtp684204552-34] INFO org.gcube.application.framework.core.session.ASLSession - The scope about to set is: /gcube/devsec/devVRE
+154 [qtp684204552-34] INFO org.gcube.common.scope.impl.ScopeProviderScanner - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@40e8d656
+156 [qtp684204552-34] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 34
+156 [qtp684204552-34] INFO org.gcube.application.framework.core.session.ASLSession - Logging the entrance
+157 [qtp684204552-34] DEBUG org.gcube.application.framework.accesslogger.model.TemplateModel - A new entry line has been created.
The entry is: +157 [qtp684204552-34] DEBUG org.gcube.application.framework.accesslogger.model.TemplateModel - 2016-05-02 10:16:14, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +157 [qtp684204552-34] INFO org.gcube.portlets.user.dataminermanager.server.util.SessionUtil - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +157 [qtp684204552-34] DEBUG org.gcube.portlets.user.dataminermanager.server.DataMinerManagerServiceImpl - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +3229 [qtp684204552-33] INFO org.gcube.portlets.user.dataminermanager.server.util.SessionUtil - no user found in session, use test user +3229 [qtp684204552-33] DEBUG org.gcube.application.framework.core.session.ASLSession - Scope is null, returning null +3229 [qtp684204552-33] DEBUG org.gcube.application.framework.core.session.ASLSession - Getting security token: null in thread 33 +3229 [qtp684204552-33] INFO org.gcube.application.framework.core.session.ASLSession - The scope about to set is: /gcube/devsec/devVRE +3230 [qtp684204552-33] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 33 +3230 [qtp684204552-33] INFO org.gcube.application.framework.core.session.ASLSession - Logging the entrance +3230 [qtp684204552-33] DEBUG org.gcube.application.framework.accesslogger.model.TemplateModel - A new entry line has been created. The entry is: +3230 [qtp684204552-33] DEBUG org.gcube.application.framework.accesslogger.model.TemplateModel - 2016-05-02 10:16:17, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +3230 [qtp684204552-33] INFO org.gcube.portlets.user.dataminermanager.server.util.SessionUtil - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +3376 [qtp684204552-33] INFO org.gcube.common.clients.delegates.DiscoveryDelegate - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +3509 [qtp684204552-33] INFO org.gcube.common.scan.DefaultScanner - matched 28 resources from 112 urls in 117 ms +3688 [qtp684204552-33] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +3692 [qtp684204552-33] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +3692 [qtp684204552-33] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +3693 [qtp684204552-33] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +3693 [qtp684204552-33] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +3694 [qtp684204552-33] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +3694 [qtp684204552-33] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +3694 [qtp684204552-33] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +3695 [qtp684204552-33] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +3696 [qtp684204552-33] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +3697 [qtp684204552-33] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +3697 [qtp684204552-33] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +3698 [qtp684204552-33] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +3698 [qtp684204552-33] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +3869 [qtp684204552-33] INFO org.gcube.common.clients.stubs.jaxws.StubFactory - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +4038 [qtp684204552-33] INFO org.gcube.common.clients.stubs.jaxws.StubCache - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +4168 [qtp684204552-33] INFO org.gcube.resources.discovery.icclient.ICClient - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in 
collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +4320 [qtp684204552-33] INFO org.gcube.common.clients.stubs.jaxws.handlers.HandlerRegistry - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@2d2fff7 +4321 [qtp684204552-33] INFO org.gcube.common.clients.stubs.jaxws.handlers.HandlerRegistry - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@7356a1b2 +4322 [qtp684204552-33] INFO org.gcube.common.clients.stubs.jaxws.handlers.HandlerRegistry - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@4b6bbfbd +4322 [qtp684204552-33] INFO org.gcube.common.clients.stubs.jaxws.handlers.HandlerRegistry - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@1781f0d5 +4496 [qtp684204552-33] INFO org.gcube.resources.discovery.icclient.ICClient - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 328 ms +4674 [qtp684204552-33] INFO org.gcube.common.clients.delegates.DiscoveryDelegate - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+4732 [qtp684204552-33] DEBUG org.gcube.common.clients.cache.DefaultEndpointCache - caching http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE]
+4733 [qtp684204552-33] INFO org.gcube.portlets.user.dataminermanager.server.util.SessionUtil - received token: f0666597-4302-49ce-bea2-555b94e569cb
+4735 [qtp684204552-33] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPSBuilder - Build SM4WPS
+4735 [qtp684204552-33] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPSBuilder - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb]
+4738 [qtp684204552-33] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 33
+4749 [qtp684204552-33] INFO org.gcube.common.clients.stubs.jaxws.StubCache - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub
+4750 [qtp684204552-33] INFO org.gcube.resources.discovery.icclient.ICClient - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text()
+4769 [qtp684204552-33] INFO org.gcube.resources.discovery.icclient.ICClient - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 18 ms
+4772 [qtp684204552-33] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPS - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService]
+4772 [qtp684204552-33] INFO org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPS - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService
+4773 [qtp684204552-33] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPS - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet
+4787 [qtp684204552-33] INFO org.gcube.portlets.user.dataminermanager.server.smservice.wps.StatWPSClientSession - CONNECT
+4788 [qtp684204552-32] DEBUG org.gcube.application.framework.core.session.ASLSession - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar.
+4805 [qtp684204552-32] DEBUG org.gcube.application.framework.core.session.ASLSession - Could not parse file properties.xml for property. Setting it to default.
+4805 [qtp684204552-32] INFO org.gcube.application.framework.core.session.ASLSession - Session Timeout is: 1800000 +4805 [qtp684204552-32] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 32 +4805 [qtp684204552-32] DEBUG org.gcube.application.framework.core.session.ASLSession - Getting security token: null in thread 32 +4805 [qtp684204552-32] INFO org.gcube.portlets.user.dataminermanager.server.util.SessionUtil - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +4805 [qtp684204552-32] DEBUG org.gcube.portlets.user.dataminermanager.server.DataMinerManagerServiceImpl - getDataMinerWorkArea() +4809 [qtp684204552-32] INFO HomeManageFactory - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +4809 [qtp684204552-32] INFO HomeManageFactory - getHomeManagerFactory +5047 [qtp684204552-32] INFO org.gcube.common.homelibrary.util.config.HomeLibraryConfiguration - calculating the persistence folder +5047 [qtp684204552-32] INFO org.gcube.common.homelibrary.util.config.HomeLibraryConfiguration - Using tmp dir /tmp +5047 [qtp684204552-32] INFO HomeManageFactory - getInstance persistenceRoot: /tmp/home_library_persistence +5076 [qtp684204552-32] DEBUG org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory - Initialize content manager +5080 [qtp684204552-32] DEBUG org.gcube.common.homelibrary.jcr.repository.JCRRepository - Initialize repository +5080 [qtp684204552-32] DEBUG org.gcube.common.homelibrary.jcr.repository.JCRRepository - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +5080 [qtp684204552-32] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube in thread 32 +5150 [qtp684204552-32] INFO org.gcube.common.clients.stubs.jaxws.StubCache - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +5153 [qtp684204552-32] INFO org.gcube.resources.discovery.icclient.ICClient - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +5176 [qtp684204552-32] INFO org.gcube.resources.discovery.icclient.ICClient - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 23 ms +5228 [qtp684204552-32] ERROR org.gcube.common.homelibrary.jcr.repository.JCRRepository - error decrypting resource +java.security.InvalidKeyException: Unable to load the Key gcube.gcubekey from the classpath + at org.gcube.common.encryption.SymmetricKey.load(SymmetricKey.java:66) + at org.gcube.common.encryption.SymmetricKey.load(SymmetricKey.java:53) + at org.gcube.common.encryption.SymmetricKey.getKey(SymmetricKey.java:44) + at org.gcube.common.encryption.StringEncrypter.decrypt(StringEncrypter.java:58) + at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initializeRepository(JCRRepository.java:119) + at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initialize(JCRRepository.java:272) + at org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory.initialize(JCRHomeManagerFactory.java:46) + at 
org.gcube.common.homelibrary.home.HomeLibrary.createHomeManagerFactoryInstance(HomeLibrary.java:91) + at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:106) + at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:124) + at org.gcube.common.homelibrary.home.HomeLibrary.getUserHome(HomeLibrary.java:198) + at org.gcube.common.homelibrary.home.HomeLibrary.getUserWorkspace(HomeLibrary.java:143) + at org.gcube.portlets.user.dataminermanager.server.storage.StorageUtil.getItemInRootFolderOnWorkspace(StorageUtil.java:214) + at org.gcube.portlets.user.dataminermanager.server.util.DataMinerWorkAreaManager.getDataMinerWorkArea(DataMinerWorkAreaManager.java:40) + at org.gcube.portlets.user.dataminermanager.server.DataMinerManagerServiceImpl.getDataMinerWorkArea(DataMinerManagerServiceImpl.java:394) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:606) + at com.google.gwt.user.server.rpc.RPC.invokeAndEncodeResponse(RPC.java:561) + at com.google.gwt.user.server.rpc.RemoteServiceServlet.processCall(RemoteServiceServlet.java:265) + at com.google.gwt.user.server.rpc.RemoteServiceServlet.processPost(RemoteServiceServlet.java:305) + at com.google.gwt.user.server.rpc.AbstractRemoteServiceServlet.doPost(AbstractRemoteServiceServlet.java:62) + at javax.servlet.http.HttpServlet.service(HttpServlet.java:755) + at javax.servlet.http.HttpServlet.service(HttpServlet.java:848) + at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:686) + at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:501) + at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:137) + at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:557) + at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:231) + at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1086) + at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:428) + at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:193) + at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1020) + at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:135) + at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) + at org.eclipse.jetty.server.handler.RequestLogHandler.handle(RequestLogHandler.java:68) + at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) + at org.eclipse.jetty.server.Server.handle(Server.java:370) + at org.eclipse.jetty.server.AbstractHttpConnection.handleRequest(AbstractHttpConnection.java:489) + at org.eclipse.jetty.server.AbstractHttpConnection.content(AbstractHttpConnection.java:960) + at org.eclipse.jetty.server.AbstractHttpConnection$RequestHandler.content(AbstractHttpConnection.java:1021) + at org.eclipse.jetty.http.HttpParser.parseNext(HttpParser.java:865) + at org.eclipse.jetty.http.HttpParser.parseAvailable(HttpParser.java:240) + at org.eclipse.jetty.server.AsyncHttpConnection.handle(AsyncHttpConnection.java:82) + at org.eclipse.jetty.io.nio.SelectChannelEndPoint.handle(SelectChannelEndPoint.java:668) + at org.eclipse.jetty.io.nio.SelectChannelEndPoint$1.run(SelectChannelEndPoint.java:52) + at 
org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:608) + at org.eclipse.jetty.util.thread.QueuedThreadPool$3.run(QueuedThreadPool.java:543) + at java.lang.Thread.run(Thread.java:745) +5229 [qtp684204552-32] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 32 +5231 [qtp684204552-32] DEBUG org.gcube.portlets.user.dataminermanager.server.util.DataMinerWorkAreaManager - DataMiner Folder is set to null +6644 [qtp684204552-33] INFO org.gcube.portlets.user.dataminermanager.server.smservice.wps.StatWPSClientSession - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +6651 [qtp684204552-33] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPS - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. 
In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CLIMATE, name=Climate, briefDescription=CLIMATE, description=CLIMATE, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIOCLIMATE_HCAF, name=Bioclimate Hcaf, briefDescription=A transducer algorithm that generates an Half-degree Cells Authority File (HCAF) dataset for a certain time frame, with environmental parameters used by the AquaMaps approach. Evaluates the climatic changes impact on the variation of the ocean features contained in HCAF tables, description=A transducer algorithm that generates an Half-degree Cells Authority File (HCAF) dataset for a certain time frame, with environmental parameters used by the AquaMaps approach. Evaluates the climatic changes impact on the variation of the ocean features contained in HCAF tables, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIOCLIMATE_HSPEN, name=Bioclimate Hspen, briefDescription=A transducer algorithm that generates a table containing species envelops (HSPEN) in time, i.e. models capturing species tolerance with respect to environmental parameters, used by the AquaMaps approach. Evaluates the climatic changes impact on the variation of the salinity values in several ranges of a set of species envelopes, description=A transducer algorithm that generates a table containing species envelops (HSPEN) in time, i.e. models capturing species tolerance with respect to environmental parameters, used by the AquaMaps approach. Evaluates the climatic changes impact on the variation of the salinity values in several ranges of a set of species envelopes, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. 
NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. 
Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIOCLIMATE_HCAF, name=Bioclimate Hcaf, briefDescription=A transducer algorithm that generates an Half-degree Cells Authority File (HCAF) dataset for a certain time frame, with environmental parameters used by the AquaMaps approach. 
Evaluates the climatic changes impact on the variation of the ocean features contained in HCAF tables, description=A transducer algorithm that generates an Half-degree Cells Authority File (HCAF) dataset for a certain time frame, with environmental parameters used by the AquaMaps approach. Evaluates the climatic changes impact on the variation of the ocean features contained in HCAF tables, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIOCLIMATE_HSPEN, name=Bioclimate Hspen, briefDescription=A transducer algorithm that generates a table containing species envelops (HSPEN) in time, i.e. models capturing species tolerance with respect to environmental parameters, used by the AquaMaps approach. Evaluates the climatic changes impact on the variation of the salinity values in several ranges of a set of species envelopes, description=A transducer algorithm that generates a table containing species envelops (HSPEN) in time, i.e. models capturing species tolerance with respect to environmental parameters, used by the AquaMaps approach. Evaluates the climatic changes impact on the variation of the salinity values in several ranges of a set of species envelopes, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. 
Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +55281 [qtp684204552-33] DEBUG org.gcube.application.framework.core.session.ASLSession - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +55281 [qtp684204552-33] DEBUG org.gcube.application.framework.core.session.ASLSession - Could not parse file properties.xml for property. Setting it to default. 
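[Editorial note, not part of the original log.] The long dump above is the portlet-side algorithm catalogue: each OperatorCategory (e.g. TAXA, TIME_SERIES, VESSELS) groups Operator entries, and every entry carries the same fields visible in the toString() output (id, name, briefDescription, description, operatorParameters, hasImage). A minimal sketch of beans with that shape follows; the class and field names are inferred only from the log output and are assumptions, the real model classes in data-miner-manager may differ.

    import java.util.ArrayList;
    import java.util.List;

    // Hypothetical bean mirroring the fields shown in the log's toString() dump.
    class Operator {
        String id;                 // e.g. org.gcube...transducerers.SGVM_INTERPOLATION
        String name;               // e.g. "Sgvm Interpolation"
        String briefDescription;
        String description;
        List<String> operatorParameters = new ArrayList<>(); // empty ([]) for every entry above
        boolean hasImage;          // false for every entry above

        @Override
        public String toString() {
            return "Operator [id=" + id + ", name=" + name
                    + ", briefDescription=" + briefDescription
                    + ", description=" + description
                    + ", operatorParameters=" + operatorParameters
                    + ", hasImage=" + hasImage + "]";
        }
    }

    // Hypothetical grouping bean; the dump nests a list of Operator inside each category.
    class OperatorCategory {
        String id;
        String name;
        String briefDescription;
        String description;
        List<Operator> operators = new ArrayList<>();
        boolean hasImage;
    }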
+55281 [qtp684204552-33] INFO org.gcube.application.framework.core.session.ASLSession - Session Timeout is: 1800000 +55281 [qtp684204552-33] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 33 +55281 [qtp684204552-33] DEBUG org.gcube.application.framework.core.session.ASLSession - Getting security token: null in thread 33 +101246 [qtp684204552-31] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 31 +101247 [qtp684204552-31] DEBUG org.gcube.application.framework.core.session.ASLSession - Getting security token: null in thread 31 +101247 [qtp684204552-31] INFO org.gcube.portlets.user.dataminermanager.server.util.SessionUtil - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +101247 [qtp684204552-31] INFO org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPS - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIOCLIMATE_HCAF +101247 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPS - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +101247 [qtp684204552-31] INFO org.gcube.portlets.user.dataminermanager.server.smservice.wps.StatWPSClientSession - CONNECT +102480 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPS - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIOCLIMATE_HCAF + BIOCLIMATE_HCAF + A transducer algorithm that generates an Half-degree Cells Authority File (HCAF) dataset for a certain time frame, with environmental parameters used by the AquaMaps approach. Evaluates the climatic changes impact on the variation of the ocean features contained in HCAF tables + + + HCAF_Table_List + list of HCAF tables to analyze [a sequence of http links separated by | , each indicating a table in UTF-8 encoding following this template: (HCAF) http://goo.gl/SZG9uM] + Name of the parameter: HCAF_Table_List. list of HCAF tables to analyze [a sequence of http links separated by | , each indicating a table in UTF-8 encoding following this template: (HCAF) http://goo.gl/SZG9uM] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + HCAF_Table_Names + list of HCAF table names to be used as labels [a sequence of values separated by | ] (format: String) + Name of the parameter: HCAF_Table_Names. list of HCAF table names to be used as labels [a sequence of values separated by | ] (format: String) + + + + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +102484 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPS - WPSClient->Fetching Inputs +102485 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPS - WPSClient->Input: + HCAF_Table_List + list of HCAF tables to analyze [a sequence of http links separated by | , each indicating a table in UTF-8 encoding following this template: (HCAF) http://goo.gl/SZG9uM] + Name of the parameter: HCAF_Table_List. 
list of HCAF tables to analyze [a sequence of http links separated by | , each indicating a table in UTF-8 encoding following this template: (HCAF) http://goo.gl/SZG9uM] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +102485 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPS - WPSClient->Input: + HCAF_Table_Names + list of HCAF table names to be used as labels [a sequence of values separated by | ] (format: String) + Name of the parameter: HCAF_Table_Names. list of HCAF table names to be used as labels [a sequence of values separated by | ] (format: String) + + + + + +102488 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPS - WPSClient->Fetching Outputs +102489 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPS - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +102489 [qtp684204552-31] INFO org.gcube.portlets.user.dataminermanager.server.smservice.wps.StatWPSClientSession - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +102492 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Conversion to SM Type->HCAF_Table_List is a Complex Input +102494 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Max Megabytes: 1 +102497 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - MimeType: text/xml +102498 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Schema: null +102498 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Encoding: null +102498 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Conversion to SM Type->Title:list of HCAF tables to analyze [a sequence of http links separated by | , each indicating a table in UTF-8 encoding following this template: (HCAF) http://goo.gl/SZG9uM] +102498 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Conversion to SM Type->Name:HCAF_Table_List +102498 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Conversion to SM Type->Number of Inputs to Manage:1 +102498 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPS - InputParameter: FileParameter [mimeType=text/xml, value=null, name=HCAF_Table_List, description=list of HCAF tables to analyze [a sequence of http links separated by | , each indicating a table in UTF-8 encoding following this template: (HCAF) http://goo.gl/SZG9uM] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=FILE] +102498 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Conversion to SM Type->HCAF_Table_Names is a Literal Input +102503 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - WPS type: +102503 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Guessed type: java.lang.String +102503 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Guessed default value: +102503 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Machter title: list of HCAF table names to be used as labels [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. of Entries:1] +102503 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Machter find: true +102503 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Machter group: a sequence of values separated by | +102504 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Machter start: 47 +102504 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Machter end: 82 +102504 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Machter Group Count: 1 +102504 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Matcher separator: | +102504 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Conversion to SM Type->Title:list of HCAF table names to be used as labels [a sequence of values separated by | ] (format: String) +102504 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Conversion to SM Type->Name:HCAF_Table_Names +102504 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Conversion to SM Type->Number of Inputs to Manage:1 +102504 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPS - InputParameter: ListParameter [type=java.lang.String, value=null, separator=|, name=HCAF_Table_Names, description=list of HCAF table names to be used as labels [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. of Entries:1], typology=LIST] +102504 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPS - Parameters: [FileParameter [mimeType=text/xml, value=null, name=HCAF_Table_List, description=list of HCAF tables to analyze [a sequence of http links separated by | , each indicating a table in UTF-8 encoding following this template: (HCAF) http://goo.gl/SZG9uM] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=FILE], ListParameter [type=java.lang.String, value=null, separator=|, name=HCAF_Table_Names, description=list of HCAF table names to be used as labels [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. 
of Entries:1], typology=LIST]] +102780 [qtp684204552-35] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 35 +102780 [qtp684204552-35] DEBUG org.gcube.application.framework.core.session.ASLSession - Getting security token: null in thread 35 +102780 [qtp684204552-33] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 33 +102781 [qtp684204552-33] DEBUG org.gcube.application.framework.core.session.ASLSession - Getting security token: null in thread 33 +102781 [qtp684204552-35] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 35 +102781 [qtp684204552-33] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 33 +102781 [qtp684204552-35] INFO HomeManageFactory - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +102781 [qtp684204552-35] INFO HomeManageFactory - getHomeManagerFactory +102781 [qtp684204552-33] INFO HomeManageFactory - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +102781 [qtp684204552-33] INFO HomeManageFactory - getHomeManagerFactory +102781 [qtp684204552-35] INFO org.gcube.common.homelibrary.util.config.HomeLibraryConfiguration - calculating the persistence folder +102781 [qtp684204552-33] INFO org.gcube.common.homelibrary.util.config.HomeLibraryConfiguration - calculating the persistence folder +102781 [qtp684204552-35] INFO org.gcube.common.homelibrary.util.config.HomeLibraryConfiguration - Using tmp dir /tmp +102782 [qtp684204552-33] INFO org.gcube.common.homelibrary.util.config.HomeLibraryConfiguration - Using tmp dir /tmp +102782 [qtp684204552-35] INFO HomeManageFactory - getInstance persistenceRoot: /tmp/home_library_persistence +102782 [qtp684204552-35] DEBUG org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory - Initialize content manager +102782 [qtp684204552-35] DEBUG org.gcube.common.homelibrary.jcr.repository.JCRRepository - Initialize repository +102782 [qtp684204552-35] DEBUG org.gcube.common.homelibrary.jcr.repository.JCRRepository - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +102782 [qtp684204552-35] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube in thread 35 +102792 [qtp684204552-35] INFO org.gcube.common.clients.stubs.jaxws.StubCache - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +102793 [qtp684204552-35] INFO org.gcube.resources.discovery.icclient.ICClient - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +102817 [qtp684204552-35] INFO org.gcube.resources.discovery.icclient.ICClient - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 23 ms +102827 [qtp684204552-35] ERROR org.gcube.common.homelibrary.jcr.repository.JCRRepository - error decrypting resource +java.security.InvalidKeyException: Unable to load the Key gcube.gcubekey from the classpath + at 
org.gcube.common.encryption.SymmetricKey.load(SymmetricKey.java:66) + at org.gcube.common.encryption.SymmetricKey.load(SymmetricKey.java:53) + at org.gcube.common.encryption.SymmetricKey.getKey(SymmetricKey.java:44) + at org.gcube.common.encryption.StringEncrypter.decrypt(StringEncrypter.java:58) + at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initializeRepository(JCRRepository.java:119) + at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initialize(JCRRepository.java:272) + at org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory.initialize(JCRHomeManagerFactory.java:46) + at org.gcube.common.homelibrary.home.HomeLibrary.createHomeManagerFactoryInstance(HomeLibrary.java:91) + at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:106) + at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:124) + at org.gcube.common.homelibrary.home.HomeLibrary.getUserHome(HomeLibrary.java:198) + at org.gcube.common.homelibrary.home.HomeLibrary.getUserWorkspace(HomeLibrary.java:143) + at org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl.getWorkspace(WorkspaceExplorerServiceImpl.java:104) + at org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl.getItemByCategory(WorkspaceExplorerServiceImpl.java:207) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:606) + at com.google.gwt.user.server.rpc.RPC.invokeAndEncodeResponse(RPC.java:561) + at com.google.gwt.user.server.rpc.RemoteServiceServlet.processCall(RemoteServiceServlet.java:265) + at com.google.gwt.user.server.rpc.RemoteServiceServlet.processPost(RemoteServiceServlet.java:305) + at com.google.gwt.user.server.rpc.AbstractRemoteServiceServlet.doPost(AbstractRemoteServiceServlet.java:62) + at javax.servlet.http.HttpServlet.service(HttpServlet.java:755) + at javax.servlet.http.HttpServlet.service(HttpServlet.java:848) + at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:686) + at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:501) + at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:137) + at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:557) + at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:231) + at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1086) + at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:428) + at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:193) + at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1020) + at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:135) + at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) + at org.eclipse.jetty.server.handler.RequestLogHandler.handle(RequestLogHandler.java:68) + at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) + at org.eclipse.jetty.server.Server.handle(Server.java:370) + at org.eclipse.jetty.server.AbstractHttpConnection.handleRequest(AbstractHttpConnection.java:489) + at org.eclipse.jetty.server.AbstractHttpConnection.content(AbstractHttpConnection.java:960) + at 
org.eclipse.jetty.server.AbstractHttpConnection$RequestHandler.content(AbstractHttpConnection.java:1021) + at org.eclipse.jetty.http.HttpParser.parseNext(HttpParser.java:865) + at org.eclipse.jetty.http.HttpParser.parseAvailable(HttpParser.java:240) + at org.eclipse.jetty.server.AsyncHttpConnection.handle(AsyncHttpConnection.java:82) + at org.eclipse.jetty.io.nio.SelectChannelEndPoint.handle(SelectChannelEndPoint.java:668) + at org.eclipse.jetty.io.nio.SelectChannelEndPoint$1.run(SelectChannelEndPoint.java:52) + at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:608) + at org.eclipse.jetty.util.thread.QueuedThreadPool$3.run(QueuedThreadPool.java:543) + at java.lang.Thread.run(Thread.java:745) +102829 [qtp684204552-35] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 35 +102829 [qtp684204552-33] INFO HomeManageFactory - getInstance persistenceRoot: /tmp/home_library_persistence +102829 [qtp684204552-35] ERROR org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl - Error during get item by category +org.gcube.common.homelibrary.home.exceptions.InternalErrorException: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: cannot discover password and username in scope /gcube/devsec/devVRE + at org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory.initialize(JCRHomeManagerFactory.java:48) + at org.gcube.common.homelibrary.home.HomeLibrary.createHomeManagerFactoryInstance(HomeLibrary.java:91) + at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:106) + at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:124) + at org.gcube.common.homelibrary.home.HomeLibrary.getUserHome(HomeLibrary.java:198) + at org.gcube.common.homelibrary.home.HomeLibrary.getUserWorkspace(HomeLibrary.java:143) + at org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl.getWorkspace(WorkspaceExplorerServiceImpl.java:104) + at org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl.getItemByCategory(WorkspaceExplorerServiceImpl.java:207) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:606) + at com.google.gwt.user.server.rpc.RPC.invokeAndEncodeResponse(RPC.java:561) + at com.google.gwt.user.server.rpc.RemoteServiceServlet.processCall(RemoteServiceServlet.java:265) + at com.google.gwt.user.server.rpc.RemoteServiceServlet.processPost(RemoteServiceServlet.java:305) + at com.google.gwt.user.server.rpc.AbstractRemoteServiceServlet.doPost(AbstractRemoteServiceServlet.java:62) + at javax.servlet.http.HttpServlet.service(HttpServlet.java:755) + at javax.servlet.http.HttpServlet.service(HttpServlet.java:848) + at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:686) + at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:501) + at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:137) + at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:557) + at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:231) + at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1086) + at 
org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:428) + at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:193) + at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1020) + at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:135) + at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) + at org.eclipse.jetty.server.handler.RequestLogHandler.handle(RequestLogHandler.java:68) + at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) + at org.eclipse.jetty.server.Server.handle(Server.java:370) + at org.eclipse.jetty.server.AbstractHttpConnection.handleRequest(AbstractHttpConnection.java:489) + at org.eclipse.jetty.server.AbstractHttpConnection.content(AbstractHttpConnection.java:960) + at org.eclipse.jetty.server.AbstractHttpConnection$RequestHandler.content(AbstractHttpConnection.java:1021) + at org.eclipse.jetty.http.HttpParser.parseNext(HttpParser.java:865) + at org.eclipse.jetty.http.HttpParser.parseAvailable(HttpParser.java:240) + at org.eclipse.jetty.server.AsyncHttpConnection.handle(AsyncHttpConnection.java:82) + at org.eclipse.jetty.io.nio.SelectChannelEndPoint.handle(SelectChannelEndPoint.java:668) + at org.eclipse.jetty.io.nio.SelectChannelEndPoint$1.run(SelectChannelEndPoint.java:52) + at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:608) + at org.eclipse.jetty.util.thread.QueuedThreadPool$3.run(QueuedThreadPool.java:543) + at java.lang.Thread.run(Thread.java:745) +Caused by: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: cannot discover password and username in scope /gcube/devsec/devVRE + at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initializeRepository(JCRRepository.java:155) + at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initialize(JCRRepository.java:272) + at org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory.initialize(JCRHomeManagerFactory.java:46) + ... 42 more +Caused by: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: cannot discover password and username in scope /gcube/devsec/devVRE + at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initializeRepository(JCRRepository.java:147) + ... 
44 more +102829 [qtp684204552-33] DEBUG org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory - Initialize content manager +102831 [qtp684204552-33] DEBUG org.gcube.common.homelibrary.jcr.repository.JCRRepository - Initialize repository +102831 [qtp684204552-33] DEBUG org.gcube.common.homelibrary.jcr.repository.JCRRepository - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +102831 [qtp684204552-33] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube in thread 33 +102841 [qtp684204552-33] INFO org.gcube.common.clients.stubs.jaxws.StubCache - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +102842 [qtp684204552-33] INFO org.gcube.resources.discovery.icclient.ICClient - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +102861 [qtp684204552-33] INFO org.gcube.resources.discovery.icclient.ICClient - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 19 ms +102872 [qtp684204552-33] ERROR org.gcube.common.homelibrary.jcr.repository.JCRRepository - error decrypting resource +java.security.InvalidKeyException: Unable to load the Key gcube.gcubekey from the classpath + at org.gcube.common.encryption.SymmetricKey.load(SymmetricKey.java:66) + at org.gcube.common.encryption.SymmetricKey.load(SymmetricKey.java:53) + at org.gcube.common.encryption.SymmetricKey.getKey(SymmetricKey.java:44) + at org.gcube.common.encryption.StringEncrypter.decrypt(StringEncrypter.java:58) + at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initializeRepository(JCRRepository.java:119) + at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initialize(JCRRepository.java:272) + at org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory.initialize(JCRHomeManagerFactory.java:46) + at org.gcube.common.homelibrary.home.HomeLibrary.createHomeManagerFactoryInstance(HomeLibrary.java:91) + at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:106) + at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:124) + at org.gcube.common.homelibrary.home.HomeLibrary.getUserHome(HomeLibrary.java:198) + at org.gcube.common.homelibrary.home.HomeLibrary.getUserWorkspace(HomeLibrary.java:143) + at org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl.getWorkspace(WorkspaceExplorerServiceImpl.java:104) + at org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl.getItemByCategory(WorkspaceExplorerServiceImpl.java:207) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:606) + at com.google.gwt.user.server.rpc.RPC.invokeAndEncodeResponse(RPC.java:561) + at com.google.gwt.user.server.rpc.RemoteServiceServlet.processCall(RemoteServiceServlet.java:265) + at 
com.google.gwt.user.server.rpc.RemoteServiceServlet.processPost(RemoteServiceServlet.java:305) + at com.google.gwt.user.server.rpc.AbstractRemoteServiceServlet.doPost(AbstractRemoteServiceServlet.java:62) + at javax.servlet.http.HttpServlet.service(HttpServlet.java:755) + at javax.servlet.http.HttpServlet.service(HttpServlet.java:848) + at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:686) + at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:501) + at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:137) + at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:557) + at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:231) + at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1086) + at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:428) + at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:193) + at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1020) + at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:135) + at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) + at org.eclipse.jetty.server.handler.RequestLogHandler.handle(RequestLogHandler.java:68) + at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) + at org.eclipse.jetty.server.Server.handle(Server.java:370) + at org.eclipse.jetty.server.AbstractHttpConnection.handleRequest(AbstractHttpConnection.java:489) + at org.eclipse.jetty.server.AbstractHttpConnection.content(AbstractHttpConnection.java:960) + at org.eclipse.jetty.server.AbstractHttpConnection$RequestHandler.content(AbstractHttpConnection.java:1021) + at org.eclipse.jetty.http.HttpParser.parseNext(HttpParser.java:865) + at org.eclipse.jetty.http.HttpParser.parseAvailable(HttpParser.java:240) + at org.eclipse.jetty.server.AsyncHttpConnection.handle(AsyncHttpConnection.java:82) + at org.eclipse.jetty.io.nio.SelectChannelEndPoint.handle(SelectChannelEndPoint.java:668) + at org.eclipse.jetty.io.nio.SelectChannelEndPoint$1.run(SelectChannelEndPoint.java:52) + at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:608) + at org.eclipse.jetty.util.thread.QueuedThreadPool$3.run(QueuedThreadPool.java:543) + at java.lang.Thread.run(Thread.java:745) +102874 [qtp684204552-33] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 33 +102874 [qtp684204552-33] ERROR org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl - Error during get item by category +org.gcube.common.homelibrary.home.exceptions.InternalErrorException: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: cannot discover password and username in scope /gcube/devsec/devVRE + at org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory.initialize(JCRHomeManagerFactory.java:48) + at org.gcube.common.homelibrary.home.HomeLibrary.createHomeManagerFactoryInstance(HomeLibrary.java:91) + at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:106) + at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:124) + at org.gcube.common.homelibrary.home.HomeLibrary.getUserHome(HomeLibrary.java:198) + at org.gcube.common.homelibrary.home.HomeLibrary.getUserWorkspace(HomeLibrary.java:143) + at 
org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl.getWorkspace(WorkspaceExplorerServiceImpl.java:104) + at org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl.getItemByCategory(WorkspaceExplorerServiceImpl.java:207) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:606) + at com.google.gwt.user.server.rpc.RPC.invokeAndEncodeResponse(RPC.java:561) + at com.google.gwt.user.server.rpc.RemoteServiceServlet.processCall(RemoteServiceServlet.java:265) + at com.google.gwt.user.server.rpc.RemoteServiceServlet.processPost(RemoteServiceServlet.java:305) + at com.google.gwt.user.server.rpc.AbstractRemoteServiceServlet.doPost(AbstractRemoteServiceServlet.java:62) + at javax.servlet.http.HttpServlet.service(HttpServlet.java:755) + at javax.servlet.http.HttpServlet.service(HttpServlet.java:848) + at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:686) + at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:501) + at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:137) + at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:557) + at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:231) + at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1086) + at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:428) + at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:193) + at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1020) + at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:135) + at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) + at org.eclipse.jetty.server.handler.RequestLogHandler.handle(RequestLogHandler.java:68) + at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) + at org.eclipse.jetty.server.Server.handle(Server.java:370) + at org.eclipse.jetty.server.AbstractHttpConnection.handleRequest(AbstractHttpConnection.java:489) + at org.eclipse.jetty.server.AbstractHttpConnection.content(AbstractHttpConnection.java:960) + at org.eclipse.jetty.server.AbstractHttpConnection$RequestHandler.content(AbstractHttpConnection.java:1021) + at org.eclipse.jetty.http.HttpParser.parseNext(HttpParser.java:865) + at org.eclipse.jetty.http.HttpParser.parseAvailable(HttpParser.java:240) + at org.eclipse.jetty.server.AsyncHttpConnection.handle(AsyncHttpConnection.java:82) + at org.eclipse.jetty.io.nio.SelectChannelEndPoint.handle(SelectChannelEndPoint.java:668) + at org.eclipse.jetty.io.nio.SelectChannelEndPoint$1.run(SelectChannelEndPoint.java:52) + at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:608) + at org.eclipse.jetty.util.thread.QueuedThreadPool$3.run(QueuedThreadPool.java:543) + at java.lang.Thread.run(Thread.java:745) +Caused by: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: cannot discover password and username in scope /gcube/devsec/devVRE + at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initializeRepository(JCRRepository.java:155) + at 
org.gcube.common.homelibrary.jcr.repository.JCRRepository.initialize(JCRRepository.java:272) + at org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory.initialize(JCRHomeManagerFactory.java:46) + ... 42 more +Caused by: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: cannot discover password and username in scope /gcube/devsec/devVRE + at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initializeRepository(JCRRepository.java:147) + ... 44 more +103169 [qtp684204552-31] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 31 +103169 [qtp684204552-31] DEBUG org.gcube.application.framework.core.session.ASLSession - Getting security token: null in thread 31 +103169 [qtp684204552-31] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 31 +103169 [qtp684204552-31] INFO HomeManageFactory - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +103169 [qtp684204552-31] INFO HomeManageFactory - getHomeManagerFactory +103169 [qtp684204552-31] INFO org.gcube.common.homelibrary.util.config.HomeLibraryConfiguration - calculating the persistence folder +103169 [qtp684204552-31] INFO org.gcube.common.homelibrary.util.config.HomeLibraryConfiguration - Using tmp dir /tmp +103169 [qtp684204552-31] INFO HomeManageFactory - getInstance persistenceRoot: /tmp/home_library_persistence +103170 [qtp684204552-31] DEBUG org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory - Initialize content manager +103170 [qtp684204552-31] DEBUG org.gcube.common.homelibrary.jcr.repository.JCRRepository - Initialize repository +103170 [qtp684204552-31] DEBUG org.gcube.common.homelibrary.jcr.repository.JCRRepository - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +103170 [qtp684204552-31] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube in thread 31 +103170 [qtp684204552-34] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 34 +103170 [qtp684204552-34] DEBUG org.gcube.application.framework.core.session.ASLSession - Getting security token: null in thread 34 +103170 [qtp684204552-34] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 34 +103171 [qtp684204552-34] INFO HomeManageFactory - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +103171 [qtp684204552-34] INFO HomeManageFactory - getHomeManagerFactory +103171 [qtp684204552-34] INFO org.gcube.common.homelibrary.util.config.HomeLibraryConfiguration - calculating the persistence folder +103171 [qtp684204552-34] INFO org.gcube.common.homelibrary.util.config.HomeLibraryConfiguration - Using tmp dir /tmp +103189 [qtp684204552-31] INFO org.gcube.common.clients.stubs.jaxws.StubCache - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +103191 [qtp684204552-31] INFO org.gcube.resources.discovery.icclient.ICClient - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +103215 [qtp684204552-31] INFO org.gcube.resources.discovery.icclient.ICClient - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 24 ms +103225 [qtp684204552-31] ERROR org.gcube.common.homelibrary.jcr.repository.JCRRepository - error decrypting resource +java.security.InvalidKeyException: Unable to load the Key gcube.gcubekey from the classpath + at org.gcube.common.encryption.SymmetricKey.load(SymmetricKey.java:66) + at org.gcube.common.encryption.SymmetricKey.load(SymmetricKey.java:53) + at org.gcube.common.encryption.SymmetricKey.getKey(SymmetricKey.java:44) + at org.gcube.common.encryption.StringEncrypter.decrypt(StringEncrypter.java:58) + at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initializeRepository(JCRRepository.java:119) + at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initialize(JCRRepository.java:272) + at org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory.initialize(JCRHomeManagerFactory.java:46) + at org.gcube.common.homelibrary.home.HomeLibrary.createHomeManagerFactoryInstance(HomeLibrary.java:91) + at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:106) + at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:124) + at org.gcube.common.homelibrary.home.HomeLibrary.getUserHome(HomeLibrary.java:198) + at org.gcube.common.homelibrary.home.HomeLibrary.getUserWorkspace(HomeLibrary.java:143) + at org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl.getWorkspace(WorkspaceExplorerServiceImpl.java:104) + at org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl.getItemByCategory(WorkspaceExplorerServiceImpl.java:207) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:606) + at com.google.gwt.user.server.rpc.RPC.invokeAndEncodeResponse(RPC.java:561) + at com.google.gwt.user.server.rpc.RemoteServiceServlet.processCall(RemoteServiceServlet.java:265) + at com.google.gwt.user.server.rpc.RemoteServiceServlet.processPost(RemoteServiceServlet.java:305) + at 
com.google.gwt.user.server.rpc.AbstractRemoteServiceServlet.doPost(AbstractRemoteServiceServlet.java:62) + at javax.servlet.http.HttpServlet.service(HttpServlet.java:755) + at javax.servlet.http.HttpServlet.service(HttpServlet.java:848) + at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:686) + at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:501) + at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:137) + at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:557) + at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:231) + at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1086) + at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:428) + at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:193) + at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1020) + at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:135) + at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) + at org.eclipse.jetty.server.handler.RequestLogHandler.handle(RequestLogHandler.java:68) + at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) + at org.eclipse.jetty.server.Server.handle(Server.java:370) + at org.eclipse.jetty.server.AbstractHttpConnection.handleRequest(AbstractHttpConnection.java:489) + at org.eclipse.jetty.server.AbstractHttpConnection.content(AbstractHttpConnection.java:960) + at org.eclipse.jetty.server.AbstractHttpConnection$RequestHandler.content(AbstractHttpConnection.java:1021) + at org.eclipse.jetty.http.HttpParser.parseNext(HttpParser.java:865) + at org.eclipse.jetty.http.HttpParser.parseAvailable(HttpParser.java:240) + at org.eclipse.jetty.server.AsyncHttpConnection.handle(AsyncHttpConnection.java:82) + at org.eclipse.jetty.io.nio.SelectChannelEndPoint.handle(SelectChannelEndPoint.java:668) + at org.eclipse.jetty.io.nio.SelectChannelEndPoint$1.run(SelectChannelEndPoint.java:52) + at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:608) + at org.eclipse.jetty.util.thread.QueuedThreadPool$3.run(QueuedThreadPool.java:543) + at java.lang.Thread.run(Thread.java:745) +103228 [qtp684204552-31] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 31 +103228 [qtp684204552-31] ERROR org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl - Error during get item by category +org.gcube.common.homelibrary.home.exceptions.InternalErrorException: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: cannot discover password and username in scope /gcube/devsec/devVRE + at org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory.initialize(JCRHomeManagerFactory.java:48) + at org.gcube.common.homelibrary.home.HomeLibrary.createHomeManagerFactoryInstance(HomeLibrary.java:91) + at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:106) + at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:124) + at org.gcube.common.homelibrary.home.HomeLibrary.getUserHome(HomeLibrary.java:198) + at org.gcube.common.homelibrary.home.HomeLibrary.getUserWorkspace(HomeLibrary.java:143) + at 
org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl.getWorkspace(WorkspaceExplorerServiceImpl.java:104) + at org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl.getItemByCategory(WorkspaceExplorerServiceImpl.java:207) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:606) + at com.google.gwt.user.server.rpc.RPC.invokeAndEncodeResponse(RPC.java:561) + at com.google.gwt.user.server.rpc.RemoteServiceServlet.processCall(RemoteServiceServlet.java:265) + at com.google.gwt.user.server.rpc.RemoteServiceServlet.processPost(RemoteServiceServlet.java:305) + at com.google.gwt.user.server.rpc.AbstractRemoteServiceServlet.doPost(AbstractRemoteServiceServlet.java:62) + at javax.servlet.http.HttpServlet.service(HttpServlet.java:755) + at javax.servlet.http.HttpServlet.service(HttpServlet.java:848) + at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:686) + at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:501) + at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:137) + at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:557) + at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:231) + at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1086) + at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:428) + at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:193) + at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1020) + at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:135) + at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) + at org.eclipse.jetty.server.handler.RequestLogHandler.handle(RequestLogHandler.java:68) + at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) + at org.eclipse.jetty.server.Server.handle(Server.java:370) + at org.eclipse.jetty.server.AbstractHttpConnection.handleRequest(AbstractHttpConnection.java:489) + at org.eclipse.jetty.server.AbstractHttpConnection.content(AbstractHttpConnection.java:960) + at org.eclipse.jetty.server.AbstractHttpConnection$RequestHandler.content(AbstractHttpConnection.java:1021) + at org.eclipse.jetty.http.HttpParser.parseNext(HttpParser.java:865) + at org.eclipse.jetty.http.HttpParser.parseAvailable(HttpParser.java:240) + at org.eclipse.jetty.server.AsyncHttpConnection.handle(AsyncHttpConnection.java:82) + at org.eclipse.jetty.io.nio.SelectChannelEndPoint.handle(SelectChannelEndPoint.java:668) + at org.eclipse.jetty.io.nio.SelectChannelEndPoint$1.run(SelectChannelEndPoint.java:52) + at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:608) + at org.eclipse.jetty.util.thread.QueuedThreadPool$3.run(QueuedThreadPool.java:543) + at java.lang.Thread.run(Thread.java:745) +Caused by: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: cannot discover password and username in scope /gcube/devsec/devVRE + at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initializeRepository(JCRRepository.java:155) + at 
org.gcube.common.homelibrary.jcr.repository.JCRRepository.initialize(JCRRepository.java:272) + at org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory.initialize(JCRHomeManagerFactory.java:46) + ... 42 more +Caused by: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: cannot discover password and username in scope /gcube/devsec/devVRE + at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initializeRepository(JCRRepository.java:147) + ... 44 more +103228 [qtp684204552-34] INFO HomeManageFactory - getInstance persistenceRoot: /tmp/home_library_persistence +103230 [qtp684204552-34] DEBUG org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory - Initialize content manager +103230 [qtp684204552-34] DEBUG org.gcube.common.homelibrary.jcr.repository.JCRRepository - Initialize repository +103230 [qtp684204552-34] DEBUG org.gcube.common.homelibrary.jcr.repository.JCRRepository - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +103230 [qtp684204552-34] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube in thread 34 +103239 [qtp684204552-34] INFO org.gcube.common.clients.stubs.jaxws.StubCache - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +103241 [qtp684204552-34] INFO org.gcube.resources.discovery.icclient.ICClient - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +103259 [qtp684204552-34] INFO org.gcube.resources.discovery.icclient.ICClient - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +103269 [qtp684204552-34] ERROR org.gcube.common.homelibrary.jcr.repository.JCRRepository - error decrypting resource +java.security.InvalidKeyException: Unable to load the Key gcube.gcubekey from the classpath + at org.gcube.common.encryption.SymmetricKey.load(SymmetricKey.java:66) + at org.gcube.common.encryption.SymmetricKey.load(SymmetricKey.java:53) + at org.gcube.common.encryption.SymmetricKey.getKey(SymmetricKey.java:44) + at org.gcube.common.encryption.StringEncrypter.decrypt(StringEncrypter.java:58) + at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initializeRepository(JCRRepository.java:119) + at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initialize(JCRRepository.java:272) + at org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory.initialize(JCRHomeManagerFactory.java:46) + at org.gcube.common.homelibrary.home.HomeLibrary.createHomeManagerFactoryInstance(HomeLibrary.java:91) + at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:106) + at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:124) + at org.gcube.common.homelibrary.home.HomeLibrary.getUserHome(HomeLibrary.java:198) + at org.gcube.common.homelibrary.home.HomeLibrary.getUserWorkspace(HomeLibrary.java:143) + at org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl.getWorkspace(WorkspaceExplorerServiceImpl.java:104) + at 
org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl.getRoot(WorkspaceExplorerServiceImpl.java:117) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:606) + at com.google.gwt.user.server.rpc.RPC.invokeAndEncodeResponse(RPC.java:561) + at com.google.gwt.user.server.rpc.RemoteServiceServlet.processCall(RemoteServiceServlet.java:265) + at com.google.gwt.user.server.rpc.RemoteServiceServlet.processPost(RemoteServiceServlet.java:305) + at com.google.gwt.user.server.rpc.AbstractRemoteServiceServlet.doPost(AbstractRemoteServiceServlet.java:62) + at javax.servlet.http.HttpServlet.service(HttpServlet.java:755) + at javax.servlet.http.HttpServlet.service(HttpServlet.java:848) + at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:686) + at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:501) + at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:137) + at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:557) + at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:231) + at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1086) + at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:428) + at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:193) + at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1020) + at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:135) + at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) + at org.eclipse.jetty.server.handler.RequestLogHandler.handle(RequestLogHandler.java:68) + at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) + at org.eclipse.jetty.server.Server.handle(Server.java:370) + at org.eclipse.jetty.server.AbstractHttpConnection.handleRequest(AbstractHttpConnection.java:489) + at org.eclipse.jetty.server.AbstractHttpConnection.content(AbstractHttpConnection.java:960) + at org.eclipse.jetty.server.AbstractHttpConnection$RequestHandler.content(AbstractHttpConnection.java:1021) + at org.eclipse.jetty.http.HttpParser.parseNext(HttpParser.java:865) + at org.eclipse.jetty.http.HttpParser.parseAvailable(HttpParser.java:240) + at org.eclipse.jetty.server.AsyncHttpConnection.handle(AsyncHttpConnection.java:82) + at org.eclipse.jetty.io.nio.SelectChannelEndPoint.handle(SelectChannelEndPoint.java:668) + at org.eclipse.jetty.io.nio.SelectChannelEndPoint$1.run(SelectChannelEndPoint.java:52) + at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:608) + at org.eclipse.jetty.util.thread.QueuedThreadPool$3.run(QueuedThreadPool.java:543) + at java.lang.Thread.run(Thread.java:745) +103269 [qtp684204552-34] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 34 +103269 [qtp684204552-34] ERROR org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl - Error during root retrieving +org.gcube.common.homelibrary.home.exceptions.InternalErrorException: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: cannot discover password and username in scope 
/gcube/devsec/devVRE + at org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory.initialize(JCRHomeManagerFactory.java:48) + at org.gcube.common.homelibrary.home.HomeLibrary.createHomeManagerFactoryInstance(HomeLibrary.java:91) + at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:106) + at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:124) + at org.gcube.common.homelibrary.home.HomeLibrary.getUserHome(HomeLibrary.java:198) + at org.gcube.common.homelibrary.home.HomeLibrary.getUserWorkspace(HomeLibrary.java:143) + at org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl.getWorkspace(WorkspaceExplorerServiceImpl.java:104) + at org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl.getRoot(WorkspaceExplorerServiceImpl.java:117) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:606) + at com.google.gwt.user.server.rpc.RPC.invokeAndEncodeResponse(RPC.java:561) + at com.google.gwt.user.server.rpc.RemoteServiceServlet.processCall(RemoteServiceServlet.java:265) + at com.google.gwt.user.server.rpc.RemoteServiceServlet.processPost(RemoteServiceServlet.java:305) + at com.google.gwt.user.server.rpc.AbstractRemoteServiceServlet.doPost(AbstractRemoteServiceServlet.java:62) + at javax.servlet.http.HttpServlet.service(HttpServlet.java:755) + at javax.servlet.http.HttpServlet.service(HttpServlet.java:848) + at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:686) + at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:501) + at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:137) + at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:557) + at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:231) + at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1086) + at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:428) + at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:193) + at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1020) + at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:135) + at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) + at org.eclipse.jetty.server.handler.RequestLogHandler.handle(RequestLogHandler.java:68) + at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) + at org.eclipse.jetty.server.Server.handle(Server.java:370) + at org.eclipse.jetty.server.AbstractHttpConnection.handleRequest(AbstractHttpConnection.java:489) + at org.eclipse.jetty.server.AbstractHttpConnection.content(AbstractHttpConnection.java:960) + at org.eclipse.jetty.server.AbstractHttpConnection$RequestHandler.content(AbstractHttpConnection.java:1021) + at org.eclipse.jetty.http.HttpParser.parseNext(HttpParser.java:865) + at org.eclipse.jetty.http.HttpParser.parseAvailable(HttpParser.java:240) + at org.eclipse.jetty.server.AsyncHttpConnection.handle(AsyncHttpConnection.java:82) + at org.eclipse.jetty.io.nio.SelectChannelEndPoint.handle(SelectChannelEndPoint.java:668) + at org.eclipse.jetty.io.nio.SelectChannelEndPoint$1.run(SelectChannelEndPoint.java:52) + at 
org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:608) + at org.eclipse.jetty.util.thread.QueuedThreadPool$3.run(QueuedThreadPool.java:543) + at java.lang.Thread.run(Thread.java:745) +Caused by: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: cannot discover password and username in scope /gcube/devsec/devVRE + at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initializeRepository(JCRRepository.java:155) + at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initialize(JCRRepository.java:272) + at org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory.initialize(JCRHomeManagerFactory.java:46) + ... 42 more +Caused by: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: cannot discover password and username in scope /gcube/devsec/devVRE + at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initializeRepository(JCRRepository.java:147) + ... 44 more +110269 [qtp684204552-30] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 30 +110269 [qtp684204552-30] DEBUG org.gcube.application.framework.core.session.ASLSession - Getting security token: null in thread 30 +165268 [qtp684204552-34] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 34 +165268 [qtp684204552-34] DEBUG org.gcube.application.framework.core.session.ASLSession - Getting security token: null in thread 34 diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/client/DataMinerManagerController.java b/src/main/java/org/gcube/portlets/user/dataminermanager/client/DataMinerManagerController.java index 94a5047..817b286 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/client/DataMinerManagerController.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/client/DataMinerManagerController.java @@ -245,7 +245,6 @@ public class DataMinerManagerController { final StartComputationExecutionRequestEvent event) { DataMinerPortletServiceAsync.INSTANCE.startComputation(event.getOp(), - event.getComputationTitle(), event.getComputationDescription(), new AsyncCallback() { @Override public void onSuccess(ComputationId computationId) { diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/client/events/ComputationReadyEvent.java b/src/main/java/org/gcube/portlets/user/dataminermanager/client/events/ComputationReadyEvent.java index 6ce48f1..c48b151 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/client/events/ComputationReadyEvent.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/client/events/ComputationReadyEvent.java @@ -20,8 +20,6 @@ public class ComputationReadyEvent extends public static Type TYPE = new Type(); private Operator operator; - private String computationTitle; - private String computationDescription; public interface ComputationReadyEventHandler extends EventHandler { void onReady(ComputationReadyEvent event); @@ -32,12 +30,9 @@ public class ComputationReadyEvent extends ComputationReadyEventHandler handler); } - public ComputationReadyEvent(Operator operator, String computationTitle, - String computationDescription) { + public ComputationReadyEvent(Operator operator) { super(); this.operator = operator; - this.computationTitle = computationTitle; - this.computationDescription = computationDescription; } @Override @@ -66,19 +61,10 @@ public class ComputationReadyEvent extends return 
operator; } - public String getComputationTitle() { - return computationTitle; - } - - public String getComputationDescription() { - return computationDescription; - } - + @Override public String toString() { - return "ComputationReadyEvent [operator=" + operator - + ", computationTitle=" + computationTitle - + ", computationDescription=" + computationDescription + "]"; + return "ComputationReadyEvent [operator=" + operator + "]"; } } diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/client/events/StartComputationExecutionRequestEvent.java b/src/main/java/org/gcube/portlets/user/dataminermanager/client/events/StartComputationExecutionRequestEvent.java index c582436..e77c39c 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/client/events/StartComputationExecutionRequestEvent.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/client/events/StartComputationExecutionRequestEvent.java @@ -21,24 +21,22 @@ public class StartComputationExecutionRequestEvent public static Type TYPE = new Type(); private Operator op; - private String computationTitle; - private String computationDescription; private int computationStatusPanelIndex; - public interface StartComputationExecutionRequestEventHandler extends EventHandler { + public interface StartComputationExecutionRequestEventHandler extends + EventHandler { void onStart(StartComputationExecutionRequestEvent event); } - public interface HasStartComputationExecutionRequestEventHandler extends HasHandlers { + public interface HasStartComputationExecutionRequestEventHandler extends + HasHandlers { public HandlerRegistration addStartComputationExecutionRequestEventHandler( StartComputationExecutionRequestEventHandler handler); } - public StartComputationExecutionRequestEvent(Operator op, String computationTitle, - String computationDescription, int computationStatusPanelIndex) { + public StartComputationExecutionRequestEvent(Operator op, + int computationStatusPanelIndex) { this.op = op; - this.computationTitle = computationTitle; - this.computationDescription = computationDescription; this.computationStatusPanelIndex = computationStatusPanelIndex; } @@ -66,14 +64,6 @@ public class StartComputationExecutionRequestEvent return op; } - public String getComputationTitle() { - return computationTitle; - } - - public String getComputationDescription() { - return computationDescription; - } - public int getComputationStatusPanelIndex() { return computationStatusPanelIndex; } @@ -81,12 +71,8 @@ public class StartComputationExecutionRequestEvent @Override public String toString() { return "StartComputationExecutionRequestEvent [op=" + op - + ", computationTitle=" + computationTitle - + ", computationDescription=" + computationDescription + ", computationStatusPanelIndex=" + computationStatusPanelIndex + "]"; } - - } diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/client/experiments/ComputationExecutionPanel.java b/src/main/java/org/gcube/portlets/user/dataminermanager/client/experiments/ComputationExecutionPanel.java index 2b2fe80..d20ae1c 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/client/experiments/ComputationExecutionPanel.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/client/experiments/ComputationExecutionPanel.java @@ -72,29 +72,33 @@ public class ComputationExecutionPanel extends FramedPanel { } private void bind() { - EventBusProvider.INSTANCE.addHandler( - ResubmitComputationExecutionEvent.getType(), - new 
ResubmitComputationExecutionEvent.ResubmitComputationExecutionEventHandler() { - @Override - public void onResubmit(ResubmitComputationExecutionEvent event) { - resubmitComputation(event); - } - }); + EventBusProvider.INSTANCE + .addHandler( + ResubmitComputationExecutionEvent.getType(), + new ResubmitComputationExecutionEvent.ResubmitComputationExecutionEventHandler() { + @Override + public void onResubmit( + ResubmitComputationExecutionEvent event) { + resubmitComputation(event); + } + }); - EventBusProvider.INSTANCE.addHandler(StartComputationExecutionEvent.getType(), - new StartComputationExecutionEvent.StartComputationExecutionEventHandler() { + EventBusProvider.INSTANCE + .addHandler( + StartComputationExecutionEvent.getType(), + new StartComputationExecutionEvent.StartComputationExecutionEventHandler() { - @Override - public void onStart(StartComputationExecutionEvent event) { - startComputation(event); + @Override + public void onStart( + StartComputationExecutionEvent event) { + startComputation(event); - } + } - }); + }); } - public void startNewComputation(final Operator operator, - String computationTitle, String computationDescription) { + public void startNewComputation(final Operator operator) { Log.debug("Computation Panel: start new computation "); ComputationStatusPanel statusPanel = new ComputationStatusPanel( operator); @@ -108,7 +112,7 @@ public class ComputationExecutionPanel extends FramedPanel { forceLayout(); StartComputationExecutionRequestEvent event = new StartComputationExecutionRequestEvent( - operator, computationTitle, computationDescription, index); + operator, index); EventBusProvider.INSTANCE.fireEvent(event); } diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/client/experiments/ComputationPanel.java b/src/main/java/org/gcube/portlets/user/dataminermanager/client/experiments/ComputationPanel.java index 2363df9..bf5870a 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/client/experiments/ComputationPanel.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/client/experiments/ComputationPanel.java @@ -124,13 +124,12 @@ public class ComputationPanel extends FramedPanel implements HasComputationReady computationParametersPanel = new ComputationParametersPanel(operator); computationParametersPanel.setHandler(new ComputationParametersPanelHandler() { @Override - public void startComputation(String computationTitle, - String computationDescription) { + public void startComputation() { if (computationParametersPanel != null) { forceLayout(); computationParametersPanel.updateOperatorParametersValues(); Operator op = computationParametersPanel.getOperator(); - ComputationReadyEvent event = new ComputationReadyEvent(op, computationTitle, computationDescription); + ComputationReadyEvent event = new ComputationReadyEvent(op); fireEvent(event); } diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/client/experiments/ComputationParametersPanel.java b/src/main/java/org/gcube/portlets/user/dataminermanager/client/experiments/ComputationParametersPanel.java index 31e8409..a980fe7 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/client/experiments/ComputationParametersPanel.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/client/experiments/ComputationParametersPanel.java @@ -3,7 +3,6 @@ */ package org.gcube.portlets.user.dataminermanager.client.experiments; -import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -24,8 +23,6 @@ 
import org.gcube.portlets.user.dataminermanager.client.util.UtilsGXT3; import com.allen_sauer.gwt.log.client.Log; import com.google.gwt.core.client.GWT; import com.google.gwt.dom.client.Style.Unit; -import com.google.gwt.i18n.client.DateTimeFormat; -import com.google.gwt.i18n.client.DateTimeFormat.PredefinedFormat; import com.google.gwt.resources.client.ImageResource; import com.google.gwt.user.client.rpc.AsyncCallback; import com.google.gwt.user.client.ui.Image; @@ -40,7 +37,6 @@ import com.sencha.gxt.widget.core.client.event.SelectEvent; import com.sencha.gxt.widget.core.client.event.SelectEvent.SelectHandler; import com.sencha.gxt.widget.core.client.form.FieldSet; import com.sencha.gxt.widget.core.client.form.FormPanel; -import com.sencha.gxt.widget.core.client.form.TextField; /** * @@ -51,8 +47,7 @@ import com.sencha.gxt.widget.core.client.form.TextField; public class ComputationParametersPanel extends SimpleContainer { public interface ComputationParametersPanelHandler { - public void startComputation(String computationTitle, - String computationDescription); + public void startComputation(); } private static final String START_BUTTON_TOOLTIP = "Start Computation"; @@ -67,8 +62,6 @@ public class ComputationParametersPanel extends SimpleContainer { private VerticalLayoutContainer vParameters; private Map fieldWidgetsMap; private ComputationParametersPanelHandler handler = null; - private TextField titleField; - private String defaultComputationTitle; private TextButton submit; @@ -77,7 +70,6 @@ public class ComputationParametersPanel extends SimpleContainer { this.operator = operator; fieldWidgetsMap = new HashMap<>(); try { - setDefaultComputationTitle(); init(); create(); } catch (Throwable e) { @@ -164,10 +156,7 @@ public class ComputationParametersPanel extends SimpleContainer { @Override public void onSelect(SelectEvent event) { if (handler != null && parametersPanel.isValid()) { - String value = titleField.getValue(); - String title = (value == null || value.contentEquals("")) ? 
defaultComputationTitle - : value; - handler.startComputation(title, title); // TODO insert + handler.startComputation(); // TODO insert // description } @@ -303,11 +292,6 @@ public class ComputationParametersPanel extends SimpleContainer { this.handler = handler; } - public void setDefaultComputationTitle() { - String name = this.operator.getName(); - String date = DateTimeFormat - .getFormat(PredefinedFormat.DATE_TIME_SHORT).format(new Date()); - defaultComputationTitle = name + "-" + date; - } + } diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/client/experiments/WorkflowPanel.java b/src/main/java/org/gcube/portlets/user/dataminermanager/client/experiments/WorkflowPanel.java index 0f43d07..d2dbd71 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/client/experiments/WorkflowPanel.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/client/experiments/WorkflowPanel.java @@ -23,7 +23,6 @@ public class WorkflowPanel extends TabPanel { public static final String DEFAULT_OPERATOR = "AQUAMAPS_SUITABLE"; - private ComputationExecutionPanel computationExecutionPanel; private ComputationPanel computationPanel; @@ -44,8 +43,8 @@ public class WorkflowPanel extends TabPanel { private void create() { - TabItemConfig tabWorkFlowLcItemConf = new TabItemConfig( - ".: Operator", false); + TabItemConfig tabWorkFlowLcItemConf = new TabItemConfig(".: Operator", + false); tabWorkFlowLcItemConf.setIcon(DataMinerManager.resources .folderExplore()); computationPanel = new ComputationPanel(); @@ -54,9 +53,8 @@ public class WorkflowPanel extends TabPanel { @Override public void onReady(ComputationReadyEvent event) { - Log.debug("StartComputationEvent Received:"+event); - startComputation(event.getOperator(), event.getComputationTitle(), - event.getComputationDescription()); + Log.debug("StartComputationEvent Received:" + event); + startComputation(event.getOperator()); } }); @@ -76,21 +74,22 @@ public class WorkflowPanel extends TabPanel { * */ private void bind() { - EventBusProvider.INSTANCE.addHandler( - ResubmitComputationExecutionEvent.getType(), - new ResubmitComputationExecutionEvent.ResubmitComputationExecutionEventHandler() { - @Override - public void onResubmit( - ResubmitComputationExecutionEvent event) { - resubmitComputation(); - } - }); + EventBusProvider.INSTANCE + .addHandler( + ResubmitComputationExecutionEvent.getType(), + new ResubmitComputationExecutionEvent.ResubmitComputationExecutionEventHandler() { + @Override + public void onResubmit( + ResubmitComputationExecutionEvent event) { + resubmitComputation(); + } + }); } - + /** * */ - private void resubmitComputation(){ + private void resubmitComputation() { setActiveWidget(computationExecutionPanel); } @@ -99,13 +98,10 @@ public class WorkflowPanel extends TabPanel { * @param operator * */ - private void startComputation(Operator op, String computationTitle, - String computationDescription) { + private void startComputation(Operator op) { setActiveWidget(computationExecutionPanel); - computationExecutionPanel.startNewComputation(op, computationTitle, - computationDescription); - - + computationExecutionPanel.startNewComputation(op); + } public void addOperator(Operator op) { @@ -113,8 +109,5 @@ public class WorkflowPanel extends TabPanel { computationPanel.addOperator(op); } - - - - + } diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/BooleanFld.java b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/BooleanFld.java index 
0488a8c..6a2101b 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/BooleanFld.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/BooleanFld.java @@ -52,7 +52,7 @@ public class BooleanFld extends AbstractFld { descr.addStyleName("workflow-fieldDescription"); } else { - checkBox.setToolTip(p.getDescription()); + //checkBox.setToolTip(p.getDescription()); descr = new HtmlLayoutContainer("
" + p.getDescription() + "
"); descr.addStyleName("workflow-fieldDescription"); diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/ColumnFld.java b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/ColumnFld.java index df409f1..6d460b2 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/ColumnFld.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/ColumnFld.java @@ -81,7 +81,7 @@ public class ColumnFld extends AbstractFld implements descr.addStyleName("workflow-fieldDescription"); } else { - comboBox.setToolTip(columnParameter.getDescription()); + //comboBox.setToolTip(columnParameter.getDescription()); descr = new HtmlLayoutContainer("
" + columnParameter.getDescription() + "
"); descr.addStyleName("workflow-fieldDescription"); diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/ColumnListFld.java b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/ColumnListFld.java index c0dcd2b..218c733 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/ColumnListFld.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/ColumnListFld.java @@ -74,7 +74,7 @@ public class ColumnListFld extends AbstractFld implements descr.addStyleName("workflow-fieldDescription"); } else { - grid.setToolTip(columnListParameter.getDescription()); + //grid.setToolTip(columnListParameter.getDescription()); descr = new HtmlLayoutContainer("
" + columnListParameter.getDescription() + "
"); descr.addStyleName("workflow-fieldDescription"); diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/DoubleFld.java b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/DoubleFld.java index 9a13f36..c0f9fa3 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/DoubleFld.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/DoubleFld.java @@ -50,7 +50,7 @@ public class DoubleFld extends AbstractFld { descr.addStyleName("workflow-fieldDescription"); } else { - numberField.setToolTip(p.getDescription()); + //numberField.setToolTip(p.getDescription()); descr = new HtmlLayoutContainer("
" + p.getDescription() + "
"); descr.addStyleName("workflow-fieldDescription"); diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/EnumFld.java b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/EnumFld.java index ce0791b..25a87bb 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/EnumFld.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/EnumFld.java @@ -57,7 +57,7 @@ public class EnumFld extends AbstractFld { descr.addStyleName("workflow-fieldDescription"); } else { - listBox.setToolTip(p.getDescription()); + //listBox.setToolTip(p.getDescription()); descr = new HtmlLayoutContainer("
" + p.getDescription() + "
"); descr.addStyleName("workflow-fieldDescription"); diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/FloatFld.java b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/FloatFld.java index 2cf1e3c..a3b0d5e 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/FloatFld.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/FloatFld.java @@ -50,7 +50,7 @@ public class FloatFld extends AbstractFld { descr.addStyleName("workflow-fieldDescription"); } else { - numberField.setToolTip(p.getDescription()); + //numberField.setToolTip(p.getDescription()); descr = new HtmlLayoutContainer("
" + p.getDescription() + "
"); descr.addStyleName("workflow-fieldDescription"); diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/IntFld.java b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/IntFld.java index 370366c..5f86ae6 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/IntFld.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/IntFld.java @@ -50,7 +50,7 @@ public class IntFld extends AbstractFld { descr.addStyleName("workflow-fieldDescription"); } else { - numberField.setToolTip(p.getDescription()); + //numberField.setToolTip(p.getDescription()); descr = new HtmlLayoutContainer("
" + p.getDescription() + "
"); descr.addStyleName("workflow-fieldDescription"); diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/ListIntFld.java b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/ListIntFld.java index ceaaf89..cfd8022 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/ListIntFld.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/ListIntFld.java @@ -66,7 +66,7 @@ public class ListIntFld extends AbstractFld { descr.addStyleName("workflow-fieldDescription"); } else { - listContainer.setToolTip(listParameter.getDescription()); + //listContainer.setToolTip(listParameter.getDescription()); descr = new HtmlLayoutContainer("
" + listParameter.getDescription() + "
"); descr.addStyleName("workflow-fieldDescription"); diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/OperatorFieldWidget.java b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/OperatorFieldWidget.java index e0499ca..00d6608 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/OperatorFieldWidget.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/OperatorFieldWidget.java @@ -32,7 +32,7 @@ public class OperatorFieldWidget { try { if (p.isObject()) - field = createObjectField((ObjectParameter) p); + field = createObjectField(p); else if (p.isEnum()) field = new EnumFld(p); else if (p.isTabular()) @@ -44,7 +44,7 @@ public class OperatorFieldWidget { else if (p.isColumnList()) field = new ColumnListFld(p); else if (p.isList()) - field = createListField((ListParameter) p); + field = createListField(p); else if (p.isFile()) field = new FileFld(p); @@ -88,18 +88,19 @@ public class OperatorFieldWidget { * @param p * @return */ - private AbstractFld createObjectField(ObjectParameter p) { - String type = p.getType(); + private AbstractFld createObjectField(Parameter p) { + ObjectParameter objectParameter=(ObjectParameter) p; + String type = objectParameter.getType(); if (type.contentEquals(Integer.class.getName())) { - return new IntFld(p); + return new IntFld(objectParameter); } else if (type.contentEquals(String.class.getName())) { - return new StringFld(p); + return new StringFld(objectParameter); } else if (type.contentEquals(Boolean.class.getName())) { - return new BooleanFld(p); + return new BooleanFld(objectParameter); } else if (type.contentEquals(Double.class.getName())) { - return new DoubleFld(p); + return new DoubleFld(objectParameter); } else if (type.contentEquals(Float.class.getName())) { - return new FloatFld(p); + return new FloatFld(objectParameter); } else return null; } @@ -107,15 +108,16 @@ public class OperatorFieldWidget { /** * */ - private AbstractFld createListField(ListParameter p) { - String type = p.getType(); + private AbstractFld createListField(Parameter p) { + ListParameter listParameter=(ListParameter) p; + String type = listParameter.getType(); if (type.contentEquals(String.class.getName()) || type.contentEquals("STRING")) { // TODO REMOVE "STRING" - return new ListStringFld(p); + return new ListStringFld(listParameter); } else if (type.contentEquals(Integer.class.getName()) || type.contentEquals("NUMBER")) { - return new ListIntFld(p); + return new ListIntFld(listParameter); } // } else if (type.contentEquals(Boolean.class.getName())) { // return new ListBooleanField(p); diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/StringFld.java b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/StringFld.java index 461ec59..bbc2eec 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/StringFld.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/StringFld.java @@ -49,7 +49,7 @@ public class StringFld extends AbstractFld { descr.addStyleName("workflow-fieldDescription"); } else { - textField.setToolTip(p.getDescription()); + //textField.setToolTip(p.getDescription()); descr=new HtmlLayoutContainer("
"+p.getDescription()+"
"); descr.addStyleName("workflow-fieldDescription"); } diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/TabularListFld.java b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/TabularListFld.java index 0a2803a..98ce17c 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/TabularListFld.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/TabularListFld.java @@ -4,31 +4,55 @@ package org.gcube.portlets.user.dataminermanager.client.parametersfield; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; +import org.apache.commons.digester.SetRootRule; import org.gcube.portlets.user.dataminermanager.client.DataMinerManager; import org.gcube.portlets.user.dataminermanager.client.bean.parameters.Parameter; import org.gcube.portlets.user.dataminermanager.client.bean.parameters.TabularListParameter; import org.gcube.portlets.user.dataminermanager.client.bean.parameters.TabularParameter; +import org.gcube.portlets.user.dataminermanager.client.rpc.DataMinerPortletServiceAsync; +import org.gcube.portlets.user.dataminermanager.client.util.UtilsGXT3; +import org.gcube.portlets.user.dataminermanager.shared.data.TableItemSimple; +import org.gcube.portlets.user.dataminermanager.shared.exception.ExpiredSessionServiceException; +import org.gcube.portlets.widgets.wsexplorer.client.notification.WorkspaceExplorerSelectNotification.WorskpaceExplorerSelectNotificationListener; +import org.gcube.portlets.widgets.wsexplorer.client.select.WorkspaceExplorerSelectDialog; +import org.gcube.portlets.widgets.wsexplorer.shared.Item; +import org.gcube.portlets.widgets.wsexplorer.shared.ItemType; import com.allen_sauer.gwt.log.client.Log; +import com.google.gwt.user.client.rpc.AsyncCallback; import com.google.gwt.user.client.ui.Widget; +import com.sencha.gxt.core.client.dom.XDOM; +import com.sencha.gxt.core.client.util.Margins; import com.sencha.gxt.widget.core.client.button.TextButton; -import com.sencha.gxt.widget.core.client.container.HorizontalLayoutContainer; +import com.sencha.gxt.widget.core.client.container.BoxLayoutContainer.BoxLayoutData; +import com.sencha.gxt.widget.core.client.container.BoxLayoutContainer.BoxLayoutPack; +import com.sencha.gxt.widget.core.client.container.HBoxLayoutContainer; +import com.sencha.gxt.widget.core.client.container.HtmlLayoutContainer; +import com.sencha.gxt.widget.core.client.container.MarginData; +import com.sencha.gxt.widget.core.client.container.SimpleContainer; import com.sencha.gxt.widget.core.client.container.VerticalLayoutContainer; +import com.sencha.gxt.widget.core.client.container.VerticalLayoutContainer.VerticalLayoutData; import com.sencha.gxt.widget.core.client.event.SelectEvent; +import com.sencha.gxt.widget.core.client.form.TextField; /** * - * @author Giancarlo Panichi - * email: g.panichi@isti.cnr.it + * @author Giancarlo Panichi email: g.panichi@isti.cnr.it * */ public class TabularListFld extends AbstractFld { - private List items = new ArrayList(); + private SimpleContainer fieldContainer; + private HBoxLayoutContainer horiz; + private SimpleContainer listContainer; + private List items; private VerticalLayoutContainer vp; private TabularListParameter tabularListParameter; + private WorkspaceExplorerSelectDialog wselectDialog; /** * @param parameter @@ -36,48 +60,96 @@ public class TabularListFld extends AbstractFld { public TabularListFld(Parameter parameter) { super(parameter); 
Log.debug("TabularListField"); - this.tabularListParameter = (TabularListParameter) parameter; + + tabularListParameter = (TabularListParameter) parameter; + vp = new VerticalLayoutContainer(); - addField(null); - } + items = new ArrayList<>(); - private void addField(Item upperItem) { + listContainer = new SimpleContainer(); + listContainer.add(vp, new MarginData(new Margins(0))); - TabularParameter tabPar = new TabularParameter( - tabularListParameter.getName(), - tabularListParameter.getDescription(),null, - tabularListParameter.getTemplates()); + /* + * List templates = tabularListParameter.getTemplates(); String + * list = ""; boolean firstTemplate = true; for (String template : + * templates) { list += (firstTemplate ? "" : ", ") + + * Format.ellipse(template,50); firstTemplate = false; } + * HtmlLayoutContainer templatesList = new + * HtmlLayoutContainer("
Suitable Data Set Templates:" + + * list+"
"); + * templatesList.addStyleName("workflow-parameters-description"); + */ - if (upperItem == null) { - Item item = new Item(tabPar, true); - items.add(item); - vp.add(item); + fieldContainer = new SimpleContainer(); + // fieldContainer.setResize(true); + horiz = new HBoxLayoutContainer(); + horiz.setPack(BoxLayoutPack.START); + horiz.setEnableOverflow(false); + + HtmlLayoutContainer descr; + + if (tabularListParameter.getDescription() == null) { + descr = new HtmlLayoutContainer("
"); + descr.addStyleName("workflow-fieldDescription"); } else { - // search the position of the upper item - int pos = 0; - for (int i = 0; i < items.size(); i++) - if (items.get(i) == upperItem) { - pos = i; - break; - } - - upperItem.showCancelButton(); - Item item = new Item(tabPar, false); - items.add(pos + 1, item); - vp.insert(item, pos + 1); + /* listContainer.setToolTip(listParameter.getDescription()); */ + descr = new HtmlLayoutContainer("
" + + tabularListParameter.getDescription() + "

"); + descr.addStyleName("workflow-fieldDescription"); } - vp.forceLayout(); + horiz.add(listContainer, new BoxLayoutData(new Margins(0))); + horiz.add(descr, new BoxLayoutData(new Margins(0))); + + fieldContainer.add(horiz, new MarginData(new Margins(0))); + addField(null); + + + fieldContainer.forceLayout(); + + } + + private void addField(TabItem upperItem) { + try { + + TabularParameter tabPar = new TabularParameter( + tabularListParameter.getName(), + tabularListParameter.getDescription(), null, + tabularListParameter.getTemplates()); + + if (upperItem == null) { + TabItem item = new TabItem(tabPar, true); + items.add(item); + vp.add(item, new VerticalLayoutData(1, -1, new Margins(0))); + } else { + // search the position of the upper item + int pos = items.indexOf(upperItem); + if (pos > -1) { + upperItem.showCancelButton(); + TabItem item = new TabItem(tabPar, false); + items.add(pos + 1, item); + vp.insert(item, pos + 1, new VerticalLayoutData(1, -1, + new Margins(0))); + } else { + TabItem item = new TabItem(tabPar, true); + items.add(item); + vp.add(item, new VerticalLayoutData(1, -1, new Margins(0))); + } + } + + } catch (Throwable e) { + Log.error(e.getLocalizedMessage()); + e.printStackTrace(); + } } /** * @param item */ - protected void removeField(Item item) { + private void removeField(TabItem item) { items.remove(item); vp.remove(item); - vp.forceLayout(); if (items.size() == 1) { items.get(0).hideCancelButton(); @@ -93,7 +165,7 @@ public class TabularListFld extends AbstractFld { String separator = tabularListParameter.getSeparator(); String value = ""; boolean first = true; - for (Item item : items) { + for (TabItem item : items) { String itemValue = item.getValue(); if (itemValue != null && !itemValue.contentEquals("")) { value += (first ? 
"" : separator) + itemValue; @@ -108,84 +180,251 @@ public class TabularListFld extends AbstractFld { */ @Override public Widget getWidget() { - return vp; + return fieldContainer; } - /** - * - */ @Override public boolean isValid() { boolean valid = false; - for (Item item : items) - if (item.getField().getValue() != null) { + for (TabItem item : items) + if (item.isValid()) { valid = true; break; } return valid; } - private class Item extends HorizontalLayoutContainer { + private class TabItem extends HBoxLayoutContainer { + + private TextButton selectButton, selectButton2; + private TableItemSimple selectedTableItem = null; + private TextButton addBtn; + private TextButton removeBtn; + private TextField tableDescription; + + /** + * @param objPar + */ + public TabItem(TabularParameter tabularParameter, boolean first) { + super(); + create(tabularParameter, first); + initDialog(); + + } + + private void create(TabularParameter tabularParameter, boolean first) { + + tableDescription = new TextField(); + tableDescription.setReadOnly(true); + //tableDescription.setVisible(false); + + selectButton = new TextButton("Select Data Set"); + selectButton.addSelectHandler(new SelectEvent.SelectHandler() { + + @Override + public void onSelect(SelectEvent event) { + wselectDialog.show(); + } + }); + + selectButton.setIcon(DataMinerManager.resources.folderExplore()); + selectButton.setToolTip("Select Data Set"); + + selectButton2 = new TextButton(""); + selectButton2.addSelectHandler(new SelectEvent.SelectHandler() { + + @Override + public void onSelect(SelectEvent event) { + wselectDialog.show(); + } + }); + + selectButton2.setIcon(DataMinerManager.resources.folderExplore()); + selectButton2.setToolTip("Select Another Data Set"); + selectButton2.setVisible(false); + + + addBtn = new TextButton(""); + + addBtn.setIcon(DataMinerManager.resources.add()); + + addBtn.addSelectHandler(new SelectEvent.SelectHandler() { + + @Override + public void onSelect(SelectEvent event) { + addField(TabItem.this); + fieldContainer.forceLayout(); + + } + }); + + removeBtn = new TextButton(""); + + removeBtn.setIcon(DataMinerManager.resources.cancel()); + + removeBtn.addSelectHandler(new SelectEvent.SelectHandler() { + + @Override + public void onSelect(SelectEvent event) { + selectedTableItem = null; + removeField(TabItem.this); + fieldContainer.forceLayout(); + + } + }); + + removeBtn.setVisible(!first); + + setPack(BoxLayoutPack.START); + setEnableOverflow(false); + + add(tableDescription, new BoxLayoutData(new Margins(0))); + add(selectButton, new BoxLayoutData(new Margins(0))); + add(selectButton2, new BoxLayoutData(new Margins(0))); + add(addBtn, new BoxLayoutData(new Margins(0))); + add(removeBtn, new BoxLayoutData(new Margins(0))); + + + } + + private void initDialog() { + + List selectableTypes = new ArrayList(); + selectableTypes.add(ItemType.EXTERNAL_FILE); + List showableTypes = new ArrayList(); + showableTypes.addAll(Arrays.asList(ItemType.values())); + + /* + * "application/zip", "application/x-zip", + * "application/x-zip-compressed", "application/octet-stream", + * "application/x-compress", "application/x-compressed", + * "multipart/x-zip" + */ + // List allowedMimeTypes = + // Arrays.asList("text/csv","text/plain","text/plain; charset=ISO-8859-1"); + + /** + * "zip" + */ + + /* + * List allowedFileExtensions = Arrays.asList("csv"); + * + * FilterCriteria filterCriteria = new + * FilterCriteria(allowedMimeTypes, allowedFileExtensions, new + * HashMap()); + */ + wselectDialog = new 
WorkspaceExplorerSelectDialog("Select CSV", + false); + // filterCriteria, selectableTypes); + + WorskpaceExplorerSelectNotificationListener handler = new WorskpaceExplorerSelectNotificationListener() { + + @Override + public void onSelectedItem(Item item) { + + if (item.isFolder() || item.isRoot()) { + UtilsGXT3.info("Attention", "Select a valid csv!"); + + } else { + TabItem.this.retrieveTableInformation(item); + + } + + } + + @Override + public void onFailed(Throwable throwable) { + Log.error("Error in create project: " + + throwable.getLocalizedMessage()); + UtilsGXT3.alert("Error", throwable.getLocalizedMessage()); + throwable.printStackTrace(); + } + + @Override + public void onAborted() { + + } + + @Override + public void onNotValidSelection() { + UtilsGXT3.info("Attention", "Select a valid csv!"); + } + }; + + wselectDialog + .addWorkspaceExplorerSelectNotificationListener(handler); + wselectDialog.setZIndex(XDOM.getTopZIndex()); + + } + + private void retrieveTableInformation(Item item) { + DataMinerPortletServiceAsync.INSTANCE.retrieveTableInformation( + item, new AsyncCallback() { + + @Override + public void onFailure(Throwable caught) { + Log.error("Error in retrieveTableInformation " + + caught.getMessage()); + if (caught instanceof ExpiredSessionServiceException) { + UtilsGXT3.alert("Error", "Expired Session"); + + } else { + UtilsGXT3.alert("Error", + "Error retrieving table information: " + + caught.getLocalizedMessage()); + } + + } + + @Override + public void onSuccess(TableItemSimple result) { + Log.debug("Retrieved: " + result); + selectedTableItem = result; + showFieldWithSelection(); + + } + }); + } - private TabularFld field; - private TextButton addButton; - - private TextButton removeButton; - - /** * - * @param tabularParameter - * @param first */ - public Item(TabularParameter tabularParameter, boolean first) { - super(); - this.field = new TabularFld(tabularParameter); - this.add(field.getWidget()); - - addButton= new TextButton(""); - addButton.setIcon(DataMinerManager.resources.add()); - addButton.addSelectHandler(new SelectEvent.SelectHandler() { - - @Override - public void onSelect(SelectEvent event) { - addField(Item.this); + private void showFieldWithSelection() { + try { + String tableName = selectedTableItem.getName(); + + if (tableName == null || tableName.isEmpty()) { + tableName = "NoName"; } - }); - - removeButton= new TextButton(""); - removeButton.setIcon(DataMinerManager.resources.cancel()); + tableDescription.setValue(tableName); + selectButton.setVisible(false); + selectButton2.setVisible(true); - removeButton.addSelectHandler(new SelectEvent.SelectHandler() { - - @Override - public void onSelect(SelectEvent event) { - removeField(Item.this); - } - }); - - removeButton.setVisible(!first); - - this.add(addButton); - this.add(removeButton); + } catch (Throwable e) { + Log.error(e.getLocalizedMessage()); + e.printStackTrace(); + } } public void showCancelButton() { - removeButton.setVisible(true); + removeBtn.setVisible(true); } public void hideCancelButton() { - removeButton.setVisible(false); + removeBtn.setVisible(false); } public String getValue() { - return field.getValue(); + return (selectedTableItem == null) ? 
null : selectedTableItem + .getId(); } - public TabularFld getField() { - return field; + public boolean isValid() { + return (selectedTableItem != null); } + } } diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/client/rpc/DataMinerPortletService.java b/src/main/java/org/gcube/portlets/user/dataminermanager/client/rpc/DataMinerPortletService.java index d5fa12a..ff434b5 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/client/rpc/DataMinerPortletService.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/client/rpc/DataMinerPortletService.java @@ -61,8 +61,7 @@ public interface DataMinerPortletService extends RemoteService { public List getParameters(Operator operator) throws Exception; - public ComputationId startComputation(Operator op, String computationTitle, - String computationDescription) throws Exception; + public ComputationId startComputation(Operator op) throws Exception; public ComputationStatus getComputationStatus(ComputationId computationId) throws Exception; diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/client/rpc/DataMinerPortletServiceAsync.java b/src/main/java/org/gcube/portlets/user/dataminermanager/client/rpc/DataMinerPortletServiceAsync.java index 9c2c462..2309496 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/client/rpc/DataMinerPortletServiceAsync.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/client/rpc/DataMinerPortletServiceAsync.java @@ -67,9 +67,7 @@ public interface DataMinerPortletServiceAsync { void getParameters(Operator operator, AsyncCallback> callback); - void startComputation(Operator op, String computationTitle, - String computationDescription, - AsyncCallback asyncCallback); + void startComputation(Operator op, AsyncCallback asyncCallback); void getComputationStatus(ComputationId computationId, AsyncCallback asyncCallback); diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/server/DataMinerManagerServiceImpl.java b/src/main/java/org/gcube/portlets/user/dataminermanager/server/DataMinerManagerServiceImpl.java index 89c9d2e..1d839e9 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/server/DataMinerManagerServiceImpl.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/server/DataMinerManagerServiceImpl.java @@ -140,18 +140,14 @@ public class DataMinerManagerServiceImpl extends RemoteServiceServlet implements } @Override - public ComputationId startComputation(Operator operator, - String computationTitle, String computationDescription) + public ComputationId startComputation(Operator operator) throws Exception { try { HttpSession session = this.getThreadLocalRequest().getSession(); ASLSession aslSession = SessionUtil.getASLSession(session); - logger.debug("StartComputation(): [computationTitle=" - + computationTitle + ", computationDescription=" - + computationDescription + ", operator=" + operator + "]"); + logger.debug("StartComputation(): [ operator=" + operator + "]"); SClient smClient = SessionUtil.getSClient(session, aslSession); - return smClient.startComputation(operator, computationTitle, - computationDescription); + return smClient.startComputation(operator); } catch (Throwable e) { logger.error("Error in start computation: " diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/server/smservice/SClient.java b/src/main/java/org/gcube/portlets/user/dataminermanager/server/smservice/SClient.java index 70ee66f..e5af02e 100644 --- 
a/src/main/java/org/gcube/portlets/user/dataminermanager/server/smservice/SClient.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/server/smservice/SClient.java @@ -2,7 +2,6 @@ package org.gcube.portlets.user.dataminermanager.server.smservice; import java.util.List; -import org.gcube.portlets.user.dataminermanager.client.bean.ComputationItem; import org.gcube.portlets.user.dataminermanager.client.bean.ComputationStatus; import org.gcube.portlets.user.dataminermanager.client.bean.Operator; import org.gcube.portlets.user.dataminermanager.client.bean.OperatorsClassification; @@ -30,8 +29,7 @@ public abstract class SClient { public abstract List getInputParameters(Operator operator) throws Exception; - public abstract ComputationId startComputation(Operator operator, - String computationTitle, String computationDescription) + public abstract ComputationId startComputation(Operator operator) throws Exception; public abstract ComputationStatus getComputationStatus( diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/server/smservice/SClient4WPS.java b/src/main/java/org/gcube/portlets/user/dataminermanager/server/smservice/SClient4WPS.java index 73ce255..ee5f733 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/server/smservice/SClient4WPS.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/server/smservice/SClient4WPS.java @@ -667,8 +667,7 @@ public class SClient4WPS extends SClient { } @Override - public ComputationId startComputation(Operator operator, - String computationTitle, String computationDescription) + public ComputationId startComputation(Operator operator) throws Exception { ProcessInformations processInformations; try { @@ -697,8 +696,7 @@ public class SClient4WPS extends SClient { + parm.getValue()); } - String processUrl = compute(processInformations, userInputs, inputParameters, - computationTitle, computationDescription); + String processUrl = compute(processInformations, userInputs, inputParameters); logger.debug("Stated Computation ProcessLocation:" + processUrl); int idIndex = processUrl.lastIndexOf("?id="); @@ -718,8 +716,7 @@ public class SClient4WPS extends SClient { } private String compute(ProcessInformations processInformations, Map userInputs, - Map inputParameters, String computationTitle, - String computationDescription) throws Exception { + Map inputParameters) throws Exception { try { // setup the inputs org.n52.wps.client.ExecuteRequestBuilder executeBuilder = new org.n52.wps.client.ExecuteRequestBuilder( diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/server/smservice/wps/WPS2SM.java b/src/main/java/org/gcube/portlets/user/dataminermanager/server/smservice/wps/WPS2SM.java index 1fec287..5f5a5a8 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/server/smservice/wps/WPS2SM.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/server/smservice/wps/WPS2SM.java @@ -24,13 +24,12 @@ import org.gcube.portlets.user.dataminermanager.client.bean.parameters.FileParam import org.gcube.portlets.user.dataminermanager.client.bean.parameters.ListParameter; import org.gcube.portlets.user.dataminermanager.client.bean.parameters.ObjectParameter; import org.gcube.portlets.user.dataminermanager.client.bean.parameters.Parameter; +import org.gcube.portlets.user.dataminermanager.client.bean.parameters.TabularListParameter; import org.gcube.portlets.user.dataminermanager.client.bean.parameters.TabularParameter; import 
org.gcube.portlets.user.dataminermanager.shared.exception.ServiceException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.itextpdf.text.pdf.PatternColor; - public class WPS2SM { private final static String SEPARATOR = "|"; @@ -132,15 +131,17 @@ public class WPS2SM { logger.debug("Machter group: " + matcher.group()); logger.debug("Machter start: " + matcher.start()); logger.debug("Machter end: " + matcher.end()); - logger.debug("Machter Group Count: " + matcher.groupCount()); + logger.debug("Machter Group Count: " + + matcher.groupCount()); String referredTabularParameterName = matcher.group(1); logger.debug("Matcher referredTabularParameterName: " + referredTabularParameterName); - + converted = new ColumnParameter(id, title, referredTabularParameterName, defaultValue); } else { - if (title.contains("[a sequence of values separated by ")) { + if (title + .contains("[a sequence of values separated by ")) { Pattern pattern = Pattern .compile("a sequence of values separated by (\\p{ASCII})"); Matcher matcher = pattern.matcher(title); @@ -149,8 +150,9 @@ public class WPS2SM { logger.debug("Machter group: " + matcher.group()); logger.debug("Machter start: " + matcher.start()); logger.debug("Machter end: " + matcher.end()); - logger.debug("Machter Group Count: " + matcher.groupCount()); - + logger.debug("Machter Group Count: " + + matcher.groupCount()); + String separator = matcher.group(1); logger.debug("Matcher separator: " + separator); @@ -158,7 +160,8 @@ public class WPS2SM { guessedType, separator); } else { converted = new ObjectParameter(id, title, - guessPrimitiveType(guessedType), defaultValue); + guessPrimitiveType(guessedType), + defaultValue); } } } @@ -204,19 +207,49 @@ public class WPS2SM { // rebuild title title = buildParameterDescription(title, maxMegaBytes, null, minOcc, maxOcc, null); - if ((maxOcc == 1) || (maxOcc < 0) || (maxOcc == 0)) + if ((maxOcc == 1) || (maxOcc < 0) || (maxOcc == 0)) { + if (title != null && !title.isEmpty()) { + if (title.contains("[a http link to a table")) { + converted = new TabularParameter(id, title, " ", + new ArrayList()); + } else { + if (title.contains("[a http link to a file")) { + converted = new FileParameter(id, title, mimeType); + } else { + if (title.contains("[a sequence of http links")) { + Pattern pattern = Pattern + .compile("\\[a sequence of http links separated by (\\p{ASCII}) , each indicating a table"); - if(title.contains("[a http link to a table")){ - converted = new TabularParameter(id, title, " ", - new ArrayList()); + Matcher matcher = pattern.matcher(title); + boolean match = false; + if (match = matcher.find()) { + logger.debug("Machter title: " + title); + logger.debug("Machter find: " + match); + logger.debug("Machter group: " + + matcher.group()); + logger.debug("Machter start: " + + matcher.start()); + logger.debug("Machter end: " + matcher.end()); + logger.debug("Machter Group Count: " + + matcher.groupCount()); + String separator = matcher.group(1); + logger.debug("Matcher separator: " + separator); + converted = new TabularListParameter(id, title, + separator); + } else { + converted = new FileParameter(id, title, mimeType); + } + } else { + converted = new FileParameter(id, title, mimeType); + } + } + } } else { converted = new FileParameter(id, title, mimeType); } - - else - converted = new ListParameter(id, title, String.class.getName(), - SEPARATOR); - + } else { + converted = new FileParameter(id, title, mimeType); + } return converted; } diff --git 
a/src/main/java/org/gcube/portlets/user/dataminermanager/shared/Constants.java b/src/main/java/org/gcube/portlets/user/dataminermanager/shared/Constants.java index f8a53b9..ddf319b 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/shared/Constants.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/shared/Constants.java @@ -1,7 +1,7 @@ package org.gcube.portlets.user.dataminermanager.shared; public class Constants { - public static final boolean DEBUG_MODE = false; + public static final boolean DEBUG_MODE = true; public static final boolean TEST_ENABLE = false; public static final String APPLICATION_ID = "org.gcube.portlets.user.dataminermanager.portlet.DataMinerManager"; @@ -10,11 +10,14 @@ public class Constants { public static final String DATA_MINER_LANG = "DataMinerLang"; public static final String DEFAULT_USER = "giancarlo.panichi"; + //public static final String DEFAULT_USER = "statistical.manager@gmail.com"; public static final String DEFAULT_ROLE = "OrganizationMember"; public final static String DEFAULT_SCOPE = "/gcube/devsec/devVRE"; // public final static String DEFAULT_USER = "test.user"; // public final static String DEFAULT_SCOPE = "/gcube/devNext"; // public final static String DEFAULT_SCOPE = "/gcube/devNext/NextNext"; + + public static final String SClient = "DataMinerClient"; public static final String DATA_MINER_SERVICE_NAME = "DataMiner"; @@ -23,18 +26,13 @@ public class Constants { public static final String TD_DATASOURCE_FACTORY_ID = "DataMinerManager"; - public static final int TIME_UPDATE_MONITOR = 5 * 1000; - public static final int TIME_UPDATE_JOBS_GRID = 10 * 1000; public static final int TIME_UPDATE_COMPUTATION_STATUS_PANEL = 10 * 1000;// 7*1000; - public static final String maskLoadingStyle = "x-mask-loading"; public final static String[] ClassificationNames = { "User Perspective" }; // "Computation Perspective"}; public final static String UserClassificationName = ClassificationNames[0]; // public final static String computationClassificationName = // classificationNames[1]; - public static final String RealFileTemplate = "ZZ-FILE"; - public static final String UserFileTemplate = "FILE"; // WPS Data Miner public static final String WPSServiceURL = "http://dataminer-d-d4s.d4science.org:80/wps/"; @@ -43,6 +41,9 @@ public class Constants { public static final String WPSToken = "4ccc2c35-60c9-4c9b-9800-616538d5d48b";// "d7a4076c-e8c1-42fe-81e0-bdecb1e8074a"; public static final String WPSUser = "gianpaolo.coro"; public static final String SESSION_TOKEN = "SESSION_TOKEN"; + + //public static final String WPSToken = "45943442-74ef-408b-be64-d26b42cf4c08 "; + //public static final String WPSUser = "statistical.manager@gmail.com"; diff --git a/src/main/resources/org/gcube/portlets/user/dataminermanager/dataminermanager.gwt.xml b/src/main/resources/org/gcube/portlets/user/dataminermanager/dataminermanager.gwt.xml index 4e7e04e..ba12527 100644 --- a/src/main/resources/org/gcube/portlets/user/dataminermanager/dataminermanager.gwt.xml +++ b/src/main/resources/org/gcube/portlets/user/dataminermanager/dataminermanager.gwt.xml @@ -41,18 +41,18 @@ - + /> - +
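
As a sanity check on the new WPS2SM branch above, here is a small, self-contained sketch of the title parsing it performs for multi-table inputs. The regular expression is the one added by the patch; the class name and the sample title are only illustrative.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class TabularListTitleCheck {

	// Same pattern the patch adds to WPS2SM for recognising multi-table inputs.
	private static final Pattern TABULAR_LIST_PATTERN = Pattern.compile(
			"\\[a sequence of http links separated by (\\p{ASCII}) , each indicating a table");

	public static void main(String[] args) {
		// Illustrative title; real titles come from the WPS process description.
		String title = "List of tables [a sequence of http links separated by | , each indicating a table]";
		Matcher matcher = TABULAR_LIST_PATTERN.matcher(title);
		if (matcher.find()) {
			// group(1) is the single-character separator ("|" here);
			// WPS2SM uses it to build a TabularListParameter.
			String separator = matcher.group(1);
			System.out.println("TabularListParameter separator: " + separator);
		} else {
			// Titles without the marker fall back to a FileParameter in the patch.
			System.out.println("No tabular-list marker found");
		}
	}
}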
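
The value TabularListFld hands back to the computation is the separator-joined list of the selected tables' identifiers. Below is a minimal sketch of that join, with hypothetical workspace item ids standing in for selectedTableItem.getId() and the separator assumed to come from TabularListParameter.getSeparator().

import java.util.Arrays;
import java.util.List;

public class TabularListValueSketch {

	// Mirrors TabularListFld.getValue(): concatenate the non-empty item values,
	// inserting the parameter separator between them and skipping empty slots.
	static String joinValues(List<String> itemValues, String separator) {
		StringBuilder value = new StringBuilder();
		boolean first = true;
		for (String itemValue : itemValues) {
			if (itemValue != null && !itemValue.isEmpty()) {
				value.append(first ? "" : separator).append(itemValue);
				first = false;
			}
		}
		return value.toString();
	}

	public static void main(String[] args) {
		// Hypothetical ids; a null entry stands for a row where no data set
		// has been selected yet.
		List<String> ids = Arrays.asList("item-1", null, "item-2");
		System.out.println(joinValues(ids, "|")); // prints item-1|item-2
	}
}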