diff --git a/log.txt b/log.txt
deleted file mode 100644
index 4e7bbc4..0000000
--- a/log.txt
+++ /dev/null
@@ -1,771 +0,0 @@
-0 [qtp684204552-34] DEBUG org.gcube.application.framework.accesslogger.library.impl.AccessLogger - Creating a message handling object in order to handle the message queue
-1 [qtp684204552-34] DEBUG org.gcube.application.framework.accesslogger.library.impl.AccessLogger - Constructing a new access logger. Create a new file if it does not exist for the current date
-86 [qtp684204552-34] INFO org.gcube.portlets.user.dataminermanager.server.DataMinerManagerServiceImpl - DataMinerManager started!
-128 [qtp684204552-34] INFO org.gcube.portlets.user.dataminermanager.server.util.SessionUtil - no user found in session, use test user
-152 [qtp684204552-34] DEBUG org.gcube.application.framework.core.session.ASLSession - Scope is null, returning null
-152 [qtp684204552-34] DEBUG org.gcube.application.framework.core.session.ASLSession - Getting security token: null in thread 34
-153 [qtp684204552-34] INFO org.gcube.application.framework.core.session.ASLSession - The scope about to set is: /gcube/devsec/devVRE
-154 [qtp684204552-34] INFO org.gcube.common.scope.impl.ScopeProviderScanner - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@40e8d656
-156 [qtp684204552-34] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 34
-156 [qtp684204552-34] INFO org.gcube.application.framework.core.session.ASLSession - Logging the entrance
-157 [qtp684204552-34] DEBUG org.gcube.application.framework.accesslogger.model.TemplateModel - A new entry line has been created. The entry is:
-157 [qtp684204552-34] DEBUG org.gcube.application.framework.accesslogger.model.TemplateModel - 2016-05-02 10:16:14, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre -
-157 [qtp684204552-34] INFO org.gcube.portlets.user.dataminermanager.server.util.SessionUtil - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE
-157 [qtp684204552-34] DEBUG org.gcube.portlets.user.dataminermanager.server.DataMinerManagerServiceImpl - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null]
-3229 [qtp684204552-33] INFO org.gcube.portlets.user.dataminermanager.server.util.SessionUtil - no user found in session, use test user
-3229 [qtp684204552-33] DEBUG org.gcube.application.framework.core.session.ASLSession - Scope is null, returning null
-3229 [qtp684204552-33] DEBUG org.gcube.application.framework.core.session.ASLSession - Getting security token: null in thread 33
-3229 [qtp684204552-33] INFO org.gcube.application.framework.core.session.ASLSession - The scope about to set is: /gcube/devsec/devVRE
-3230 [qtp684204552-33] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 33
-3230 [qtp684204552-33] INFO org.gcube.application.framework.core.session.ASLSession - Logging the entrance
-3230 [qtp684204552-33] DEBUG org.gcube.application.framework.accesslogger.model.TemplateModel - A new entry line has been created.
The entry is: -3230 [qtp684204552-33] DEBUG org.gcube.application.framework.accesslogger.model.TemplateModel - 2016-05-02 10:16:17, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre - -3230 [qtp684204552-33] INFO org.gcube.portlets.user.dataminermanager.server.util.SessionUtil - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE -3376 [qtp684204552-33] INFO org.gcube.common.clients.delegates.DiscoveryDelegate - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() -3509 [qtp684204552-33] INFO org.gcube.common.scan.DefaultScanner - matched 28 resources from 112 urls in 117 ms -3688 [qtp684204552-33] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap -3692 [qtp684204552-33] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap -3692 [qtp684204552-33] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap -3693 [qtp684204552-33] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap -3693 [qtp684204552-33] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap -3694 [qtp684204552-33] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap -3694 [qtp684204552-33] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap -3694 [qtp684204552-33] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap -3695 [qtp684204552-33] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap -3696 [qtp684204552-33] INFO 
org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap -3697 [qtp684204552-33] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap -3697 [qtp684204552-33] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap -3698 [qtp684204552-33] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap -3698 [qtp684204552-33] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap -3869 [qtp684204552-33] INFO org.gcube.common.clients.stubs.jaxws.StubFactory - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl -4038 [qtp684204552-33] INFO org.gcube.common.clients.stubs.jaxws.StubCache - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub -4168 [qtp684204552-33] INFO org.gcube.resources.discovery.icclient.ICClient - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() -4320 [qtp684204552-33] INFO org.gcube.common.clients.stubs.jaxws.handlers.HandlerRegistry - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@2d2fff7 -4321 [qtp684204552-33] INFO org.gcube.common.clients.stubs.jaxws.handlers.HandlerRegistry - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@7356a1b2 -4322 [qtp684204552-33] INFO org.gcube.common.clients.stubs.jaxws.handlers.HandlerRegistry - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@4b6bbfbd -4322 [qtp684204552-33] INFO org.gcube.common.clients.stubs.jaxws.handlers.HandlerRegistry - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@1781f0d5 -4496 [qtp684204552-33] INFO org.gcube.resources.discovery.icclient.ICClient - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and 
(contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 328 ms
-4674 [qtp684204552-33] INFO org.gcube.common.clients.delegates.DiscoveryDelegate - calling authorization-service/gcube/service @ http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
-4732 [qtp684204552-33] DEBUG org.gcube.common.clients.cache.DefaultEndpointCache - caching http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] -4733 [qtp684204552-33] INFO org.gcube.portlets.user.dataminermanager.server.util.SessionUtil - received token: f0666597-4302-49ce-bea2-555b94e569cb -4735 [qtp684204552-33] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPSBuilder - Build SM4WPS -4735 [qtp684204552-33] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPSBuilder - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] -4738 [qtp684204552-33] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 33 -4749 [qtp684204552-33] INFO org.gcube.common.clients.stubs.jaxws.StubCache - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub -4750 [qtp684204552-33] INFO org.gcube.resources.discovery.icclient.ICClient - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() -4769 [qtp684204552-33] INFO org.gcube.resources.discovery.icclient.ICClient - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 18 ms -4772 [qtp684204552-33] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPS - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] -4772 [qtp684204552-33] INFO org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPS - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService -4773 [qtp684204552-33] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPS - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet -4787 [qtp684204552-33] INFO org.gcube.portlets.user.dataminermanager.server.smservice.wps.StatWPSClientSession - CONNECT -4788 [qtp684204552-32] DEBUG org.gcube.application.framework.core.session.ASLSession - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. -4805 [qtp684204552-32] DEBUG org.gcube.application.framework.core.session.ASLSession - Could not parse file properties.xml for property. Setting it to default. 
-4805 [qtp684204552-32] INFO org.gcube.application.framework.core.session.ASLSession - Session Timeout is: 1800000 -4805 [qtp684204552-32] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 32 -4805 [qtp684204552-32] DEBUG org.gcube.application.framework.core.session.ASLSession - Getting security token: null in thread 32 -4805 [qtp684204552-32] INFO org.gcube.portlets.user.dataminermanager.server.util.SessionUtil - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE -4805 [qtp684204552-32] DEBUG org.gcube.portlets.user.dataminermanager.server.DataMinerManagerServiceImpl - getDataMinerWorkArea() -4809 [qtp684204552-32] INFO HomeManageFactory - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) -4809 [qtp684204552-32] INFO HomeManageFactory - getHomeManagerFactory -5047 [qtp684204552-32] INFO org.gcube.common.homelibrary.util.config.HomeLibraryConfiguration - calculating the persistence folder -5047 [qtp684204552-32] INFO org.gcube.common.homelibrary.util.config.HomeLibraryConfiguration - Using tmp dir /tmp -5047 [qtp684204552-32] INFO HomeManageFactory - getInstance persistenceRoot: /tmp/home_library_persistence -5076 [qtp684204552-32] DEBUG org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory - Initialize content manager -5080 [qtp684204552-32] DEBUG org.gcube.common.homelibrary.jcr.repository.JCRRepository - Initialize repository -5080 [qtp684204552-32] DEBUG org.gcube.common.homelibrary.jcr.repository.JCRRepository - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE -5080 [qtp684204552-32] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube in thread 32 -5150 [qtp684204552-32] INFO org.gcube.common.clients.stubs.jaxws.StubCache - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub -5153 [qtp684204552-32] INFO org.gcube.resources.discovery.icclient.ICClient - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource -5176 [qtp684204552-32] INFO org.gcube.resources.discovery.icclient.ICClient - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 23 ms -5228 [qtp684204552-32] ERROR org.gcube.common.homelibrary.jcr.repository.JCRRepository - error decrypting resource -java.security.InvalidKeyException: Unable to load the Key gcube.gcubekey from the classpath - at org.gcube.common.encryption.SymmetricKey.load(SymmetricKey.java:66) - at org.gcube.common.encryption.SymmetricKey.load(SymmetricKey.java:53) - at org.gcube.common.encryption.SymmetricKey.getKey(SymmetricKey.java:44) - at org.gcube.common.encryption.StringEncrypter.decrypt(StringEncrypter.java:58) - at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initializeRepository(JCRRepository.java:119) - at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initialize(JCRRepository.java:272) - at org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory.initialize(JCRHomeManagerFactory.java:46) - at 
org.gcube.common.homelibrary.home.HomeLibrary.createHomeManagerFactoryInstance(HomeLibrary.java:91) - at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:106) - at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:124) - at org.gcube.common.homelibrary.home.HomeLibrary.getUserHome(HomeLibrary.java:198) - at org.gcube.common.homelibrary.home.HomeLibrary.getUserWorkspace(HomeLibrary.java:143) - at org.gcube.portlets.user.dataminermanager.server.storage.StorageUtil.getItemInRootFolderOnWorkspace(StorageUtil.java:214) - at org.gcube.portlets.user.dataminermanager.server.util.DataMinerWorkAreaManager.getDataMinerWorkArea(DataMinerWorkAreaManager.java:40) - at org.gcube.portlets.user.dataminermanager.server.DataMinerManagerServiceImpl.getDataMinerWorkArea(DataMinerManagerServiceImpl.java:394) - at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) - at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) - at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) - at java.lang.reflect.Method.invoke(Method.java:606) - at com.google.gwt.user.server.rpc.RPC.invokeAndEncodeResponse(RPC.java:561) - at com.google.gwt.user.server.rpc.RemoteServiceServlet.processCall(RemoteServiceServlet.java:265) - at com.google.gwt.user.server.rpc.RemoteServiceServlet.processPost(RemoteServiceServlet.java:305) - at com.google.gwt.user.server.rpc.AbstractRemoteServiceServlet.doPost(AbstractRemoteServiceServlet.java:62) - at javax.servlet.http.HttpServlet.service(HttpServlet.java:755) - at javax.servlet.http.HttpServlet.service(HttpServlet.java:848) - at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:686) - at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:501) - at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:137) - at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:557) - at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:231) - at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1086) - at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:428) - at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:193) - at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1020) - at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:135) - at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) - at org.eclipse.jetty.server.handler.RequestLogHandler.handle(RequestLogHandler.java:68) - at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) - at org.eclipse.jetty.server.Server.handle(Server.java:370) - at org.eclipse.jetty.server.AbstractHttpConnection.handleRequest(AbstractHttpConnection.java:489) - at org.eclipse.jetty.server.AbstractHttpConnection.content(AbstractHttpConnection.java:960) - at org.eclipse.jetty.server.AbstractHttpConnection$RequestHandler.content(AbstractHttpConnection.java:1021) - at org.eclipse.jetty.http.HttpParser.parseNext(HttpParser.java:865) - at org.eclipse.jetty.http.HttpParser.parseAvailable(HttpParser.java:240) - at org.eclipse.jetty.server.AsyncHttpConnection.handle(AsyncHttpConnection.java:82) - at org.eclipse.jetty.io.nio.SelectChannelEndPoint.handle(SelectChannelEndPoint.java:668) - at org.eclipse.jetty.io.nio.SelectChannelEndPoint$1.run(SelectChannelEndPoint.java:52) - at 
org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:608) - at org.eclipse.jetty.util.thread.QueuedThreadPool$3.run(QueuedThreadPool.java:543) - at java.lang.Thread.run(Thread.java:745) -5229 [qtp684204552-32] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 32 -5231 [qtp684204552-32] DEBUG org.gcube.portlets.user.dataminermanager.server.util.DataMinerWorkAreaManager - DataMiner Folder is set to null -6644 [qtp684204552-33] INFO org.gcube.portlets.user.dataminermanager.server.smservice.wps.StatWPSClientSession - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService -6651 [qtp684204552-33] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPS - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. 
In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CLIMATE, name=Climate, briefDescription=CLIMATE, description=CLIMATE, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIOCLIMATE_HCAF, name=Bioclimate Hcaf, briefDescription=A transducer algorithm that generates an Half-degree Cells Authority File (HCAF) dataset for a certain time frame, with environmental parameters used by the AquaMaps approach. Evaluates the climatic changes impact on the variation of the ocean features contained in HCAF tables, description=A transducer algorithm that generates an Half-degree Cells Authority File (HCAF) dataset for a certain time frame, with environmental parameters used by the AquaMaps approach. Evaluates the climatic changes impact on the variation of the ocean features contained in HCAF tables, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIOCLIMATE_HSPEN, name=Bioclimate Hspen, briefDescription=A transducer algorithm that generates a table containing species envelops (HSPEN) in time, i.e. models capturing species tolerance with respect to environmental parameters, used by the AquaMaps approach. Evaluates the climatic changes impact on the variation of the salinity values in several ranges of a set of species envelopes, description=A transducer algorithm that generates a table containing species envelops (HSPEN) in time, i.e. models capturing species tolerance with respect to environmental parameters, used by the AquaMaps approach. Evaluates the climatic changes impact on the variation of the salinity values in several ranges of a set of species envelopes, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. 
NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. 
Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIOCLIMATE_HCAF, name=Bioclimate Hcaf, briefDescription=A transducer algorithm that generates an Half-degree Cells Authority File (HCAF) dataset for a certain time frame, with environmental parameters used by the AquaMaps approach. 
Evaluates the climatic changes impact on the variation of the ocean features contained in HCAF tables, description=A transducer algorithm that generates an Half-degree Cells Authority File (HCAF) dataset for a certain time frame, with environmental parameters used by the AquaMaps approach. Evaluates the climatic changes impact on the variation of the ocean features contained in HCAF tables, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIOCLIMATE_HSPEN, name=Bioclimate Hspen, briefDescription=A transducer algorithm that generates a table containing species envelops (HSPEN) in time, i.e. models capturing species tolerance with respect to environmental parameters, used by the AquaMaps approach. Evaluates the climatic changes impact on the variation of the salinity values in several ranges of a set of species envelopes, description=A transducer algorithm that generates a table containing species envelops (HSPEN) in time, i.e. models capturing species tolerance with respect to environmental parameters, used by the AquaMaps approach. Evaluates the climatic changes impact on the variation of the salinity values in several ranges of a set of species envelopes, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. 
Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] -55281 [qtp684204552-33] DEBUG org.gcube.application.framework.core.session.ASLSession - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. -55281 [qtp684204552-33] DEBUG org.gcube.application.framework.core.session.ASLSession - Could not parse file properties.xml for property. Setting it to default. 
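The WPS2SM conversion traced further below guesses each list parameter's separator (here "|") from the human-readable parameter description returned by DescribeProcess (e.g. "a sequence of values separated by | "). The following is a minimal, purely illustrative sketch of that kind of extraction; the class name, regex, and method signature are assumptions for illustration only and are not the actual org.gcube WPS2SM implementation, whose pattern is not shown in this log.

    import java.util.Optional;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class SeparatorGuesser {

        // Hypothetical pattern: find "... separated by <token> ..." in the
        // description text and capture the token as the list separator.
        private static final Pattern SEPARATOR =
                Pattern.compile("separated by\\s*(\\S+)");

        /** Returns the guessed separator, if the description declares one. */
        static Optional<String> guessSeparator(String description) {
            Matcher m = SEPARATOR.matcher(description);
            return m.find() ? Optional.of(m.group(1)) : Optional.empty();
        }

        public static void main(String[] args) {
            // Description copied from the DescribeProcess output logged below.
            String description = "list of HCAF table names to be used as labels "
                    + "[a sequence of values separated by | ] (format: String)";
            System.out.println(guessSeparator(description).orElse("<none>")); // prints: |
        }
    }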
-55281 [qtp684204552-33] INFO org.gcube.application.framework.core.session.ASLSession - Session Timeout is: 1800000 -55281 [qtp684204552-33] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 33 -55281 [qtp684204552-33] DEBUG org.gcube.application.framework.core.session.ASLSession - Getting security token: null in thread 33 -101246 [qtp684204552-31] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 31 -101247 [qtp684204552-31] DEBUG org.gcube.application.framework.core.session.ASLSession - Getting security token: null in thread 31 -101247 [qtp684204552-31] INFO org.gcube.portlets.user.dataminermanager.server.util.SessionUtil - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE -101247 [qtp684204552-31] INFO org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPS - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIOCLIMATE_HCAF -101247 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPS - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService -101247 [qtp684204552-31] INFO org.gcube.portlets.user.dataminermanager.server.smservice.wps.StatWPSClientSession - CONNECT -102480 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPS - - org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIOCLIMATE_HCAF - BIOCLIMATE_HCAF - A transducer algorithm that generates an Half-degree Cells Authority File (HCAF) dataset for a certain time frame, with environmental parameters used by the AquaMaps approach. Evaluates the climatic changes impact on the variation of the ocean features contained in HCAF tables - - - HCAF_Table_List - list of HCAF tables to analyze [a sequence of http links separated by | , each indicating a table in UTF-8 encoding following this template: (HCAF) http://goo.gl/SZG9uM] - Name of the parameter: HCAF_Table_List. list of HCAF tables to analyze [a sequence of http links separated by | , each indicating a table in UTF-8 encoding following this template: (HCAF) http://goo.gl/SZG9uM] - - - - text/xml - - - - - text/xml - - - text/csv - - - text/plain - - - - - - HCAF_Table_Names - list of HCAF table names to be used as labels [a sequence of values separated by | ] (format: String) - Name of the parameter: HCAF_Table_Names. list of HCAF table names to be used as labels [a sequence of values separated by | ] (format: String) - - - - - - - - - non_deterministic_output - NonDeterministicOutput - Output that is not predetermined - - - - text/xml; subtype=gml/2.1.2 - http://schemas.opengis.net/gml/2.1.2/feature.xsd - - - - - text/xml; subtype=gml/2.1.2 - http://schemas.opengis.net/gml/2.1.2/feature.xsd - - - - - - -102484 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPS - WPSClient->Fetching Inputs -102485 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPS - WPSClient->Input: - HCAF_Table_List - list of HCAF tables to analyze [a sequence of http links separated by | , each indicating a table in UTF-8 encoding following this template: (HCAF) http://goo.gl/SZG9uM] - Name of the parameter: HCAF_Table_List. 
list of HCAF tables to analyze [a sequence of http links separated by | , each indicating a table in UTF-8 encoding following this template: (HCAF) http://goo.gl/SZG9uM] - - - - text/xml - - - - - text/xml - - - text/csv - - - text/plain - - - - -102485 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPS - WPSClient->Input: - HCAF_Table_Names - list of HCAF table names to be used as labels [a sequence of values separated by | ] (format: String) - Name of the parameter: HCAF_Table_Names. list of HCAF table names to be used as labels [a sequence of values separated by | ] (format: String) - - - - - -102488 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPS - WPSClient->Fetching Outputs -102489 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPS - WPSClient->Output: - non_deterministic_output - NonDeterministicOutput - Output that is not predetermined - - - - text/xml; subtype=gml/2.1.2 - http://schemas.opengis.net/gml/2.1.2/feature.xsd - - - - - text/xml; subtype=gml/2.1.2 - http://schemas.opengis.net/gml/2.1.2/feature.xsd - - - - -102489 [qtp684204552-31] INFO org.gcube.portlets.user.dataminermanager.server.smservice.wps.StatWPSClientSession - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService -102492 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Conversion to SM Type->HCAF_Table_List is a Complex Input -102494 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Max Megabytes: 1 -102497 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - MimeType: text/xml -102498 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Schema: null -102498 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Encoding: null -102498 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Conversion to SM Type->Title:list of HCAF tables to analyze [a sequence of http links separated by | , each indicating a table in UTF-8 encoding following this template: (HCAF) http://goo.gl/SZG9uM] -102498 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Conversion to SM Type->Name:HCAF_Table_List -102498 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Conversion to SM Type->Number of Inputs to Manage:1 -102498 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPS - InputParameter: FileParameter [mimeType=text/xml, value=null, name=HCAF_Table_List, description=list of HCAF tables to analyze [a sequence of http links separated by | , each indicating a table in UTF-8 encoding following this template: (HCAF) http://goo.gl/SZG9uM] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=FILE] -102498 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Conversion to SM Type->HCAF_Table_Names is a Literal Input -102503 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - WPS type: -102503 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Guessed type: java.lang.String -102503 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Guessed default value: -102503 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Machter title: list of HCAF table names to be used as labels [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. of Entries:1] -102503 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Machter find: true -102503 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Machter group: a sequence of values separated by | -102504 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Machter start: 47 -102504 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Machter end: 82 -102504 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Machter Group Count: 1 -102504 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Matcher separator: | -102504 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Conversion to SM Type->Title:list of HCAF table names to be used as labels [a sequence of values separated by | ] (format: String) -102504 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Conversion to SM Type->Name:HCAF_Table_Names -102504 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.wps.WPS2SM - Conversion to SM Type->Number of Inputs to Manage:1 -102504 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPS - InputParameter: ListParameter [type=java.lang.String, value=null, separator=|, name=HCAF_Table_Names, description=list of HCAF table names to be used as labels [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. of Entries:1], typology=LIST] -102504 [qtp684204552-31] DEBUG org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPS - Parameters: [FileParameter [mimeType=text/xml, value=null, name=HCAF_Table_List, description=list of HCAF tables to analyze [a sequence of http links separated by | , each indicating a table in UTF-8 encoding following this template: (HCAF) http://goo.gl/SZG9uM] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=FILE], ListParameter [type=java.lang.String, value=null, separator=|, name=HCAF_Table_Names, description=list of HCAF table names to be used as labels [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. 
of Entries:1], typology=LIST]] -102780 [qtp684204552-35] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 35 -102780 [qtp684204552-35] DEBUG org.gcube.application.framework.core.session.ASLSession - Getting security token: null in thread 35 -102780 [qtp684204552-33] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 33 -102781 [qtp684204552-33] DEBUG org.gcube.application.framework.core.session.ASLSession - Getting security token: null in thread 33 -102781 [qtp684204552-35] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 35 -102781 [qtp684204552-33] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 33 -102781 [qtp684204552-35] INFO HomeManageFactory - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) -102781 [qtp684204552-35] INFO HomeManageFactory - getHomeManagerFactory -102781 [qtp684204552-33] INFO HomeManageFactory - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) -102781 [qtp684204552-33] INFO HomeManageFactory - getHomeManagerFactory -102781 [qtp684204552-35] INFO org.gcube.common.homelibrary.util.config.HomeLibraryConfiguration - calculating the persistence folder -102781 [qtp684204552-33] INFO org.gcube.common.homelibrary.util.config.HomeLibraryConfiguration - calculating the persistence folder -102781 [qtp684204552-35] INFO org.gcube.common.homelibrary.util.config.HomeLibraryConfiguration - Using tmp dir /tmp -102782 [qtp684204552-33] INFO org.gcube.common.homelibrary.util.config.HomeLibraryConfiguration - Using tmp dir /tmp -102782 [qtp684204552-35] INFO HomeManageFactory - getInstance persistenceRoot: /tmp/home_library_persistence -102782 [qtp684204552-35] DEBUG org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory - Initialize content manager -102782 [qtp684204552-35] DEBUG org.gcube.common.homelibrary.jcr.repository.JCRRepository - Initialize repository -102782 [qtp684204552-35] DEBUG org.gcube.common.homelibrary.jcr.repository.JCRRepository - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE -102782 [qtp684204552-35] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube in thread 35 -102792 [qtp684204552-35] INFO org.gcube.common.clients.stubs.jaxws.StubCache - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub -102793 [qtp684204552-35] INFO org.gcube.resources.discovery.icclient.ICClient - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource -102817 [qtp684204552-35] INFO org.gcube.resources.discovery.icclient.ICClient - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 23 ms -102827 [qtp684204552-35] ERROR org.gcube.common.homelibrary.jcr.repository.JCRRepository - error decrypting resource -java.security.InvalidKeyException: Unable to load the Key gcube.gcubekey from the classpath - at 
org.gcube.common.encryption.SymmetricKey.load(SymmetricKey.java:66) - at org.gcube.common.encryption.SymmetricKey.load(SymmetricKey.java:53) - at org.gcube.common.encryption.SymmetricKey.getKey(SymmetricKey.java:44) - at org.gcube.common.encryption.StringEncrypter.decrypt(StringEncrypter.java:58) - at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initializeRepository(JCRRepository.java:119) - at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initialize(JCRRepository.java:272) - at org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory.initialize(JCRHomeManagerFactory.java:46) - at org.gcube.common.homelibrary.home.HomeLibrary.createHomeManagerFactoryInstance(HomeLibrary.java:91) - at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:106) - at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:124) - at org.gcube.common.homelibrary.home.HomeLibrary.getUserHome(HomeLibrary.java:198) - at org.gcube.common.homelibrary.home.HomeLibrary.getUserWorkspace(HomeLibrary.java:143) - at org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl.getWorkspace(WorkspaceExplorerServiceImpl.java:104) - at org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl.getItemByCategory(WorkspaceExplorerServiceImpl.java:207) - at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) - at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) - at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) - at java.lang.reflect.Method.invoke(Method.java:606) - at com.google.gwt.user.server.rpc.RPC.invokeAndEncodeResponse(RPC.java:561) - at com.google.gwt.user.server.rpc.RemoteServiceServlet.processCall(RemoteServiceServlet.java:265) - at com.google.gwt.user.server.rpc.RemoteServiceServlet.processPost(RemoteServiceServlet.java:305) - at com.google.gwt.user.server.rpc.AbstractRemoteServiceServlet.doPost(AbstractRemoteServiceServlet.java:62) - at javax.servlet.http.HttpServlet.service(HttpServlet.java:755) - at javax.servlet.http.HttpServlet.service(HttpServlet.java:848) - at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:686) - at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:501) - at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:137) - at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:557) - at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:231) - at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1086) - at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:428) - at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:193) - at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1020) - at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:135) - at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) - at org.eclipse.jetty.server.handler.RequestLogHandler.handle(RequestLogHandler.java:68) - at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) - at org.eclipse.jetty.server.Server.handle(Server.java:370) - at org.eclipse.jetty.server.AbstractHttpConnection.handleRequest(AbstractHttpConnection.java:489) - at org.eclipse.jetty.server.AbstractHttpConnection.content(AbstractHttpConnection.java:960) - at 
org.eclipse.jetty.server.AbstractHttpConnection$RequestHandler.content(AbstractHttpConnection.java:1021) - at org.eclipse.jetty.http.HttpParser.parseNext(HttpParser.java:865) - at org.eclipse.jetty.http.HttpParser.parseAvailable(HttpParser.java:240) - at org.eclipse.jetty.server.AsyncHttpConnection.handle(AsyncHttpConnection.java:82) - at org.eclipse.jetty.io.nio.SelectChannelEndPoint.handle(SelectChannelEndPoint.java:668) - at org.eclipse.jetty.io.nio.SelectChannelEndPoint$1.run(SelectChannelEndPoint.java:52) - at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:608) - at org.eclipse.jetty.util.thread.QueuedThreadPool$3.run(QueuedThreadPool.java:543) - at java.lang.Thread.run(Thread.java:745) -102829 [qtp684204552-35] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 35 -102829 [qtp684204552-33] INFO HomeManageFactory - getInstance persistenceRoot: /tmp/home_library_persistence -102829 [qtp684204552-35] ERROR org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl - Error during get item by category -org.gcube.common.homelibrary.home.exceptions.InternalErrorException: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: cannot discover password and username in scope /gcube/devsec/devVRE - at org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory.initialize(JCRHomeManagerFactory.java:48) - at org.gcube.common.homelibrary.home.HomeLibrary.createHomeManagerFactoryInstance(HomeLibrary.java:91) - at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:106) - at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:124) - at org.gcube.common.homelibrary.home.HomeLibrary.getUserHome(HomeLibrary.java:198) - at org.gcube.common.homelibrary.home.HomeLibrary.getUserWorkspace(HomeLibrary.java:143) - at org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl.getWorkspace(WorkspaceExplorerServiceImpl.java:104) - at org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl.getItemByCategory(WorkspaceExplorerServiceImpl.java:207) - at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) - at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) - at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) - at java.lang.reflect.Method.invoke(Method.java:606) - at com.google.gwt.user.server.rpc.RPC.invokeAndEncodeResponse(RPC.java:561) - at com.google.gwt.user.server.rpc.RemoteServiceServlet.processCall(RemoteServiceServlet.java:265) - at com.google.gwt.user.server.rpc.RemoteServiceServlet.processPost(RemoteServiceServlet.java:305) - at com.google.gwt.user.server.rpc.AbstractRemoteServiceServlet.doPost(AbstractRemoteServiceServlet.java:62) - at javax.servlet.http.HttpServlet.service(HttpServlet.java:755) - at javax.servlet.http.HttpServlet.service(HttpServlet.java:848) - at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:686) - at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:501) - at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:137) - at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:557) - at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:231) - at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1086) - at 
org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:428) - at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:193) - at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1020) - at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:135) - at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) - at org.eclipse.jetty.server.handler.RequestLogHandler.handle(RequestLogHandler.java:68) - at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) - at org.eclipse.jetty.server.Server.handle(Server.java:370) - at org.eclipse.jetty.server.AbstractHttpConnection.handleRequest(AbstractHttpConnection.java:489) - at org.eclipse.jetty.server.AbstractHttpConnection.content(AbstractHttpConnection.java:960) - at org.eclipse.jetty.server.AbstractHttpConnection$RequestHandler.content(AbstractHttpConnection.java:1021) - at org.eclipse.jetty.http.HttpParser.parseNext(HttpParser.java:865) - at org.eclipse.jetty.http.HttpParser.parseAvailable(HttpParser.java:240) - at org.eclipse.jetty.server.AsyncHttpConnection.handle(AsyncHttpConnection.java:82) - at org.eclipse.jetty.io.nio.SelectChannelEndPoint.handle(SelectChannelEndPoint.java:668) - at org.eclipse.jetty.io.nio.SelectChannelEndPoint$1.run(SelectChannelEndPoint.java:52) - at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:608) - at org.eclipse.jetty.util.thread.QueuedThreadPool$3.run(QueuedThreadPool.java:543) - at java.lang.Thread.run(Thread.java:745) -Caused by: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: cannot discover password and username in scope /gcube/devsec/devVRE - at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initializeRepository(JCRRepository.java:155) - at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initialize(JCRRepository.java:272) - at org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory.initialize(JCRHomeManagerFactory.java:46) - ... 42 more -Caused by: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: cannot discover password and username in scope /gcube/devsec/devVRE - at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initializeRepository(JCRRepository.java:147) - ... 
44 more -102829 [qtp684204552-33] DEBUG org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory - Initialize content manager -102831 [qtp684204552-33] DEBUG org.gcube.common.homelibrary.jcr.repository.JCRRepository - Initialize repository -102831 [qtp684204552-33] DEBUG org.gcube.common.homelibrary.jcr.repository.JCRRepository - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE -102831 [qtp684204552-33] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube in thread 33 -102841 [qtp684204552-33] INFO org.gcube.common.clients.stubs.jaxws.StubCache - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub -102842 [qtp684204552-33] INFO org.gcube.resources.discovery.icclient.ICClient - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource -102861 [qtp684204552-33] INFO org.gcube.resources.discovery.icclient.ICClient - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 19 ms -102872 [qtp684204552-33] ERROR org.gcube.common.homelibrary.jcr.repository.JCRRepository - error decrypting resource -java.security.InvalidKeyException: Unable to load the Key gcube.gcubekey from the classpath - at org.gcube.common.encryption.SymmetricKey.load(SymmetricKey.java:66) - at org.gcube.common.encryption.SymmetricKey.load(SymmetricKey.java:53) - at org.gcube.common.encryption.SymmetricKey.getKey(SymmetricKey.java:44) - at org.gcube.common.encryption.StringEncrypter.decrypt(StringEncrypter.java:58) - at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initializeRepository(JCRRepository.java:119) - at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initialize(JCRRepository.java:272) - at org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory.initialize(JCRHomeManagerFactory.java:46) - at org.gcube.common.homelibrary.home.HomeLibrary.createHomeManagerFactoryInstance(HomeLibrary.java:91) - at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:106) - at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:124) - at org.gcube.common.homelibrary.home.HomeLibrary.getUserHome(HomeLibrary.java:198) - at org.gcube.common.homelibrary.home.HomeLibrary.getUserWorkspace(HomeLibrary.java:143) - at org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl.getWorkspace(WorkspaceExplorerServiceImpl.java:104) - at org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl.getItemByCategory(WorkspaceExplorerServiceImpl.java:207) - at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) - at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) - at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) - at java.lang.reflect.Method.invoke(Method.java:606) - at com.google.gwt.user.server.rpc.RPC.invokeAndEncodeResponse(RPC.java:561) - at com.google.gwt.user.server.rpc.RemoteServiceServlet.processCall(RemoteServiceServlet.java:265) - at 
com.google.gwt.user.server.rpc.RemoteServiceServlet.processPost(RemoteServiceServlet.java:305) - at com.google.gwt.user.server.rpc.AbstractRemoteServiceServlet.doPost(AbstractRemoteServiceServlet.java:62) - at javax.servlet.http.HttpServlet.service(HttpServlet.java:755) - at javax.servlet.http.HttpServlet.service(HttpServlet.java:848) - at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:686) - at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:501) - at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:137) - at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:557) - at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:231) - at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1086) - at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:428) - at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:193) - at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1020) - at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:135) - at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) - at org.eclipse.jetty.server.handler.RequestLogHandler.handle(RequestLogHandler.java:68) - at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) - at org.eclipse.jetty.server.Server.handle(Server.java:370) - at org.eclipse.jetty.server.AbstractHttpConnection.handleRequest(AbstractHttpConnection.java:489) - at org.eclipse.jetty.server.AbstractHttpConnection.content(AbstractHttpConnection.java:960) - at org.eclipse.jetty.server.AbstractHttpConnection$RequestHandler.content(AbstractHttpConnection.java:1021) - at org.eclipse.jetty.http.HttpParser.parseNext(HttpParser.java:865) - at org.eclipse.jetty.http.HttpParser.parseAvailable(HttpParser.java:240) - at org.eclipse.jetty.server.AsyncHttpConnection.handle(AsyncHttpConnection.java:82) - at org.eclipse.jetty.io.nio.SelectChannelEndPoint.handle(SelectChannelEndPoint.java:668) - at org.eclipse.jetty.io.nio.SelectChannelEndPoint$1.run(SelectChannelEndPoint.java:52) - at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:608) - at org.eclipse.jetty.util.thread.QueuedThreadPool$3.run(QueuedThreadPool.java:543) - at java.lang.Thread.run(Thread.java:745) -102874 [qtp684204552-33] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 33 -102874 [qtp684204552-33] ERROR org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl - Error during get item by category -org.gcube.common.homelibrary.home.exceptions.InternalErrorException: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: cannot discover password and username in scope /gcube/devsec/devVRE - at org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory.initialize(JCRHomeManagerFactory.java:48) - at org.gcube.common.homelibrary.home.HomeLibrary.createHomeManagerFactoryInstance(HomeLibrary.java:91) - at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:106) - at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:124) - at org.gcube.common.homelibrary.home.HomeLibrary.getUserHome(HomeLibrary.java:198) - at org.gcube.common.homelibrary.home.HomeLibrary.getUserWorkspace(HomeLibrary.java:143) - at 
org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl.getWorkspace(WorkspaceExplorerServiceImpl.java:104) - at org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl.getItemByCategory(WorkspaceExplorerServiceImpl.java:207) - at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) - at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) - at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) - at java.lang.reflect.Method.invoke(Method.java:606) - at com.google.gwt.user.server.rpc.RPC.invokeAndEncodeResponse(RPC.java:561) - at com.google.gwt.user.server.rpc.RemoteServiceServlet.processCall(RemoteServiceServlet.java:265) - at com.google.gwt.user.server.rpc.RemoteServiceServlet.processPost(RemoteServiceServlet.java:305) - at com.google.gwt.user.server.rpc.AbstractRemoteServiceServlet.doPost(AbstractRemoteServiceServlet.java:62) - at javax.servlet.http.HttpServlet.service(HttpServlet.java:755) - at javax.servlet.http.HttpServlet.service(HttpServlet.java:848) - at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:686) - at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:501) - at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:137) - at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:557) - at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:231) - at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1086) - at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:428) - at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:193) - at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1020) - at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:135) - at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) - at org.eclipse.jetty.server.handler.RequestLogHandler.handle(RequestLogHandler.java:68) - at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) - at org.eclipse.jetty.server.Server.handle(Server.java:370) - at org.eclipse.jetty.server.AbstractHttpConnection.handleRequest(AbstractHttpConnection.java:489) - at org.eclipse.jetty.server.AbstractHttpConnection.content(AbstractHttpConnection.java:960) - at org.eclipse.jetty.server.AbstractHttpConnection$RequestHandler.content(AbstractHttpConnection.java:1021) - at org.eclipse.jetty.http.HttpParser.parseNext(HttpParser.java:865) - at org.eclipse.jetty.http.HttpParser.parseAvailable(HttpParser.java:240) - at org.eclipse.jetty.server.AsyncHttpConnection.handle(AsyncHttpConnection.java:82) - at org.eclipse.jetty.io.nio.SelectChannelEndPoint.handle(SelectChannelEndPoint.java:668) - at org.eclipse.jetty.io.nio.SelectChannelEndPoint$1.run(SelectChannelEndPoint.java:52) - at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:608) - at org.eclipse.jetty.util.thread.QueuedThreadPool$3.run(QueuedThreadPool.java:543) - at java.lang.Thread.run(Thread.java:745) -Caused by: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: cannot discover password and username in scope /gcube/devsec/devVRE - at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initializeRepository(JCRRepository.java:155) - at 
org.gcube.common.homelibrary.jcr.repository.JCRRepository.initialize(JCRRepository.java:272) - at org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory.initialize(JCRHomeManagerFactory.java:46) - ... 42 more -Caused by: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: cannot discover password and username in scope /gcube/devsec/devVRE - at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initializeRepository(JCRRepository.java:147) - ... 44 more -103169 [qtp684204552-31] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 31 -103169 [qtp684204552-31] DEBUG org.gcube.application.framework.core.session.ASLSession - Getting security token: null in thread 31 -103169 [qtp684204552-31] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 31 -103169 [qtp684204552-31] INFO HomeManageFactory - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) -103169 [qtp684204552-31] INFO HomeManageFactory - getHomeManagerFactory -103169 [qtp684204552-31] INFO org.gcube.common.homelibrary.util.config.HomeLibraryConfiguration - calculating the persistence folder -103169 [qtp684204552-31] INFO org.gcube.common.homelibrary.util.config.HomeLibraryConfiguration - Using tmp dir /tmp -103169 [qtp684204552-31] INFO HomeManageFactory - getInstance persistenceRoot: /tmp/home_library_persistence -103170 [qtp684204552-31] DEBUG org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory - Initialize content manager -103170 [qtp684204552-31] DEBUG org.gcube.common.homelibrary.jcr.repository.JCRRepository - Initialize repository -103170 [qtp684204552-31] DEBUG org.gcube.common.homelibrary.jcr.repository.JCRRepository - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE -103170 [qtp684204552-31] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube in thread 31 -103170 [qtp684204552-34] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 34 -103170 [qtp684204552-34] DEBUG org.gcube.application.framework.core.session.ASLSession - Getting security token: null in thread 34 -103170 [qtp684204552-34] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 34 -103171 [qtp684204552-34] INFO HomeManageFactory - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) -103171 [qtp684204552-34] INFO HomeManageFactory - getHomeManagerFactory -103171 [qtp684204552-34] INFO org.gcube.common.homelibrary.util.config.HomeLibraryConfiguration - calculating the persistence folder -103171 [qtp684204552-34] INFO org.gcube.common.homelibrary.util.config.HomeLibraryConfiguration - Using tmp dir /tmp -103189 [qtp684204552-31] INFO org.gcube.common.clients.stubs.jaxws.StubCache - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub -103191 [qtp684204552-31] INFO org.gcube.resources.discovery.icclient.ICClient - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource -103215 [qtp684204552-31] INFO org.gcube.resources.discovery.icclient.ICClient - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 24 ms -103225 [qtp684204552-31] ERROR org.gcube.common.homelibrary.jcr.repository.JCRRepository - error decrypting resource -java.security.InvalidKeyException: Unable to load the Key gcube.gcubekey from the classpath - at org.gcube.common.encryption.SymmetricKey.load(SymmetricKey.java:66) - at org.gcube.common.encryption.SymmetricKey.load(SymmetricKey.java:53) - at org.gcube.common.encryption.SymmetricKey.getKey(SymmetricKey.java:44) - at org.gcube.common.encryption.StringEncrypter.decrypt(StringEncrypter.java:58) - at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initializeRepository(JCRRepository.java:119) - at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initialize(JCRRepository.java:272) - at org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory.initialize(JCRHomeManagerFactory.java:46) - at org.gcube.common.homelibrary.home.HomeLibrary.createHomeManagerFactoryInstance(HomeLibrary.java:91) - at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:106) - at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:124) - at org.gcube.common.homelibrary.home.HomeLibrary.getUserHome(HomeLibrary.java:198) - at org.gcube.common.homelibrary.home.HomeLibrary.getUserWorkspace(HomeLibrary.java:143) - at org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl.getWorkspace(WorkspaceExplorerServiceImpl.java:104) - at org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl.getItemByCategory(WorkspaceExplorerServiceImpl.java:207) - at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) - at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) - at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) - at java.lang.reflect.Method.invoke(Method.java:606) - at com.google.gwt.user.server.rpc.RPC.invokeAndEncodeResponse(RPC.java:561) - at com.google.gwt.user.server.rpc.RemoteServiceServlet.processCall(RemoteServiceServlet.java:265) - at com.google.gwt.user.server.rpc.RemoteServiceServlet.processPost(RemoteServiceServlet.java:305) - at 
com.google.gwt.user.server.rpc.AbstractRemoteServiceServlet.doPost(AbstractRemoteServiceServlet.java:62) - at javax.servlet.http.HttpServlet.service(HttpServlet.java:755) - at javax.servlet.http.HttpServlet.service(HttpServlet.java:848) - at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:686) - at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:501) - at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:137) - at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:557) - at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:231) - at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1086) - at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:428) - at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:193) - at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1020) - at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:135) - at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) - at org.eclipse.jetty.server.handler.RequestLogHandler.handle(RequestLogHandler.java:68) - at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) - at org.eclipse.jetty.server.Server.handle(Server.java:370) - at org.eclipse.jetty.server.AbstractHttpConnection.handleRequest(AbstractHttpConnection.java:489) - at org.eclipse.jetty.server.AbstractHttpConnection.content(AbstractHttpConnection.java:960) - at org.eclipse.jetty.server.AbstractHttpConnection$RequestHandler.content(AbstractHttpConnection.java:1021) - at org.eclipse.jetty.http.HttpParser.parseNext(HttpParser.java:865) - at org.eclipse.jetty.http.HttpParser.parseAvailable(HttpParser.java:240) - at org.eclipse.jetty.server.AsyncHttpConnection.handle(AsyncHttpConnection.java:82) - at org.eclipse.jetty.io.nio.SelectChannelEndPoint.handle(SelectChannelEndPoint.java:668) - at org.eclipse.jetty.io.nio.SelectChannelEndPoint$1.run(SelectChannelEndPoint.java:52) - at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:608) - at org.eclipse.jetty.util.thread.QueuedThreadPool$3.run(QueuedThreadPool.java:543) - at java.lang.Thread.run(Thread.java:745) -103228 [qtp684204552-31] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 31 -103228 [qtp684204552-31] ERROR org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl - Error during get item by category -org.gcube.common.homelibrary.home.exceptions.InternalErrorException: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: cannot discover password and username in scope /gcube/devsec/devVRE - at org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory.initialize(JCRHomeManagerFactory.java:48) - at org.gcube.common.homelibrary.home.HomeLibrary.createHomeManagerFactoryInstance(HomeLibrary.java:91) - at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:106) - at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:124) - at org.gcube.common.homelibrary.home.HomeLibrary.getUserHome(HomeLibrary.java:198) - at org.gcube.common.homelibrary.home.HomeLibrary.getUserWorkspace(HomeLibrary.java:143) - at 
org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl.getWorkspace(WorkspaceExplorerServiceImpl.java:104) - at org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl.getItemByCategory(WorkspaceExplorerServiceImpl.java:207) - at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) - at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) - at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) - at java.lang.reflect.Method.invoke(Method.java:606) - at com.google.gwt.user.server.rpc.RPC.invokeAndEncodeResponse(RPC.java:561) - at com.google.gwt.user.server.rpc.RemoteServiceServlet.processCall(RemoteServiceServlet.java:265) - at com.google.gwt.user.server.rpc.RemoteServiceServlet.processPost(RemoteServiceServlet.java:305) - at com.google.gwt.user.server.rpc.AbstractRemoteServiceServlet.doPost(AbstractRemoteServiceServlet.java:62) - at javax.servlet.http.HttpServlet.service(HttpServlet.java:755) - at javax.servlet.http.HttpServlet.service(HttpServlet.java:848) - at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:686) - at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:501) - at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:137) - at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:557) - at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:231) - at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1086) - at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:428) - at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:193) - at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1020) - at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:135) - at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) - at org.eclipse.jetty.server.handler.RequestLogHandler.handle(RequestLogHandler.java:68) - at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) - at org.eclipse.jetty.server.Server.handle(Server.java:370) - at org.eclipse.jetty.server.AbstractHttpConnection.handleRequest(AbstractHttpConnection.java:489) - at org.eclipse.jetty.server.AbstractHttpConnection.content(AbstractHttpConnection.java:960) - at org.eclipse.jetty.server.AbstractHttpConnection$RequestHandler.content(AbstractHttpConnection.java:1021) - at org.eclipse.jetty.http.HttpParser.parseNext(HttpParser.java:865) - at org.eclipse.jetty.http.HttpParser.parseAvailable(HttpParser.java:240) - at org.eclipse.jetty.server.AsyncHttpConnection.handle(AsyncHttpConnection.java:82) - at org.eclipse.jetty.io.nio.SelectChannelEndPoint.handle(SelectChannelEndPoint.java:668) - at org.eclipse.jetty.io.nio.SelectChannelEndPoint$1.run(SelectChannelEndPoint.java:52) - at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:608) - at org.eclipse.jetty.util.thread.QueuedThreadPool$3.run(QueuedThreadPool.java:543) - at java.lang.Thread.run(Thread.java:745) -Caused by: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: cannot discover password and username in scope /gcube/devsec/devVRE - at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initializeRepository(JCRRepository.java:155) - at 
org.gcube.common.homelibrary.jcr.repository.JCRRepository.initialize(JCRRepository.java:272) - at org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory.initialize(JCRHomeManagerFactory.java:46) - ... 42 more -Caused by: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: cannot discover password and username in scope /gcube/devsec/devVRE - at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initializeRepository(JCRRepository.java:147) - ... 44 more -103228 [qtp684204552-34] INFO HomeManageFactory - getInstance persistenceRoot: /tmp/home_library_persistence -103230 [qtp684204552-34] DEBUG org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory - Initialize content manager -103230 [qtp684204552-34] DEBUG org.gcube.common.homelibrary.jcr.repository.JCRRepository - Initialize repository -103230 [qtp684204552-34] DEBUG org.gcube.common.homelibrary.jcr.repository.JCRRepository - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE -103230 [qtp684204552-34] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube in thread 34 -103239 [qtp684204552-34] INFO org.gcube.common.clients.stubs.jaxws.StubCache - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub -103241 [qtp684204552-34] INFO org.gcube.resources.discovery.icclient.ICClient - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource -103259 [qtp684204552-34] INFO org.gcube.resources.discovery.icclient.ICClient - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms -103269 [qtp684204552-34] ERROR org.gcube.common.homelibrary.jcr.repository.JCRRepository - error decrypting resource -java.security.InvalidKeyException: Unable to load the Key gcube.gcubekey from the classpath - at org.gcube.common.encryption.SymmetricKey.load(SymmetricKey.java:66) - at org.gcube.common.encryption.SymmetricKey.load(SymmetricKey.java:53) - at org.gcube.common.encryption.SymmetricKey.getKey(SymmetricKey.java:44) - at org.gcube.common.encryption.StringEncrypter.decrypt(StringEncrypter.java:58) - at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initializeRepository(JCRRepository.java:119) - at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initialize(JCRRepository.java:272) - at org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory.initialize(JCRHomeManagerFactory.java:46) - at org.gcube.common.homelibrary.home.HomeLibrary.createHomeManagerFactoryInstance(HomeLibrary.java:91) - at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:106) - at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:124) - at org.gcube.common.homelibrary.home.HomeLibrary.getUserHome(HomeLibrary.java:198) - at org.gcube.common.homelibrary.home.HomeLibrary.getUserWorkspace(HomeLibrary.java:143) - at org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl.getWorkspace(WorkspaceExplorerServiceImpl.java:104) - at 
org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl.getRoot(WorkspaceExplorerServiceImpl.java:117) - at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) - at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) - at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) - at java.lang.reflect.Method.invoke(Method.java:606) - at com.google.gwt.user.server.rpc.RPC.invokeAndEncodeResponse(RPC.java:561) - at com.google.gwt.user.server.rpc.RemoteServiceServlet.processCall(RemoteServiceServlet.java:265) - at com.google.gwt.user.server.rpc.RemoteServiceServlet.processPost(RemoteServiceServlet.java:305) - at com.google.gwt.user.server.rpc.AbstractRemoteServiceServlet.doPost(AbstractRemoteServiceServlet.java:62) - at javax.servlet.http.HttpServlet.service(HttpServlet.java:755) - at javax.servlet.http.HttpServlet.service(HttpServlet.java:848) - at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:686) - at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:501) - at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:137) - at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:557) - at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:231) - at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1086) - at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:428) - at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:193) - at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1020) - at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:135) - at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) - at org.eclipse.jetty.server.handler.RequestLogHandler.handle(RequestLogHandler.java:68) - at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) - at org.eclipse.jetty.server.Server.handle(Server.java:370) - at org.eclipse.jetty.server.AbstractHttpConnection.handleRequest(AbstractHttpConnection.java:489) - at org.eclipse.jetty.server.AbstractHttpConnection.content(AbstractHttpConnection.java:960) - at org.eclipse.jetty.server.AbstractHttpConnection$RequestHandler.content(AbstractHttpConnection.java:1021) - at org.eclipse.jetty.http.HttpParser.parseNext(HttpParser.java:865) - at org.eclipse.jetty.http.HttpParser.parseAvailable(HttpParser.java:240) - at org.eclipse.jetty.server.AsyncHttpConnection.handle(AsyncHttpConnection.java:82) - at org.eclipse.jetty.io.nio.SelectChannelEndPoint.handle(SelectChannelEndPoint.java:668) - at org.eclipse.jetty.io.nio.SelectChannelEndPoint$1.run(SelectChannelEndPoint.java:52) - at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:608) - at org.eclipse.jetty.util.thread.QueuedThreadPool$3.run(QueuedThreadPool.java:543) - at java.lang.Thread.run(Thread.java:745) -103269 [qtp684204552-34] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 34 -103269 [qtp684204552-34] ERROR org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl - Error during root retrieving -org.gcube.common.homelibrary.home.exceptions.InternalErrorException: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: cannot discover password and username in scope 
/gcube/devsec/devVRE - at org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory.initialize(JCRHomeManagerFactory.java:48) - at org.gcube.common.homelibrary.home.HomeLibrary.createHomeManagerFactoryInstance(HomeLibrary.java:91) - at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:106) - at org.gcube.common.homelibrary.home.HomeLibrary.getHomeManagerFactory(HomeLibrary.java:124) - at org.gcube.common.homelibrary.home.HomeLibrary.getUserHome(HomeLibrary.java:198) - at org.gcube.common.homelibrary.home.HomeLibrary.getUserWorkspace(HomeLibrary.java:143) - at org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl.getWorkspace(WorkspaceExplorerServiceImpl.java:104) - at org.gcube.portlets.widgets.wsexplorer.server.WorkspaceExplorerServiceImpl.getRoot(WorkspaceExplorerServiceImpl.java:117) - at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) - at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) - at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) - at java.lang.reflect.Method.invoke(Method.java:606) - at com.google.gwt.user.server.rpc.RPC.invokeAndEncodeResponse(RPC.java:561) - at com.google.gwt.user.server.rpc.RemoteServiceServlet.processCall(RemoteServiceServlet.java:265) - at com.google.gwt.user.server.rpc.RemoteServiceServlet.processPost(RemoteServiceServlet.java:305) - at com.google.gwt.user.server.rpc.AbstractRemoteServiceServlet.doPost(AbstractRemoteServiceServlet.java:62) - at javax.servlet.http.HttpServlet.service(HttpServlet.java:755) - at javax.servlet.http.HttpServlet.service(HttpServlet.java:848) - at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:686) - at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:501) - at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:137) - at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:557) - at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:231) - at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1086) - at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:428) - at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:193) - at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1020) - at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:135) - at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) - at org.eclipse.jetty.server.handler.RequestLogHandler.handle(RequestLogHandler.java:68) - at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) - at org.eclipse.jetty.server.Server.handle(Server.java:370) - at org.eclipse.jetty.server.AbstractHttpConnection.handleRequest(AbstractHttpConnection.java:489) - at org.eclipse.jetty.server.AbstractHttpConnection.content(AbstractHttpConnection.java:960) - at org.eclipse.jetty.server.AbstractHttpConnection$RequestHandler.content(AbstractHttpConnection.java:1021) - at org.eclipse.jetty.http.HttpParser.parseNext(HttpParser.java:865) - at org.eclipse.jetty.http.HttpParser.parseAvailable(HttpParser.java:240) - at org.eclipse.jetty.server.AsyncHttpConnection.handle(AsyncHttpConnection.java:82) - at org.eclipse.jetty.io.nio.SelectChannelEndPoint.handle(SelectChannelEndPoint.java:668) - at org.eclipse.jetty.io.nio.SelectChannelEndPoint$1.run(SelectChannelEndPoint.java:52) - at 
org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:608) - at org.eclipse.jetty.util.thread.QueuedThreadPool$3.run(QueuedThreadPool.java:543) - at java.lang.Thread.run(Thread.java:745) -Caused by: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: cannot discover password and username in scope /gcube/devsec/devVRE - at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initializeRepository(JCRRepository.java:155) - at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initialize(JCRRepository.java:272) - at org.gcube.common.homelibrary.jcr.JCRHomeManagerFactory.initialize(JCRHomeManagerFactory.java:46) - ... 42 more -Caused by: org.gcube.common.homelibrary.home.exceptions.InternalErrorException: cannot discover password and username in scope /gcube/devsec/devVRE - at org.gcube.common.homelibrary.jcr.repository.JCRRepository.initializeRepository(JCRRepository.java:147) - ... 44 more -110269 [qtp684204552-30] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 30 -110269 [qtp684204552-30] DEBUG org.gcube.application.framework.core.session.ASLSession - Getting security token: null in thread 30 -165268 [qtp684204552-34] DEBUG org.gcube.common.scope.impl.DefaultScopeProvider - setting scope /gcube/devsec/devVRE in thread 34 -165268 [qtp684204552-34] DEBUG org.gcube.application.framework.core.session.ASLSession - Getting security token: null in thread 34
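The tail of the removed log.txt above shows why the local run keeps failing: every workspace call first reports "Unable to load the Key gcube.gcubekey from the classpath" and then "cannot discover password and username in scope /gcube/devsec/devVRE", so the Home Library never completes repository initialization. A minimal, hypothetical check (not part of the patch; the resource name is taken from the log message) to verify whether the key is visible to the webapp classloader:

public class GcubeKeyCheck {

    // Looks up the key resource reported as missing in the log.
    public static void main(String[] args) {
        String resource = "gcube.gcubekey";
        java.net.URL url = Thread.currentThread()
                .getContextClassLoader()
                .getResource(resource);
        if (url == null) {
            System.out.println(resource + " is NOT on the classpath");
        } else {
            System.out.println(resource + " found at " + url);
        }
    }
}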
diff --git a/pom.xml b/pom.xml
index 02f81fd..fb546bd 100644
--- a/pom.xml
+++ b/pom.xml
@@ -131,6 +131,7 @@
 			com.google.gwt
 			gwt-user
 			${gwtVersion}
+			provided
diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/server/DataMinerManagerServiceImpl.java b/src/main/java/org/gcube/portlets/user/dataminermanager/server/DataMinerManagerServiceImpl.java
index aa242a5..ed2941a 100644
--- a/src/main/java/org/gcube/portlets/user/dataminermanager/server/DataMinerManagerServiceImpl.java
+++ b/src/main/java/org/gcube/portlets/user/dataminermanager/server/DataMinerManagerServiceImpl.java
@@ -95,7 +95,7 @@ public class DataMinerManagerServiceImpl extends RemoteServiceServlet implements
 		try {
 			HttpSession session = this.getThreadLocalRequest().getSession();
 			ASLSession aslSession = SessionUtil.getASLSession(session);
-			SClient smClient = SessionUtil.getSClient(aslSession);
+			SClient smClient = SessionUtil.getSClient(aslSession, session);
 			List list = smClient
 					.getOperatorsClassifications();
 			return list;
@@ -118,7 +118,7 @@ public class DataMinerManagerServiceImpl extends RemoteServiceServlet implements
 			HttpSession session = this.getThreadLocalRequest().getSession();
 			ASLSession aslSession = SessionUtil.getASLSession(session);
-			SClient smClient = SessionUtil.getSClient(aslSession);
+			SClient smClient = SessionUtil.getSClient(aslSession, session);
 			List list = smClient.getInputParameters(operator);
 			return list;
 		} catch (ServiceException e) {
@@ -139,7 +139,7 @@ public class DataMinerManagerServiceImpl extends RemoteServiceServlet implements
 			HttpSession session = this.getThreadLocalRequest().getSession();
 			ASLSession aslSession = SessionUtil.getASLSession(session);
 			logger.debug("StartComputation(): [ operator=" + operator + "]");
-			SClient smClient = SessionUtil.getSClient(aslSession);
+			SClient smClient = SessionUtil.getSClient(aslSession, session);
 			ComputationId computationId = smClient.startComputation(operator);
 			return computationId;
 		} catch (ServiceException e) {
@@ -162,7 +162,7 @@ public class DataMinerManagerServiceImpl extends RemoteServiceServlet implements
 			Map properties = StorageUtil.getProperties(
 					aslSession.getUsername(), itemDescription.getId());
 			logger.debug("Properties: " + properties);
-			SClient smClient = SessionUtil.getSClient(aslSession);
+			SClient smClient = SessionUtil.getSClient(aslSession, session);
 			ComputationId computationId = smClient
 					.resubmitComputation(properties);
 			return computationId;
@@ -183,7 +183,7 @@ public class DataMinerManagerServiceImpl extends RemoteServiceServlet implements
 		try {
 			HttpSession session = this.getThreadLocalRequest().getSession();
 			ASLSession aslSession = SessionUtil.getASLSession(session);
-			SClient smClient = SessionUtil.getSClient(aslSession);
+			SClient smClient = SessionUtil.getSClient(aslSession, session);
 			ComputationStatus computationStatus = smClient
 					.getComputationStatus(computationId);
 			return computationStatus;
@@ -296,7 +296,7 @@ public class DataMinerManagerServiceImpl extends RemoteServiceServlet implements
 			HttpSession session = this.getThreadLocalRequest().getSession();
 			ASLSession aslSession = SessionUtil.getASLSession(session);
 			logger.debug("CancelComputation(): " + computationId);
-			SClient smClient = SessionUtil.getSClient(aslSession);
+			SClient smClient = SessionUtil.getSClient(aslSession, session);
 			String result = smClient.cancelComputation(computationId);
 			// SessionUtil.putSClient(session, smClient);
 			return result;
@@ -317,7 +317,7 @@ public class DataMinerManagerServiceImpl extends RemoteServiceServlet implements
 			Log.debug("getOutputDataByComputationId: " + computationId);
 			HttpSession session = this.getThreadLocalRequest().getSession();
 			ASLSession aslSession = SessionUtil.getASLSession(session);
-			SClient smClient = SessionUtil.getSClient(aslSession);
+			SClient smClient = SessionUtil.getSClient(aslSession, session);
 			OutputData outputData = smClient
 					.getOutputDataByComputationId(computationId);
 			Log.debug("OutputData: " + outputData);
@@ -344,7 +344,7 @@ public class DataMinerManagerServiceImpl extends RemoteServiceServlet implements
 			Map properties = StorageUtil.getProperties(
 					aslSession.getUsername(), itemDescription.getId());
 			logger.debug("Properties: " + properties);
-			SClient smClient = SessionUtil.getSClient(aslSession);
+			SClient smClient = SessionUtil.getSClient(aslSession, session);
 			ComputationData computationData = smClient
 					.getComputationDataByComputationProperties(properties);
 			return computationData;
@@ -381,7 +381,7 @@ public class DataMinerManagerServiceImpl extends RemoteServiceServlet implements
 			}
 			ComputationId computationId = new ComputationId();
 			computationId.setId(compId);
-			SClient smClient = SessionUtil.getSClient(aslSession);
+			SClient smClient = SessionUtil.getSClient(aslSession, session);
 			String result = smClient.cancelComputation(computationId);
 			return result;
 		} catch (ServiceException e) {
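All nine hunks above change the call sites in the same way: the SClient is now resolved from both the ASLSession and the underlying HttpSession. A hypothetical private helper inside DataMinerManagerServiceImpl (not introduced by the patch) could express the repeated lookup once, assuming the surrounding try/catch blocks stay as they are:

    // Hypothetical convenience method: resolves the per-scope SClient for the
    // current GWT request. getThreadLocalRequest() comes from RemoteServiceServlet;
    // getASLSession(...) and getSClient(...) are the SessionUtil methods touched here.
    private SClient currentSClient() throws Exception {
        HttpSession session = this.getThreadLocalRequest().getSession();
        ASLSession aslSession = SessionUtil.getASLSession(session);
        return SessionUtil.getSClient(aslSession, session);
    }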
diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/server/DownloadFolderServlet.java b/src/main/java/org/gcube/portlets/user/dataminermanager/server/DownloadFolderServlet.java
index 9e73097..69e4fa8 100644
--- a/src/main/java/org/gcube/portlets/user/dataminermanager/server/DownloadFolderServlet.java
+++ b/src/main/java/org/gcube/portlets/user/dataminermanager/server/DownloadFolderServlet.java
@@ -76,7 +76,9 @@ public class DownloadFolderServlet extends HttpServlet {
 		logger.debug("DownloadFolderServlet session id: " + session.getId());
 		ASLSession aslSession = SessionUtil.getASLSession(session);
-
+		String token=SessionUtil.getToken(aslSession);
+		logger.debug("UserToken: "+token);
+
 		String itemId = request.getParameter(Constants.DOWNLOAD_FOLDER_SERVLET_ITEM_ID_PARAMETER);
 		String folderName = request.getParameter(Constants.DOWNLOAD_FOLDER_SERVLET_FOLDER_NAME_PARAMETER);
 		logger.debug("Request: [itemId=" + itemId + ", folderName="
diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/server/SessionUtil.java b/src/main/java/org/gcube/portlets/user/dataminermanager/server/SessionUtil.java
index 0e8c561..794a536 100644
--- a/src/main/java/org/gcube/portlets/user/dataminermanager/server/SessionUtil.java
+++ b/src/main/java/org/gcube/portlets/user/dataminermanager/server/SessionUtil.java
@@ -6,6 +6,7 @@ package org.gcube.portlets.user.dataminermanager.server;
 import static org.gcube.common.authorization.client.Constants.authorizationService;
 
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 
 import javax.servlet.http.HttpSession;
@@ -85,7 +86,7 @@ public class SessionUtil {
 
 	}
 
-	public static SClient getSClient(ASLSession aslSession)
+	public static SClient getSClient(ASLSession aslSession, HttpSession session)
 			throws Exception {
 
 		if(aslSession==null){
@@ -94,24 +95,44 @@ public class SessionUtil {
 		}
 
 		SClient sClient;
-		Object obj=aslSession.getAttribute(Constants.SClient);
+		Object obj=session.getAttribute(Constants.SClientMap);
 		if(obj==null){
-			logger.info("Created new SClient");
+			logger.info("Create new SClientMap");
+			HashMap sClientMap=new HashMap<>();
+			logger.info("Create new SClient");
 			ServiceCredential serviceCredential=new ServiceCredential(aslSession.getUsername(), aslSession.getScope(),
 					SessionUtil.getToken(aslSession));
 			SClientBuilder sBuilder = new SClient4WPSBuilder(serviceCredential);
-
 			SClientDirector director = new SClientDirector();
 			director.setSClientBuilder(sBuilder);
 			director.constructSClient();
 			sClient = director.getSClient();
-			aslSession.setAttribute(Constants.SClient, sClient);
+
+			sClientMap.put(aslSession.getScope(), sClient);
+			session.setAttribute(Constants.SClientMap, sClientMap);
 		} else {
-			if (obj instanceof SClient) {
-				logger.info("Use SClient in session");
-				sClient=(SClient) obj;
+			if (obj instanceof HashMap) {
+				@SuppressWarnings("unchecked")
+				HashMap sClientMap=(HashMap) obj;
+				if(sClientMap.containsKey(aslSession.getScope())){
+					logger.info("Use SClient in session");
+					sClient=sClientMap.get(aslSession.getScope());
+				} else {
+					logger.info("Create new SClient");
+					ServiceCredential serviceCredential=new ServiceCredential(aslSession.getUsername(), aslSession.getScope(),
+							SessionUtil.getToken(aslSession));
+					SClientBuilder sBuilder = new SClient4WPSBuilder(serviceCredential);
+
+					SClientDirector director = new SClientDirector();
+					director.setSClientBuilder(sBuilder);
+					director.constructSClient();
+					sClient = director.getSClient();
+					sClientMap.put(aslSession.getScope(), sClient);
+					session.setAttribute(Constants.SClientMap, sClientMap);
+				}
+
 			} else {
-				logger.error("Portlet is Changed!");
+				logger.error("Attention no SClientMap in Session!");
 				throw new ServiceException("Sign Out, portlet is changed, a new session is required!");
 			}
 		}
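The SessionUtil change above is the core of the patch: instead of caching a single SClient in the ASLSession, a HashMap keyed by scope is stored in the HttpSession under Constants.SClientMap, so one portal session can hold a distinct client per VRE scope. A simplified, self-contained sketch of that caching pattern, with placeholder types standing in for SClient and HttpSession (the real code builds the client through SClient4WPSBuilder and SClientDirector):

import java.util.HashMap;
import java.util.Map;

public class PerScopeClientCache {

    // Placeholder for the real SClient.
    interface Client { }

    // Same role as Constants.SClientMap ("DataMinerClientMap") in the patch.
    static final String MAP_KEY = "DataMinerClientMap";

    // Stands in for the HttpSession attribute store.
    private final Map<String, Object> sessionAttributes = new HashMap<>();

    @SuppressWarnings("unchecked")
    public Client getClient(String scope) {
        Map<String, Client> byScope = (Map<String, Client>) sessionAttributes.get(MAP_KEY);
        if (byScope == null) {
            // First access in this session: create the map, like the obj==null branch.
            byScope = new HashMap<>();
            sessionAttributes.put(MAP_KEY, byScope);
        }
        Client client = byScope.get(scope);
        if (client == null) {
            // No client cached for this scope yet: build and remember it,
            // like the containsKey==false branch in SessionUtil.getSClient.
            client = buildClient(scope);
            byScope.put(scope, client);
        }
        return client;
    }

    // The patch builds the real client via SClient4WPSBuilder/SClientDirector.
    private Client buildClient(String scope) {
        return new Client() { };
    }
}

Calling getClient("/gcube/devsec/devVRE") twice returns the cached instance, while a different scope builds and caches its own client, which is the behaviour the containsKey branch above implements.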
diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/server/smservice/SClient4WPS.java b/src/main/java/org/gcube/portlets/user/dataminermanager/server/smservice/SClient4WPS.java
index ae59ff3..c113101 100644
--- a/src/main/java/org/gcube/portlets/user/dataminermanager/server/smservice/SClient4WPS.java
+++ b/src/main/java/org/gcube/portlets/user/dataminermanager/server/smservice/SClient4WPS.java
@@ -261,7 +261,7 @@ public class SClient4WPS extends SClient {
 
 			operatorsClass.add(op);
 
-		logger.debug("OperatorClass: " + operatorsClass);
+		//logger.debug("OperatorClass: " + operatorsClass);
 
 		return operatorsClass;
 	}
diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/shared/Constants.java b/src/main/java/org/gcube/portlets/user/dataminermanager/shared/Constants.java
index 4bdcf3b..70df438 100644
--- a/src/main/java/org/gcube/portlets/user/dataminermanager/shared/Constants.java
+++ b/src/main/java/org/gcube/portlets/user/dataminermanager/shared/Constants.java
@@ -23,7 +23,7 @@ public class Constants {
 
-	public static final String SClient = "DataMinerClient";
+	public static final String SClientMap = "DataMinerClientMap";
 
 	public static final String DATA_MINER_SERVICE_NAME = "DataMiner";
 	public static final String DATAMINER_SERVICE_CATEGORY = "DataAnalysis";
diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/shared/session/UserInfo.java b/src/main/java/org/gcube/portlets/user/dataminermanager/shared/session/UserInfo.java
index 0f925ef..d36fcca 100644
--- a/src/main/java/org/gcube/portlets/user/dataminermanager/shared/session/UserInfo.java
+++ b/src/main/java/org/gcube/portlets/user/dataminermanager/shared/session/UserInfo.java
@@ -13,7 +13,7 @@ public class UserInfo implements Serializable {
 	private static final long serialVersionUID = -2826549639677017234L;
 
 	private String username;
-	private long groupId;
+	private Long groupId;
 	private String groupName;
 	private String scope;
 	private String scopeName;
@@ -34,7 +34,7 @@ public class UserInfo implements Serializable {
 	 * @param userEmailAddress
 	 * @param userFullName
 	 */
-	public UserInfo(String username, long groupId, String groupName,
+	public UserInfo(String username, Long groupId, String groupName,
 			String scope, String scopeName, String userEmailAddress,
 			String userFullName) {
 		super();
@@ -55,11 +55,11 @@ public class UserInfo implements Serializable {
 		this.username = username;
 	}
 
-	public long getGroupId() {
+	public Long getGroupId() {
 		return groupId;
 	}
 
-	public void setGroupId(long groupId) {
+	public void setGroupId(Long groupId) {
 		this.groupId = groupId;
 	}
diff --git a/src/main/webapp/DataMinerManager.css b/src/main/webapp/DataMinerManager.css
index 680d968..ce42acd 100644
--- a/src/main/webapp/DataMinerManager.css
+++ b/src/main/webapp/DataMinerManager.css
@@ -1,3 +1,38 @@
+/* FIX FOR Portal Theme */
+.aui input[type="text"], .aui select, .aui textarea, .aui .uneditable-input {
+	margin-bottom: 1px !important;
+	padding: 0px !important;
+}
+
+.aui input, .aui textarea, .aui .uneditable-input {
+	width: 100% !important;
+}
+
+
+.aui p {
+	margin: 0px !important;
+}
+
+.aui img {
+	vertical-align: baseline !important;
+}
+
+.aui fieldset {
+	padding: 9px !important;
+	/* margin: 0; */
+	border: 1px solid rgb(181, 184, 200) !important;
+}
+
+.aui legend {
+	margin-bottom: 0px !important;
+	width: auto !important;
+	border-bottom: none !important;
+}
+
+
+
+/* Data Miner */
+
 .smLayoutContainer {
 	/* Blue
 	box-shadow: inset 0 0 50px rgba(0, 0, 0, 0.11), 0 0 0
diff --git a/src/main/webapp/WEB-INF/portlet.xml b/src/main/webapp/WEB-INF/portlet.xml
index d14b64d..d0abd7d 100644
--- a/src/main/webapp/WEB-INF/portlet.xml
+++ b/src/main/webapp/WEB-INF/portlet.xml
@@ -1,5 +1,8 @@
-
+
 	DataMinerManager
 	DataMinerManager
@@ -14,5 +17,11 @@
 	DataMinerManager
 	DataMinerManager
+
+
+		portlet-setup-show-borders
+		false
+
+
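Two smaller changes ride along: UserInfo.groupId moves from the primitive long to Long, so an unknown group id can now be represented as null, and the new portlet.xml option (portlet-setup-show-borders = false) presumably removes the portal borders that the CSS fixes above also target. Callers of getGroupId() should therefore stop assuming a primitive; a small, hypothetical null-safe accessor, assuming the UserInfo class from this patch:

    // Hypothetical helper: UserInfo.getGroupId() may now return null.
    static long groupIdOrDefault(UserInfo userInfo) {
        Long groupId = userInfo.getGroupId();
        return (groupId != null) ? groupId.longValue() : 0L;
    }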