Updated: support for FWS and new HL

git-svn-id: http://svn.research-infrastructures.eu/public/d4science/gcube/trunk/portlets/user/speciesdiscovery@82555 82a268e6-3cf1-43bd-a215-b396298e98cf
This commit is contained in:
Francesco Mangiacrapa 2013-10-07 12:40:14 +00:00
parent c8866fa4af
commit 2084304df9
11 changed files with 116 additions and 231 deletions

66
pom.xml
View File

@ -94,7 +94,7 @@
<artifactId>spd-model</artifactId>
<version>1.0.0-SNAPSHOT</version>
<!-- UNCOMMENT THIS FOR RELEASE -->
<!-- <version>[1.0.0-SNAPSHOT, 2.0.0-SNAPSHOT)</version> -->
<!-- <version>[1.0.0-SNAPSHOT, 2.0.0-SNAPSHOT)</version> -->
<scope>provided</scope>
</dependency>
@ -134,12 +134,12 @@
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.gcube.contentmanagement</groupId>
<artifactId>storage-resource-link-plugin</artifactId>
<version>[1.0.0-SNAPSHOT, 2.0.0-SNAPSHOT)</version>
<scope>provided</scope>
</dependency>
<!-- <dependency> -->
<!-- <groupId>org.gcube.contentmanagement</groupId> -->
<!-- <artifactId>storage-resource-link-plugin</artifactId> -->
<!-- <version>[1.0.0-SNAPSHOT, 2.0.0-SNAPSHOT)</version> -->
<!-- <scope>provided</scope> -->
<!-- </dependency> -->
<!-- WSLT dependencies -->
<dependency>
@ -191,11 +191,28 @@
<version>2.2.5</version>
<scope>provided</scope>
</dependency>
<!-- <dependency> -->
<!-- <groupId>org.gcube.core</groupId> -->
<!-- <artifactId>gcf</artifactId> -->
<!-- <scope>provided</scope> -->
<!-- </dependency> -->
<!-- FWS -->
<dependency>
<groupId>org.gcube.core</groupId>
<artifactId>gcf</artifactId>
<groupId>org.gcube.resources.discovery</groupId>
<artifactId>ic-client</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.gcube.core</groupId>
<artifactId>common-scope-maps</artifactId>
<version>[1.0.0-SNAPSHOT, 2.0.0-SNAPSHOT)</version>
<!-- UNCOMMENT THIS FOR RELEASE -->
<!-- <scope>provided</scope> -->
</dependency>
<!-- END FWS -->
<dependency>
<groupId>org.gcube.portlets.user</groupId>
<artifactId>gcube-widgets</artifactId>
@ -219,18 +236,33 @@
<scope>provided</scope>
</dependency>
<!-- <dependency> -->
<!-- <groupId>org.gcube.portlets.user</groupId> -->
<!-- <artifactId>home-library</artifactId> -->
<!-- <version>[4.3.0-SNAPSHOT, 5.0.0-SNAPSHOT)</version> -->
<!-- <scope>provided</scope> -->
<!-- </dependency> -->
<!-- <dependency> -->
<!-- <groupId>org.gcube.portlets.user</groupId> -->
<!-- <artifactId>home-library-jcr</artifactId> -->
<!-- <version>[1.3.0-SNAPSHOT, 2.0.0-SNAPSHOT)</version> -->
<!-- <scope>provided</scope> -->
<!-- </dependency> -->
<!-- HOME LIBRARY -->
<dependency>
<groupId>org.gcube.portlets.user</groupId>
<artifactId>home-library</artifactId>
<version>[4.3.0-SNAPSHOT, 5.0.0-SNAPSHOT)</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.gcube.portlets.user</groupId>
<groupId>org.gcube.common</groupId>
<artifactId>home-library-jcr</artifactId>
<version>[1.3.0-SNAPSHOT, 2.0.0-SNAPSHOT)</version>
<version>[1.0.0-SNAPSHOT, 2.0.0-SNAPSHOT)</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.gcube.common</groupId>
<artifactId>home-library</artifactId>
<version>[1.0.0-SNAPSHOT, 2.0.0-SNAPSHOT)</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.gcube.dvos</groupId>
<artifactId>usermanagement-core</artifactId>

View File

@ -26,14 +26,15 @@ import net.sf.csv4j.CSVWriter;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.gcube.application.framework.core.session.ASLSession;
import org.gcube.common.homelibrary.home.HomeLibrary;
import org.gcube.common.homelibrary.home.workspace.Workspace;
import org.gcube.common.homelibrary.home.workspace.WorkspaceFolder;
import org.gcube.common.homelibrary.util.WorkspaceUtil;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.data.spd.model.products.OccurrencePoint;
import org.gcube.data.spd.model.products.ResultElement;
import org.gcube.data.spd.model.products.TaxonomyItem;
import org.gcube.data.spd.stubs.types.Status;
import org.gcube.portlets.user.homelibrary.home.HomeLibrary;
import org.gcube.portlets.user.homelibrary.home.workspace.Workspace;
import org.gcube.portlets.user.homelibrary.home.workspace.WorkspaceFolder;
import org.gcube.portlets.user.homelibrary.util.WorkspaceUtil;
import org.gcube.portlets.user.speciesdiscovery.client.ConstantsSpeciesDiscovery;
import org.gcube.portlets.user.speciesdiscovery.client.model.ClassificationModel;
import org.gcube.portlets.user.speciesdiscovery.client.rpc.TaxonomySearchService;
@ -986,107 +987,12 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
public void saveSelectedOccurrencePoints(String destinationFolderId, String fileName, SaveFileFormat fileFormat, OccurrencesSaveEnum typeCSV) throws SearchServiceException {
logger.trace("saveSelectedOccurrencePoints destinationFolderId: "+destinationFolderId+" fileName: "+fileName+" fileFormat: "+fileFormat+" typeCSV: "+typeCSV);
/* List<String> listId = getSelectedOccurrenceIds();
printId(listId);
//RETURN IF SIZE OF LIST OCCURRENCE ID IS 0
if(listId.size()==0){
logger.error("There are not occurrences point to save");
throw new SearchServiceException("There are not occurrences point to save");
}
File file = null;
String mimeType = null;
switch (fileFormat) {
case CSV: {
mimeType = "text/csv";
try {
file = generateCSVFile(listId, typeCSV);
} catch (Exception e) {
logger.error("An error occurred generating the csv file", e);
throw new SearchServiceException(e.getMessage());
}
} break;
case DARWIN_CORE:{
mimeType = "application/xhtml+xml";
try {
SpeciesService taxonomyService = getSpeciesService();
file = taxonomyService.getOccurrencesAsDarwinCoreByIds(listId);
} catch (Exception e) {
logger.error("An error occurred retrieving the darwin core file", e);
throw new SearchServiceException(e.getMessage());
}
} break;
}
try {
logger.trace("tmp file: "+file.getAbsolutePath());
Workspace workspace = HomeLibrary.getUserWorkspace(getASLSession());
WorkspaceFolder folder = (WorkspaceFolder)workspace.getItem(destinationFolderId);
fileName = WorkspaceUtil.getUniqueName(fileName, folder);
folder.createExternalFileItem(fileName, "Occurrence points generated files", mimeType, new FileInputStream(file));
file.delete();
logger.trace("Save complete");
} catch (Exception e) {
logger.error("An error occurred saving the generated file into the workspace", e);
throw new SearchServiceException(e.getMessage());
}
*/
//TODO REMOVE - THIS IS OLD METHOD
}
@Override
public void saveSelectedTaxonomyPoints(String destinationFolderId, String fileName, SaveFileFormat fileFormat) throws SearchServiceException {
/*
logger.trace("saveSelectedTaxonomyPoints destinationFolderId: "+destinationFolderId+" fileName: "+fileName+" fileFormat: "+fileFormat);
List<String> listId = getSelectedTaxonomyId();
printId(listId);
File file = null;
String mimeType = null;
switch (fileFormat) {
case DARWIN_CORE_ARCHIVE:{
mimeType = "";
try {
SpeciesService taxonomyService = getSpeciesService();
file = taxonomyService.getOccurrencesAsDarwinCoreArchive(listId);
} catch (Exception e) {
logger.error("An error occurred retrieving the darwin core file", e);
throw new SearchServiceException(e.getMessage());
}
} break;
}
try {
logger.trace("tmp file: "+file.getAbsolutePath());
Workspace workspace = HomeLibrary.getUserWorkspace(getASLSession());
WorkspaceFolder folder = (WorkspaceFolder)workspace.getItem(destinationFolderId);
fileName = WorkspaceUtil.getUniqueName(fileName, folder);
folder.createExternalFileItem(fileName, "Occurrence points generated files", mimeType, new FileInputStream(file));
file.delete();
logger.trace("Save complete");
} catch (Exception e) {
logger.error("An error occurred saving the generated file into the workspace", e);
throw new SearchServiceException(e.getMessage());
}
*/
//TODO OLD CALL
}
@SuppressWarnings("unchecked")
@ -1636,7 +1542,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
InputStream inputStream = taxonomyService.getTaxonomyJobFileById(jobIdentifier);
if(inputStream!=null){
Workspace workspace = HomeLibrary.getUserWorkspace(getASLSession());
Workspace workspace = org.gcube.portlets.user.speciesdiscovery.server.util.GetWorkspaceUtil.getWorskspace(getASLSession());
logger.trace("input stream is not null");
// System.out.println("input stream is not null");
@ -1676,7 +1582,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
InputStream inputStream = taxonomyService.getTaxonomyJobErrorFileById(jobIdentifier);
if(inputStream!=null){
Workspace workspace = HomeLibrary.getUserWorkspace(getASLSession());
Workspace workspace = org.gcube.portlets.user.speciesdiscovery.server.util.GetWorkspaceUtil.getWorskspace(getASLSession());
logger.trace("input stream is not null");
// System.out.println("input stream is not null");
@ -2005,7 +1911,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
InputStream inputStream = taxonomyService.getOccurrenceJobFileById(jobModel.getJobIdentifier());
if(inputStream!=null){
Workspace workspace = HomeLibrary.getUserWorkspace(getASLSession());
Workspace workspace = org.gcube.portlets.user.speciesdiscovery.server.util.GetWorkspaceUtil.getWorskspace(getASLSession());
logger.trace("input stream is not null");
// System.out.println("input stream is not null");
@ -2056,7 +1962,7 @@ public class TaxonomySearchServiceImpl extends RemoteServiceServlet implements T
InputStream inputStream = taxonomyService.getOccurrenceJobErrorFileById(jobModel.getJobIdentifier());
if(inputStream!=null){
Workspace workspace = HomeLibrary.getUserWorkspace(getASLSession());
Workspace workspace = org.gcube.portlets.user.speciesdiscovery.server.util.GetWorkspaceUtil.getWorskspace(getASLSession());
logger.trace("input stream is not null");
// System.out.println("input stream is not null");

View File

@ -9,9 +9,6 @@ import javax.servlet.http.HttpSession;
import org.apache.log4j.Logger;
import org.gcube.application.framework.core.session.ASLSession;
import org.gcube.application.framework.core.session.SessionManager;
import org.gcube.common.core.scope.GCUBEScope;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.portal.custom.scopemanager.scopehelper.ScopeHelper;
import org.gcube.portlets.user.speciesdiscovery.server.persistence.dao.OccurrenceJobPersistence;
import org.gcube.portlets.user.speciesdiscovery.server.persistence.dao.OccurrenceRowPersistence;
import org.gcube.portlets.user.speciesdiscovery.server.persistence.dao.ResultRowPersistence;

View File

@ -9,7 +9,7 @@ import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Root;
import org.gcube.common.core.utils.logging.GCUBELog;
import org.apache.log4j.Logger;
import org.gcube.portlets.user.speciesdiscovery.shared.DatabaseServiceException;
public abstract class AbstractPersistence<T> {
@ -18,7 +18,7 @@ public abstract class AbstractPersistence<T> {
public final String AND = "AND";
protected GCUBELog logger = new GCUBELog(AbstractPersistence.class);
protected Logger logger = Logger.getLogger(AbstractPersistence.class);
AbstractPersistence(EntityManagerFactory factory){
this.entityManagerFactory = factory;

View File

@ -12,7 +12,7 @@ import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Root;
import org.gcube.common.core.utils.logging.GCUBELog;
import org.apache.log4j.Logger;
import org.gcube.portlets.user.speciesdiscovery.shared.DatabaseServiceException;
import org.gcube.portlets.user.speciesdiscovery.shared.Occurrence;
@ -21,7 +21,7 @@ public class OccurrenceRowPersistence extends AbstractPersistence<Occurrence>{
protected CriteriaBuilder criteriaBuilder;
protected CriteriaQuery<Object> criteriaQuery;
protected Root<Occurrence> rootFrom;
protected GCUBELog logger = new GCUBELog(OccurrenceRowPersistence.class);
protected Logger logger = Logger.getLogger(OccurrenceRowPersistence.class);
public OccurrenceRowPersistence(EntityManagerFactory factory) throws DatabaseServiceException{
super(factory);

View File

@ -12,7 +12,7 @@ import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Root;
import org.gcube.common.core.utils.logging.GCUBELog;
import org.apache.log4j.Logger;
import org.gcube.portlets.user.speciesdiscovery.shared.DatabaseServiceException;
import org.gcube.portlets.user.speciesdiscovery.shared.ResultRow;
@ -22,7 +22,7 @@ public class ResultRowPersistence extends AbstractPersistence<ResultRow>{
protected CriteriaBuilder criteriaBuilder;
protected CriteriaQuery<Object> criteriaQuery;
protected Root<ResultRow> rootFrom;
protected GCUBELog logger = new GCUBELog(ResultRowPersistence.class);
protected Logger logger = Logger.getLogger(ResultRowPersistence.class);
public ResultRowPersistence(EntityManagerFactory factory) throws DatabaseServiceException{
super(factory);

View File

@ -6,8 +6,8 @@ package org.gcube.portlets.user.speciesdiscovery.server.service;
import java.util.ArrayList;
import java.util.List;
import org.apache.log4j.Logger;
import org.gcube.application.framework.core.session.ASLSession;
import org.gcube.common.core.utils.logging.GCUBEClientLog;
import org.gcube.data.spd.model.products.DataProvider;
import org.gcube.data.spd.model.products.DataSet;
import org.gcube.data.spd.model.products.Product;
@ -30,7 +30,7 @@ import org.gcube.portlets.user.speciesdiscovery.shared.util.NormalizeString;
*/
public class ResultItemConverter implements Converter<ResultItem, ResultRow> {
protected GCUBEClientLog logger = new GCUBEClientLog(ResultItemConverter.class);
protected Logger logger = Logger.getLogger(ResultItemConverter.class);
protected int id = 0;
protected ASLSession session;

View File

@ -0,0 +1,42 @@
/**
*
*/
package org.gcube.portlets.user.speciesdiscovery.server.util;
import org.apache.log4j.Logger;
import org.gcube.application.framework.core.session.ASLSession;
import org.gcube.common.homelibrary.home.HomeLibrary;
import org.gcube.common.homelibrary.home.workspace.Workspace;
import org.gcube.common.scope.api.ScopeProvider;
/**
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
* @Oct 7, 2013
*
*/
/**
 * Utility for resolving the Home Library {@code Workspace} of the user bound to an
 * {@code ASLSession}. Centralizes the scope bootstrapping ({@code ScopeProvider})
 * that the new Featherweight Stack (FWS) Home Library requires before any lookup.
 */
public class GetWorkspaceUtil {

	protected static Logger logger = Logger.getLogger(GetWorkspaceUtil.class);

	/**
	 * Returns the workspace of the user owning the given session.
	 *
	 * Side effect: sets the thread-bound {@code ScopeProvider} to the session scope,
	 * because {@code HomeLibrary.getUserWorkspace} resolves resources against it.
	 *
	 * NOTE(review): the method name keeps the historical misspelling "getWorskspace"
	 * on purpose — existing callers across the project reference it by this name,
	 * so renaming would break them.
	 *
	 * @param session the ASL session carrying the username and scope; must be non-null
	 *                and must have a non-null scope
	 * @return the user's Home Library workspace
	 * @throws Exception if the session or its scope is null, or if the workspace
	 *                   retrieval fails in the underlying Home Library
	 */
	public static Workspace getWorskspace(ASLSession session) throws Exception {

		if (session == null)
			throw new Exception("ASL session is null");

		if (session.getScope() == null)
			throw new Exception("Scope into ASL session is null");

		String scope = session.getScope().toString();
		logger.trace("Get workspace for scope " + scope);

		// Must be set before the lookup: the FWS Home Library reads the scope
		// from the thread-local ScopeProvider, not from the session object.
		ScopeProvider.instance.set(scope);
		logger.trace("ScopeProvider set for scope " + scope);

		logger.trace("returning workspace for username " + session.getUsername());
		return HomeLibrary.getUserWorkspace(session.getUsername());
	}
}

View File

@ -8,73 +8,14 @@ import java.net.URL;
import java.net.URLConnection;
import java.net.URLStreamHandler;
import org.gcube.common.core.utils.logging.GCUBELog;
import org.apache.log4j.Logger;
import org.gcube.contentmanager.storageclient.model.protocol.smp.Handler;
import org.gcube.contentmanager.storageclient.model.protocol.smp.SMPURLConnection;
public class StorageUtil {
protected static GCUBELog logger = new GCUBELog(StorageUtil.class);
//Storage version 1.0
// public static InputStream getInputStreamByStorageClient(String url) throws Exception {
// logger.trace("url :" + url);
//
// String [] urlParam=url.split("\\?");
//// String param=urlParam[1];
// logger.info("String encrypted "+urlParam[1]);
// String param=new StringEncrypter("DES").decrypt(urlParam[1]);
// logger.info("String decrypted: "+param);
// String [] getParam=param.split("\\&");
// String serviceClass=null;
// String serviceName=null;
// String owner=null;
// String accessType=null;
// String scopeType=null;
// AccessType type = null;
// String server= null;
// String [] par1;
// for(String par : getParam){
// if(par.contains("ServiceClass")){
// par1=par.split("=");
// serviceClass=par1[1];
// }else if(par.contains("ServiceName")){
// par1=par.split("=");
// serviceName=par1[1];
// }else if(par.contains("owner")){
// par1=par.split("=");
// owner=par1[1];
// }else if(par.contains("scope")){
// par1=par.split("=");
// scopeType=par1[1];
// }else if(par.contains("server")){
// par1=par.split("=");
// server=par1[1];
// }else if(par.contains("AccessType")){
// par1=par.split("=");
// accessType=par1[1];
// if(accessType.equalsIgnoreCase("public")){
// type=AccessType.PUBLIC;
// }else if(accessType.equalsIgnoreCase("shared")){
// type=AccessType.SHARED;
// }
// }else{
// ;
// }
// }
// if((serviceName==null) || (serviceClass==null) || (owner == null) || (scopeType==null) || (type == null))
// throw new MalformedURLException();
// GCUBEScope scope=GCUBEScope.getScope(scopeType);
// String location=extractLocation(urlParam[0]);
//
// logger.trace("Storage instance with parameters "+serviceClass+" "+serviceName+" "+owner+" "+type+" "+scope+ " location: "+urlParam[0]);
// IClient client=new StorageClient(serviceClass, serviceName, owner, type, scope, server).getClient();
// InputStream is=null;
// is=client.get().RFileAStream(location);
// createFileTest(is);
// }
protected Logger logger = Logger.getLogger(StorageUtil.class);
public static InputStream getInputStreamByStorageClient(String url) throws Exception {
Handler.activateProtocol();
@ -101,33 +42,9 @@ public class StorageUtil {
}
// public static InputStream getInputStreamByStorageClient(String url) throws Exception {
//
// Handler.activateProtocol();
// URL smsHome = null;
// try {
// smsHome = new URL(url);
// } catch (MalformedURLException e1) {
// e1.printStackTrace();
// }
// URLConnection uc = null;
// uc = ( URLConnection ) smsHome.openConnection();
// InputStream is=uc.getInputStream();
// return is;
// }
// public static String extractLocation(String url) {
// String [] loc=url.split("//");
// logger.trace("url extracted: "+loc[1]);
// return loc[1];
// }
public static void main(String[] args) throws Exception {
InputStream is = getInputStreamByStorageClient("smp:/51e1065ee4b0a159b8c25cc8?5ezvFfBOLqb2cBxvyAbVnOhbxBCSqhv+Z4BC5NS/+OwS5RYBeaUL5FS9eDyNubiTI4vSpggUgPA+jm9rQxwbisfhkOW/m6l2IYG9BKb8AEJFLgVvG3FJTk0+4xV9iM/hNQvChZjoJZna0aPXkHN4Eg==");
}
}

View File

@ -13,13 +13,8 @@ import javax.persistence.TypedQuery;
import org.gcube.application.framework.core.session.ASLSession;
import org.gcube.application.framework.core.session.SessionManager;
import org.gcube.common.core.scope.GCUBEScope;
import org.gcube.data.spd.model.products.ResultElement;
import org.gcube.data.spd.model.products.TaxonomyItem;
import org.gcube.data.streams.Stream;
import org.gcube.common.scope.impl.ScopeBean;
import org.gcube.portlets.user.speciesdiscovery.server.persistence.DaoSession;
import org.gcube.portlets.user.speciesdiscovery.server.service.SpeciesService;
import org.gcube.portlets.user.speciesdiscovery.server.service.TaxonomyItemConverter;
import org.gcube.portlets.user.speciesdiscovery.shared.ResultRow;
import org.gcube.portlets.user.speciesdiscovery.shared.TaxonomyRow;
@ -32,7 +27,7 @@ public class H2TestResultItemRetrievingAndConverting {
private static final int MAX_ELEMENTS = 20;
static String sessionID = "1";
static String user = "test.user";
static GCUBEScope scope;
static ScopeBean scope;
static ASLSession session;
@ -40,7 +35,7 @@ public class H2TestResultItemRetrievingAndConverting {
{
session = SessionManager.getInstance().getASLSession(sessionID, user);
scope = GCUBEScope.getScope("/gcube/devsec");
scope = new ScopeBean("/gcube/devsec");
session.setScope(scope.toString());
EntityManagerFactory factory = DaoSession.getEntityManagerFactory(session);

View File

@ -3,12 +3,8 @@
*/
package org.gcube.portlets.user.speciesdiscovery.client;
import static org.gcube.data.spd.client.plugins.AbstractPlugin.classification;
import static org.gcube.data.spd.client.plugins.AbstractPlugin.executor;
import static org.gcube.data.spd.client.plugins.AbstractPlugin.manager;
import static org.gcube.data.spd.client.plugins.AbstractPlugin.occurrence;
import java.net.URI;
import java.util.List;
import java.util.concurrent.TimeUnit;