Removed obsolete implementation

Fabio Sinibaldi 2020-10-07 15:47:37 +02:00
parent ceecc4401b
commit 3fc7632421
11 changed files with 150 additions and 703 deletions

ArchiveManager.java

@@ -1,18 +0,0 @@
package org.gcube.application.geoportal;
import java.util.Collection;
import org.gcube.application.geoportal.model.ArchiveDescriptor;
public interface ArchiveManager {
public ArchiveDescriptor getById(long id);
public void store(ArchiveDescriptor desc);
public void delete(long id,boolean deep);
public void delete(ArchiveDescriptor archive,boolean deep);
public Collection<ArchiveDescriptor> getExisting();
public ArchiveDescriptor getByLayerId(long layerId);
}
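For context, a minimal usage sketch of the interface being removed, mirroring the commented-out tests deleted later in this commit. Illustrative only: the constructor and setters come from the ArchiveDescriptor model class that this commit also deletes.
// Illustrative sketch (not part of the commit): the removed interface as the
// old tests exercised it.
import java.time.Instant;
import org.gcube.application.geoportal.ArchiveManager;
import org.gcube.application.geoportal.ArchiveManagerImpl;
import org.gcube.application.geoportal.model.ArchiveDescriptor;
import org.gcube.application.geoportal.model.ArchiveDescriptor.ArchiveType;
public class ArchiveManagerSketch {
	public static void main(String[] args) {
		ArchiveManager manager = new ArchiveManagerImpl();
		ArchiveDescriptor toStore = new ArchiveDescriptor("My archive", ArchiveType.CONCESSIONI);
		toStore.setRegistrationTime(Instant.now());
		manager.store(toStore);                               // persist via JPA
		ArchiveDescriptor found = manager.getById(toStore.getId());
		manager.delete(found, false);                         // shallow delete: DB entry only
	}
}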

ArchiveManagerImpl.java

@@ -1,98 +0,0 @@
package org.gcube.application.geoportal;
import java.util.Collection;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;
import javax.persistence.Query;
import org.gcube.application.geoportal.model.ArchiveDescriptor;
import org.gcube.application.geoportal.model.db.DBConstants;
import org.gcube.application.geoportal.model.gis.LayerDescriptor;
public class ArchiveManagerImpl implements ArchiveManager {
private EntityManagerFactory emf=null;
public ArchiveManagerImpl() {
emf=Persistence.createEntityManagerFactory(DBConstants.INTERNAL.DB_NAME);
}
@Override
public ArchiveDescriptor getById(long id) {
EntityManager em=emf.createEntityManager();
try {
return em.find(ArchiveDescriptor.class, id);
}finally {
em.close();
}
}
@Override
public void store(ArchiveDescriptor desc) {
EntityManager em=emf.createEntityManager();
try {
em.getTransaction().begin();
em.persist(desc);
em.getTransaction().commit();
}finally {
em.close();
}
}
private void deleteEntry(ArchiveDescriptor toDelete) {
EntityManager em=emf.createEntityManager();
try {
em.getTransaction().begin();
em.remove(em.merge(toDelete));
em.getTransaction().commit();
}finally {
em.close();
}
}
@Override
public void delete(ArchiveDescriptor desc, boolean deep) {
if(deep) {
//for each layer
// delete Meta
// delete fte
// delete postgis table
// delete from WS
// delete entry
}
deleteEntry(desc);
}
@Override
public void delete(long id,boolean deep) {
delete(getById(id),deep);
}
@Override
public Collection<ArchiveDescriptor> getExisting() {
// EntityManager em=emf.createEntityManager();
// try{
// Query query = em.createNativeQuery("SELECT * FROM "+DBConstants.INTERNAL.Archive.TABLE, ArchiveDescriptor.class);
// return query.getResultList();
// }finally {
// em.close();
// }
return null;
}
@Override
public ArchiveDescriptor getByLayerId(long layerId) {
EntityManager em=emf.createEntityManager();
try{
return null;
// return em.find(LayerDescriptor.class, layerId).getArchive();
}finally {
em.close();
}
}
}
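If the commented-out body of getExisting() above were restored, it would read roughly as follows. Sketch only: it drops back into the class above and reuses its emf field and the table-name constant.
// Sketch: the commented-out getExisting() above, restored.
@Override
@SuppressWarnings("unchecked")
public Collection<ArchiveDescriptor> getExisting() {
	EntityManager em = emf.createEntityManager();
	try {
		Query query = em.createNativeQuery(
				"SELECT * FROM " + DBConstants.INTERNAL.Archive.TABLE,
				ArchiveDescriptor.class);
		return query.getResultList();   // rows mapped to entities by JPA
	} finally {
		em.close();
	}
}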

ContentHandler.java

@@ -157,7 +157,8 @@ public class ContentHandler {
}
//WS
//************* WS
private StorageHubClient sgClient;
private FolderContainer wsBase;
private FolderContainer appBase;

LayerService.java

@@ -1,32 +0,0 @@
package org.gcube.application.geoportal;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;
import org.gcube.application.geoportal.model.db.DBConstants;
import org.gcube.application.geoportal.model.gis.LayerDescriptor;
public class LayerService {
private EntityManagerFactory emf=null;
public LayerService() {
emf=Persistence.createEntityManagerFactory(DBConstants.INTERNAL.DB_NAME);
}
public void registerLayer(LayerDescriptor desc) {
EntityManager em=emf.createEntityManager();
try {
em.getTransaction().begin();
em.persist(desc);
em.getTransaction().commit();
}finally {
em.close();
}
}
}
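A usage sketch of the removed service. The five-argument LayerDescriptor constructor is the one generated by @RequiredArgsConstructor and appears in the commented-out tests deleted later in this commit.
// Illustrative only: persist a layer record through the removed service.
LayerService service = new LayerService();
service.registerLayer(
		new LayerDescriptor("gn-UUID", 12305L, "someLayer", "the_ws", "tablename"));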

Uploader.java

@@ -14,18 +14,11 @@ import java.util.Map;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;
import org.gcube.application.geoportal.model.ArchiveDescriptor;
import org.gcube.application.geoportal.model.ArchiveDescriptor.ArchiveType;
import org.gcube.application.geoportal.model.db.DBConstants;
import org.gcube.application.geoportal.model.db.DBInteractionException;
import org.gcube.application.geoportal.model.db.PostgisTable;
import org.gcube.application.geoportal.model.fault.DataParsingException;
import org.gcube.application.geoportal.model.fault.GeoPackageInteractionException;
import org.gcube.application.geoportal.model.fault.InvalidRecordException;
import org.gcube.application.geoportal.model.gis.LayerDescriptor;
import org.gcube.application.geoportal.model.Configuration;
import org.gcube.application.geoportal.model.db.DBConstants;
import org.gcube.application.geoportal.model.db.PostgisTable;
import org.gcube.application.geoportal.model.fault.InvalidRecordException;
import org.gcube.application.geoportal.utils.GpkgUtils;
import org.gcube.application.geoportal.utils.Layers;
import org.gcube.spatial.data.geonetwork.GeoNetworkPublisher;
import org.gcube.spatial.data.gis.GISInterface;
@@ -40,33 +33,33 @@ public class Uploader {
// private DatabaseConnection postgisConnection;
private GISInterface gis;
private GeoNetworkPublisher geonetwork;
private ArchiveManager archiveManager;
// private ArchiveManager archiveManager;
private Configuration config;
public Uploader(
GISInterface gis,
GeoNetworkPublisher geonetwork,
ArchiveManager manager,Configuration config) throws SQLException, IOException {
super();
// this.postgisConnection = postgisConnection;
this.gis = gis;
this.geonetwork = geonetwork;
this.archiveManager=manager;
this.config=config;
// public Uploader(
// GISInterface gis,
// GeoNetworkPublisher geonetwork,
// ArchiveManager manager,Configuration config) throws SQLException, IOException {
// super();
// // this.postgisConnection = postgisConnection;
// this.gis = gis;
// this.geonetwork = geonetwork;
// this.archiveManager=manager;
// this.config=config;
//
//
//
// }
}
public ArchiveDescriptor readArchive(File theArchive) {
// Unpack
// check file presence
// read xls
return null;
}
// public ArchiveDescriptor readArchive(File theArchive) {
// // Unpack
// // check file presence
// // read xls
// return null;
// }
// Expects CSV columns: nome,anno,regione,xcentroid,ycentroid,csv,shp,nome_csv,poligono,punti,linee
// e.g. {nome=Acquacadda Nuxis, anno=2019, regione=Sardegna, xcentroid=8.751, ycentroid=39.179, csv=TRUE, shp=TRUE, nome_csv=AcquacaddaNuxis_poligoni, poligono=TRUE, punti=FALSE, linee=FALSE}
@@ -110,12 +103,12 @@ public class Uploader {
for(CSVRecord r:parser.getRecords()) {
String nomeConcessione=r.get("nome");
try {
ArchiveDescriptor toRegister=new ArchiveDescriptor(nomeConcessione,ArchiveType.CONCESSIONI);
// ArchiveDescriptor toRegister=new ArchiveDescriptor(nomeConcessione,ArchiveType.CONCESSIONI);
//Each CSV Record is a Concessione
log.info("Importing "+nomeConcessione+" ["+r.getRecordNumber()+"] ");
ArrayList<LayerDescriptor> layers=new ArrayList<LayerDescriptor>();
// ArrayList<LayerDescriptor> layers=new ArrayList<LayerDescriptor>();
//publish layer(s)
@@ -135,7 +128,7 @@ public class Uploader {
String layerName=nomeConcessione+" "+csv;
log.debug("Creating layer "+layerName);
layers.add(Layers.publishPostgisTable(table, gis, geonetwork, layerName, config.getWorkspaceConcessioni(), config.getPostgisStore()));
// layers.add(Layers.publishPostgisTable(table, gis, geonetwork, layerName, config.getWorkspaceConcessioni(), config.getPostgisStore()));
}
}{
// TODO PUBLISH SHP LAYERS
@@ -145,7 +138,7 @@ public class Uploader {
Map<String,String> centroidsRow=new HashMap<String, String>();
centroidsRow.put(DBConstants.Concessioni.UUID, toRegister.getId()+"");
// centroidsRow.put(DBConstants.Concessioni.UUID, toRegister.getId()+"");
centroidsRow.put(DBConstants.Concessioni.ANNO, r.get("anno"));
centroidsRow.put(DBConstants.Concessioni.NOME, r.get("nome"));
centroidsRow.put(DBConstants.Concessioni.REGIONE, r.get("regione"));
@@ -176,14 +169,14 @@ public class Uploader {
//transfer to WS
//Fill archive details
toRegister.setVersion("1.0.0");
for(LayerDescriptor layer:layers)
// toRegister.setVersion("1.0.0");
// for(LayerDescriptor layer:layers)
// toRegister.addLayer(layer);
//store archive info into app DB
archiveManager.store(toRegister);
}catch(InvalidRecordException e) {
log.warn("Skipping record : ",e);
// archiveManager.store(toRegister);
// }catch(InvalidRecordException e) {
// log.warn("Skipping record : ",e);
}catch(SQLException e) {
throw new SQLException("Unable to insert record ",e);
}catch(Throwable t) {
@@ -311,21 +304,21 @@ public class Uploader {
public LayerDescriptor publishGpkgFeature(String gpkgFile, String tableName) throws Exception {
GeoPackage gpkg=GpkgUtils.open(new File(gpkgFile));
PostgisDBManagerI db=PostgisDBManager.get();
PostgisTableFactory tableFactory=new PostgisTableFactory(db);
PostgisTable table=tableFactory.fromGPKGFeatureTable(gpkg, tableName);
log.debug("Created table "+table);
db.commit();
LayerDescriptor desc=Layers.publishPostgisTable(table, gis, geonetwork, tableName, config.getWorkspacePreventiva(), config.getPostgisStore());
log.debug("Created layer "+desc);
return desc;
}
// public LayerDescriptor publishGpkgFeature(String gpkgFile, String tableName) throws Exception {
// GeoPackage gpkg=GpkgUtils.open(new File(gpkgFile));
//
//
// PostgisDBManagerI db=PostgisDBManager.get();
//
// PostgisTableFactory tableFactory=new PostgisTableFactory(db);
//
// PostgisTable table=tableFactory.fromGPKGFeatureTable(gpkg, tableName);
// log.debug("Created table "+table);
// db.commit();
// LayerDescriptor desc=Layers.publishPostgisTable(table, gis, geonetwork, tableName, config.getWorkspacePreventiva(), config.getPostgisStore());
// log.debug("Created layer "+desc);
// return desc;
// }
public void cleanUp() {
// list existing archives

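The comment in the hunk above documents the Concessioni CSV layout that Uploader consumed. A minimal, self-contained parsing sketch under that assumption, using the commons-csv API the file already imports; the file path is illustrative.
// Sketch only: header-driven read of the Concessioni CSV described above.
import java.io.FileReader;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;
public class ConcessioniCsvSketch {
	public static void main(String[] args) throws Exception {
		try (CSVParser parser = CSVFormat.DEFAULT.withFirstRecordAsHeader()
				.parse(new FileReader("concessioni.csv"))) {          // illustrative path
			for (CSVRecord r : parser.getRecords()) {                  // one Concessione per record
				String nome = r.get("nome");
				boolean hasCsv = Boolean.parseBoolean(r.get("csv"));   // TRUE/FALSE flags
				boolean hasShp = Boolean.parseBoolean(r.get("shp"));
				System.out.println(nome + " [" + r.getRecordNumber() + "] csv=" + hasCsv + " shp=" + hasShp);
			}
		}
	}
}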
ArchiveDescriptor.java

@@ -1,80 +0,0 @@
package org.gcube.application.geoportal.model;
import java.time.Instant;
import java.util.List;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import org.gcube.application.geoportal.model.content.AssociatedContent;
import org.gcube.application.geoportal.model.gis.LayerDescriptor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
import lombok.Setter;
import lombok.ToString;
@Getter
@Setter
@RequiredArgsConstructor
@NoArgsConstructor
//@Table(name = DBConstants.INTERNAL.Archive.TABLE)
@ToString
public class ArchiveDescriptor {
public static enum ArchiveType{
PREVENTIVA,CONCESSIONI
}
@Id @GeneratedValue(strategy=GenerationType.IDENTITY)
private long id;
@NonNull
private String name;
@NonNull
private ArchiveType type;
private String version;
private Instant registrationTime;
// content
private String archiveLocation;
// @OneToMany(cascade=CascadeType.ALL, mappedBy="archive", fetch=FetchType.EAGER)
private List<AssociatedContent> attachments;
// meta
private String metadataID;
// @OneToMany(mappedBy="archive", cascade=CascadeType.ALL, fetch=FetchType.EAGER)
private List<LayerDescriptor> layers;
public void addContent(AssociatedContent content) {
}
// public void addLayer(LayerDescriptor layer) {
// if(layers==null) layers=new ArrayList<LayerDescriptor>();
//
// if(!getLayers().contains(layer)) {
// getLayers().add(layer);
// if(layer.getArchive()!=null) {
// layer.getArchive().getLayers().remove(layer);
// }
// }
// layer.setArchive(this);
// }
}
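The commented-out addLayer above encodes the intended bidirectional bookkeeping between an archive and its layers. Restored as a sketch: it assumes LayerDescriptor still carries the archive back-reference that this commit also removes, plus a java.util.ArrayList import.
// Sketch: the commented-out addLayer(...) above, restored. Keeps both sides
// of the archive <-> layer association consistent.
public void addLayer(LayerDescriptor layer) {
	if (layers == null) layers = new ArrayList<LayerDescriptor>();
	if (!getLayers().contains(layer)) {
		getLayers().add(layer);
		if (layer.getArchive() != null) {
			layer.getArchive().getLayers().remove(layer);   // detach from previous owner
		}
	}
	layer.setArchive(this);
}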

LayerDescriptor.java

@@ -1,35 +0,0 @@
package org.gcube.application.geoportal.model.gis;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import org.gcube.application.geoportal.model.ArchiveDescriptor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
import lombok.Setter;
@Getter
@Setter
@RequiredArgsConstructor
@NoArgsConstructor
public class LayerDescriptor {
@NonNull
private String metaUUID;
@NonNull
private Long metaID;
@NonNull
private String layerName;
@NonNull
private String geoserverWorkspace;
@NonNull
private String tableName;
}

Layers.java

@@ -1,30 +1,5 @@
package org.gcube.application.geoportal.utils;
import java.io.File;
import java.io.IOException;
import javax.xml.bind.JAXBException;
import org.gcube.application.geoportal.PostgisDBManagerI;
import org.gcube.application.geoportal.model.MOSI;
import org.gcube.application.geoportal.model.MOSI.Extent_Type;
import org.gcube.application.geoportal.model.db.PostgisTable;
import org.gcube.application.geoportal.model.fault.PublishException;
import org.gcube.application.geoportal.model.gis.BBOX;
import org.gcube.application.geoportal.model.gis.LayerDescriptor;
import org.gcube.spatial.data.geonetwork.GeoNetworkPublisher;
import org.gcube.spatial.data.geonetwork.LoginLevel;
import org.gcube.spatial.data.geonetwork.iso.tpl.ISOMetadataByTemplate;
import org.gcube.spatial.data.gis.GISInterface;
import org.gcube.spatial.data.gis.is.GeoServerDescriptor;
import org.gcube.spatial.data.gis.model.report.PublishResponse;
import org.gcube.spatial.data.gis.model.report.Report.OperationState;
import org.geotoolkit.xml.XML;
import org.opengis.metadata.Metadata;
import freemarker.template.TemplateException;
import it.geosolutions.geoserver.rest.encoder.GSLayerEncoder;
import it.geosolutions.geoserver.rest.encoder.feature.GSFeatureTypeEncoder;
import lombok.extern.slf4j.Slf4j;
@@ -32,101 +7,101 @@ import lombok.extern.slf4j.Slf4j;
public class Layers {
static {
// Register custom metadata template
try {
ISOMetadataByTemplate.registerTemplateFolder(Files.getFileFromResources("iso_templates/macros.ftlx"));
}catch(Throwable t) {
log.error("UNABLE TO REGISTER TEMPLATES",t);
}
}
public static void deleteLayer(LayerDescriptor desc, GISInterface gis, GeoServerDescriptor geoserver,LoginLevel loginLevel, PostgisDBManagerI db) throws Exception {
// delete fte
gis.deleteLayer(desc.getGeoserverWorkspace(), desc.getLayerName(), desc.getMetaID(), geoserver, loginLevel);
// delete postgis table
db.deleteTable(desc.getTableName());
db.commit();
//TODO
// delete from WS
}
public static LayerDescriptor publishPostgisTable(PostgisTable table,GISInterface gis, GeoNetworkPublisher geonetwork, String layerTitle, String workspace, String storeName) throws Exception {
String DEFAULT_CRS="GEOGCS[\"WGS 84\", \n" +
" DATUM[\"World Geodetic System 1984\", \n" +
" SPHEROID[\"WGS 84\", 6378137.0, 298.257223563, AUTHORITY[\"EPSG\",\"7030\"]], \n" +
" AUTHORITY[\"EPSG\",\"6326\"]], \n" +
" PRIMEM[\"Greenwich\", 0.0, AUTHORITY[\"EPSG\",\"8901\"]], \n" +
" UNIT[\"degree\", 0.017453292519943295], \n" +
" AXIS[\"Geodetic longitude\", EAST], \n" +
" AXIS[\"Geodetic latitude\", NORTH], \n" +
" AUTHORITY[\"EPSG\",\"4326\"]]";
GSFeatureTypeEncoder fte=new GSFeatureTypeEncoder();
fte.setEnabled(true);
// fte.setLatLonBoundingBox(-180.0, -90.0, 180.0, 90.0, DEFAULT_CRS);
BBOX bbox=table.getBoundingBox();
fte.setLatLonBoundingBox(bbox.getMinLong(), bbox.getMinLat(), bbox.getMaxLong(), bbox.getMaxLat(), DEFAULT_CRS);
fte.setName(table.getTablename());
fte.setTitle(layerTitle);
// fte.setNativeCRS(DEFAULT_CRS);
GSLayerEncoder le=new GSLayerEncoder();
String defaultStyle=null;
switch(table.getGeometryColumnType()) {
case LINE : {
defaultStyle = "line";
break;
}case POINT: {
defaultStyle="point";
break;
}case POLYGON: {
defaultStyle = "polygon";
break;
}
}
le.setDefaultStyle(defaultStyle);
le.setEnabled(true);
Metadata geonetworkMeta=null;//generateMeta(null);
//publishDBTable
// GISInterface gis=GISInterface.get(geoserver);
PublishResponse resp = gis.publishDBTable(workspace,
storeName,
fte,
le,
geonetworkMeta,
geonetwork.getCurrentUserConfiguration("dataset", "_none_"),
LoginLevel.DEFAULT,
false);
log.debug("REsult : "+resp);
if(resp.getDataOperationResult()!=OperationState.COMPLETE||resp.getMetaOperationResult()!=OperationState.COMPLETE) {
throw new PublishException("Outcome was not COMPLETE ",resp);
}
String metadataUUID=geonetwork.getInfo(resp.getReturnedMetaId()).getUuid();
return new LayerDescriptor(
metadataUUID,
resp.getReturnedMetaId(),
fte.getName(),
workspace,
table.getTablename());
}
// static {
// // Register custom metadata template
// try {
// ISOMetadataByTemplate.registerTemplateFolder(Files.getFileFromResources("iso_templates/macros.ftlx"));
// }catch(Throwable t) {
// log.error("UNABLE TO REGISTER TEMPLATES",t);
// }
// }
//
// public static void deleteLayer(LayerDescriptor desc, GISInterface gis, GeoServerDescriptor geoserver,LoginLevel loginLevel, PostgisDBManagerI db) throws Exception {
// // delete fte
// gis.deleteLayer(desc.getGeoserverWorkspace(), desc.getLayerName(), desc.getMetaID(), geoserver, loginLevel);
// // delete postgis table
// db.deleteTable(desc.getTableName());
//
// db.commit();
// //TODO
// // delete from WS
// }
//
//
//
// public static LayerDescriptor publishPostgisTable(PostgisTable table,GISInterface gis, GeoNetworkPublisher geonetwork, String layerTitle, String workspace, String storeName) throws Exception {
//
//
//
// String DEFAULT_CRS="GEOGCS[\"WGS 84\", \n" +
// " DATUM[\"World Geodetic System 1984\", \n" +
// " SPHEROID[\"WGS 84\", 6378137.0, 298.257223563, AUTHORITY[\"EPSG\",\"7030\"]], \n" +
// " AUTHORITY[\"EPSG\",\"6326\"]], \n" +
// " PRIMEM[\"Greenwich\", 0.0, AUTHORITY[\"EPSG\",\"8901\"]], \n" +
// " UNIT[\"degree\", 0.017453292519943295], \n" +
// " AXIS[\"Geodetic longitude\", EAST], \n" +
// " AXIS[\"Geodetic latitude\", NORTH], \n" +
// " AUTHORITY[\"EPSG\",\"4326\"]]";
//
//
// GSFeatureTypeEncoder fte=new GSFeatureTypeEncoder();
// fte.setEnabled(true);
// // fte.setLatLonBoundingBox(-180.0, -90.0, 180.0, 90.0, DEFAULT_CRS);
// BBOX bbox=table.getBoundingBox();
// fte.setLatLonBoundingBox(bbox.getMinLong(), bbox.getMinLat(), bbox.getMaxLong(), bbox.getMaxLat(), DEFAULT_CRS);
// fte.setName(table.getTablename());
// fte.setTitle(layerTitle);
// // fte.setNativeCRS(DEFAULT_CRS);
//
// GSLayerEncoder le=new GSLayerEncoder();
//
// String defaultStyle=null;
//
// switch(table.getGeometryColumnType()) {
// case LINE : {
// defaultStyle = "line";
// break;
// }case POINT: {
// defaultStyle="point";
// break;
// }case POLYGON: {
// defaultStyle = "polygon";
// break;
// }
//
// }
//
// le.setDefaultStyle(defaultStyle);
// le.setEnabled(true);
//
// Metadata geonetworkMeta=null;//generateMeta(null);
//
// //publishDBTable
// // GISInterface gis=GISInterface.get(geoserver);
//
// PublishResponse resp = gis.publishDBTable(workspace,
// storeName,
// fte,
// le,
// geonetworkMeta,
// geonetwork.getCurrentUserConfiguration("dataset", "_none_"),
// LoginLevel.DEFAULT,
// false);
//
// log.debug("REsult : "+resp);
//
// if(resp.getDataOperationResult()!=OperationState.COMPLETE||resp.getMetaOperationResult()!=OperationState.COMPLETE) {
// throw new PublishException("Outcome was not COMPLETE ",resp);
// }
// String metadataUUID=geonetwork.getInfo(resp.getReturnedMetaId()).getUuid();
//
// return new LayerDescriptor(
// metadataUUID,
// resp.getReturnedMetaId(),
// fte.getName(),
// workspace,
// table.getTablename());
// }
// private static Metadata generateMetaForTemplate(MOSI mosi, LayerDescriptor layerDescriptor,BBOX bbox) {

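For reference, this is how Uploader invoked the now-commented publishPostgisTable before this commit (see the Uploader hunks above); every argument comes from the surrounding removed code.
// Call site as it appeared in Uploader (sketch).
LayerDescriptor desc = Layers.publishPostgisTable(
		table,                               // PostgisTable built from the CSV
		gis,                                 // GISInterface
		geonetwork,                          // GeoNetworkPublisher
		layerName,                           // layer title
		config.getWorkspaceConcessioni(),    // target GeoServer workspace
		config.getPostgisStore());           // target datastore name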
Geoportal.java

@@ -37,7 +37,7 @@ public class Geoportal {
System.out.println("CMD : "+cmd);
System.out.println("Options : "+options);
ArchiveManager archiveManager=new ArchiveManagerImpl();
// ArchiveManager archiveManager=new ArchiveManagerImpl();
// TokenSetter.set(MainUtils.getMandatory("CONTEXT", options));
@@ -69,7 +69,8 @@ public class Geoportal {
;
// postgisConnection.setAutocommit(true);
Uploader uploader=new Uploader(gis,geonetwork,archiveManager,config);
Uploader uploader=null;
// new Uploader(gis,geonetwork,archiveManager,config);
Boolean centroidsOnly=Boolean.parseBoolean(MainUtils.getOptional("SHALLOW", options, "true"));
Boolean removeExistent=Boolean.parseBoolean(MainUtils.getOptional("REMOVE_EXISTENT", options, "true"));
@@ -92,8 +93,8 @@ public class Geoportal {
case "GPKG_TABLE" :{
String tablename=MainUtils.getMandatory("TABLENAME",options);
uploader.publishGpkgFeature(MainUtils.getMandatory("GPKG",options),
tablename);
// uploader.publishGpkgFeature(MainUtils.getMandatory("GPKG",options),
// tablename);
break;
}

Archives.java

@@ -1,102 +0,0 @@
package org.gcube.application.geoportal.db;
import java.time.Instant;
import java.util.Collection;
import org.gcube.application.geoportal.ArchiveManager;
import org.gcube.application.geoportal.ArchiveManagerImpl;
import org.gcube.application.geoportal.model.ArchiveDescriptor;
import org.gcube.application.geoportal.model.ArchiveDescriptor.ArchiveType;
import org.gcube.application.geoportal.model.gis.LayerDescriptor;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class Archives {
// private static ArchiveManager manager;
//
// @BeforeClass
// public static void init() {
// manager=new ArchiveManagerImpl();
// }
//
//
//// public static EntityManagerFactory getEMF (){
//// if (emf == null){
//// emf = Persistence.createEntityManagerFactory(Constants.ARCHIVE_DB);
//// }
//// return emf;
//// }
//
//
// @Test
// public void listExistent() {
// ArchiveDescriptor toStore=new ArchiveDescriptor("My to be found archive ",ArchiveType.CONCESSIONI);
// toStore.setRegistrationTime(Instant.now());
// log.debug("Storing "+toStore);
// manager.store(toStore);
//
//
// Collection<ArchiveDescriptor> list=manager.getExisting();
//
// log.debug("Found "+list.size()+" Archives");
//
// for(ArchiveDescriptor archive: list ) {
// System.out.println(archive);
// }
//
// }
//
//
// @Test
// public void insertTest() {
//// em.joinTransaction();
// ArchiveDescriptor toStore=new ArchiveDescriptor("My just inserted archive ",ArchiveType.CONCESSIONI);
// toStore.setRegistrationTime(Instant.now());
//
// LayerDescriptor layer=new LayerDescriptor("gn-UUID",12305l,"someLayer","the_ws","tablename");
//
//
//// toStore.addLayer(layer);
// log.debug("Storing "+toStore);
// manager.store(toStore);
//
// log.debug("Stored with layer "+toStore);
//
//
// log.debug("Getting archive by id "+toStore.getId());
// ArchiveDescriptor found=manager.getById(toStore.getId());
// log.debug("Found "+found);
// log.debug("Found Layer "+found.getLayers().get(0));
//// log.debug("Found Layer -> Archive "+found.getLayers().get(0).getArchive());
// Assert.assertNotNull(found);
// Assert.assertEquals(toStore, found);
//// em.getTransaction().rollback();
// }
//
// @Test
// public void retrievalByLayerTest() {
// ArchiveDescriptor toStore=new ArchiveDescriptor("My just inserted archive ",ArchiveType.CONCESSIONI);
// toStore.setRegistrationTime(Instant.now());
//
// LayerDescriptor layer=new LayerDescriptor("gn-UUID",12305l,"someLayer","the_ws","tablename");
//
//// toStore.addLayer(layer);
//
// log.debug("Storing "+toStore);
// manager.store(toStore);
// log.debug("Stored "+toStore);
//// ArchiveDescriptor found=manager.getByLayerId(layer.getId());
//// Assert.assertNotNull(found);
// }
//
//
// @Test
// public void cleanup() {
// for(ArchiveDescriptor desc: manager.getExisting())
// manager.delete(desc, false);
// }
}

LayerTest.java

@@ -1,158 +0,0 @@
package org.gcube.application.geoportal.db;
import java.io.File;
import java.io.FileFilter;
import java.util.Map;
import org.gcube.application.geoportal.PostgisDBManager;
import org.gcube.application.geoportal.PostgisDBManagerI;
import org.gcube.application.geoportal.TokenSetter;
import org.gcube.application.geoportal.Uploader;
import org.gcube.application.geoportal.model.db.DatabaseConnection;
import org.gcube.application.geoportal.model.gis.LayerDescriptor;
import org.gcube.application.geoportal.utils.CSV;
import org.gcube.application.geoportal.utils.Files;
import org.gcube.application.geoportal.utils.Layers;
import org.gcube.application.geoportal.utils.MainUtils;
import org.gcube.application.geoportal.utils.Workspace;
import org.gcube.common.storagehub.client.dsl.FileContainer;
import org.gcube.common.storagehub.client.dsl.FolderContainer;
import org.gcube.common.storagehub.client.dsl.StorageHubClient;
import org.gcube.spatial.data.geonetwork.GeoNetwork;
import org.gcube.spatial.data.geonetwork.GeoNetworkPublisher;
import org.gcube.spatial.data.geonetwork.LoginLevel;
import org.gcube.spatial.data.gis.GISInterface;
import it.geosolutions.geoserver.rest.encoder.datastore.GSShapefileDatastoreEncoder;
public class LayerTest {
private static final String USAGE="<OPTIONS>"
+ "options : "
+ "--DBUSER"
+ "--DBPWD"
+ "--DBURL"
+ "--COMMIT"
+ "--CSV_PATH | --CSV | --SHP | --SHP_PATH";
public static void main(String[] args) throws Throwable {
if(args.length<1) {
System.out.println(USAGE);
System.exit(0);
}
Map<String,String> options=MainUtils.asMap(args);
System.out.println("Options : "+options);
// CHECK FILE(s) TO USE
boolean consider_shp=options.containsKey("SHP")||options.containsKey("SHP_PATH");
File[] toTest=null;
if(consider_shp) {
if(options.containsKey("SHP_PATH")) {
String path=MainUtils.getMandatory("SHP_PATH", options);
System.out.println("SCANNING "+path);
File csvFolder=new File(path);
toTest=csvFolder.listFiles(new FileFilter() {
@Override
public boolean accept(File pathname) {
if(pathname.isDirectory()) return false;
return pathname.getName().endsWith(".shp");
}
});
}else {
toTest=new File[] {new File(MainUtils.getMandatory("SHP", options))};
}
}else {
if(options.containsKey("CSV_PATH")) {
String path=MainUtils.getMandatory("CSV_PATH", options);
System.out.println("SCANNING "+path);
File csvFolder=new File(path);
toTest=csvFolder.listFiles(new FileFilter() {
@Override
public boolean accept(File pathname) {
if(pathname.isDirectory()) return false;
return pathname.getName().endsWith(".csv");
}
});
}else {
toTest=new File[] {new File(MainUtils.getMandatory("CSV", options))};
}
}
// SETUP EVERYTHING
System.out.println("PREPARING CONNECTIONS...");
DatabaseConnection postgisConnection= new DatabaseConnection(MainUtils.getMandatory("DBUSER",options),
MainUtils.getMandatory("DBPWD",options), MainUtils.getMandatory("DBURL",options));
postgisConnection.setAutocommit(false);
PostgisDBManager.init(postgisConnection);
PostgisDBManagerI db=PostgisDBManager.get();
TokenSetter.set("/d4science.research-infrastructures.eu/D4OS/GeoNA-Prototype");
GeoNetworkPublisher geonetwork= GeoNetwork.get();
geonetwork.login(LoginLevel.DEFAULT);
GISInterface gis=GISInterface.get();
// GET WS DETAILS
String destinationFolderId="ccb3f010-d16b-4b05-8d6f-580530019b0c";
StorageHubClient client=Workspace.getClient();
FolderContainer destinationFolder=Workspace.getFolderById(destinationFolderId, client);
String gs_workspace = "geona-proto";
for(File f:toTest) {
if(consider_shp) {
// ************ SHP
FileContainer wsFile=Workspace.storeByFolderId(destinationFolder, f, client);
GSShapefileDatastoreEncoder shapeEncoder=new GSShapefileDatastoreEncoder(Files.getName(f.getPath()),
wsFile.getPublicLink());
gis.createDataStore(gs_workspace, shapeEncoder);
}else {
// ************ CSV
String csvLayer=f.getAbsolutePath();
System.out.println("Publishing from file "+csvLayer);
// LayerDescriptor desc= Uploader.publishLayer(csvLayer, CSV.getTypeFromPath(csvLayer), db, gis,geonetwork);
//
// System.out.println("Created "+desc);
//
// Layers.deleteLayer(desc, gis, gis.getCurrentGeoServerDescriptor(), LoginLevel.DEFAULT, db);
// System.out.println("DELETED");
}
}
}
}
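One side note on the test's file scanning: java.io.FileFilter is a functional interface, so the two anonymous classes above can be collapsed into a lambda; a sketch:
// Sketch: lambda equivalent of the anonymous FileFilter above.
File[] toTest = csvFolder.listFiles(
		pathname -> !pathname.isDirectory() && pathname.getName().endsWith(".csv"));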