Fixed incoherence between layer and fileSet base name

parent 0eb83ef6a8
commit 5ea5f1adfd
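In short: when a shapefile fileset is materialized to GeoServer, the published layer name is now derived from the base name of the fileset's .shp file (de-duplicated against layers already in the workspace), and every transferred sibling file is renamed so its base name matches the chosen layer name. Below is a minimal sketch of that naming logic, assuming only plain java.util types; the class and method names are illustrative, not the service API, and the actual change lives in SDIManager further down.

import java.util.List;
import java.util.Set;

class LayerNamingSketch {

    // Pick the .shp entry of the fileset and strip its extension.
    static String deriveBaseName(List<String> fileNames) {
        for (String name : fileNames)
            if (name.endsWith(".shp"))
                return name.substring(0, name.lastIndexOf('.'));
        throw new IllegalStateException("no .shp file in fileset");
    }

    // Keep the shp base name as the layer name, appending a counter only on clashes.
    static String resolveLayerName(String baseName, Set<String> existingLayers) {
        String candidate = baseName;
        int count = 0;
        while (existingLayers.contains(candidate))
            candidate = baseName + "_" + (++count);
        return candidate;
    }

    // Each sibling file (.shp, .dbf, .shx, .prj, ...) is then renamed so that its base
    // name matches the chosen layer name, as in the diff below:
    //   completeFilename = completeFilename.replaceAll(baseName, toSetLayerName);
}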
@@ -107,7 +107,7 @@ public class StatelessClientTests extends BasicVreTests{

@Test
public void testUploadFileSet() throws Exception {
Concessione testObject= client.createNew(TestModel.prepareEmptyConcessione());
Concessione testObject= client.createNew(TestModel.prepareConcessione());
AddSectionToConcessioneRequest request=
// FileSets.prepareRequest(new StorageUtils(),Paths.RELAZIONE,new File(TestModel.getBaseFolder(),"relazione.pdf"));
FileSets.build(Paths.ABSTRACT_RELAZIONE).add(
@@ -130,43 +130,24 @@ public class StatelessClientTests extends BasicVreTests{

@Test
public Concessione testPublsh() throws Exception {
Concessione c= client.createNew(TestModel.prepareConcessione(1,1));
StorageUtils storage = new StorageUtils();

String mongoId=c.getMongo_id();

client.registerFileSet(mongoId,
FileSets.prepareRequest(storage,Paths.RELAZIONE,new File(TestModel.getBaseFolder(),"relazione.pdf")));

client.registerFileSet(mongoId,
FileSets.prepareRequest(storage,Paths.RELAZIONE,new File(TestModel.getBaseFolder(),"relazione.pdf")));

client.registerFileSet(mongoId,
FileSets.prepareRequest(storage,Paths.imgByIndex(0),new File(TestModel.getBaseFolder(),"immagine.png")));

client.registerFileSet(mongoId,
FileSets.prepareRequest(storage,Paths.POSIZIONAMENTO,new File(TestModel.getBaseFolder(),"pos.shp")));

client.registerFileSet(mongoId,
FileSets.prepareRequest(storage,Paths.piantaByIndex(0),new File(TestModel.getBaseFolder(),"pianta.shp")));

c=client.publish(mongoId);
public void testPublsh() throws Exception {
Concessione c=prepare();

assertTrue(c.getReport().getStatus().equals(ValidationStatus.PASSED));

return c;

}

@Test
public void testCleanFileSet() throws Exception {

Concessione testObject=testPublsh();
Concessione testObject=prepare();
//Precheck to be sure
assertFalse(testObject.getPosizionamentoScavo().getActualContent().isEmpty());
assertFalse(testObject.getPianteFineScavo().get(0).getActualContent().isEmpty());

client.unPublish(testObject.getMongo_id());

//Clear pos
testObject=client.cleanFileSet(testObject.getMongo_id(),Paths.POSIZIONAMENTO);
assertTrue(testObject.getPosizionamentoScavo().getActualContent().isEmpty());

@@ -177,6 +158,37 @@ public class StatelessClientTests extends BasicVreTests{
}


private Concessione prepare() throws Exception {
int numImgs=5;
Concessione c= client.createNew(TestModel.prepareConcessione(1,numImgs));
StorageUtils storage = new StorageUtils();

String mongoId=c.getMongo_id();

client.registerFileSet(mongoId,
FileSets.prepareRequest(storage,Paths.RELAZIONE,new File(TestModel.getBaseFolder(),"relazione.pdf")));

client.registerFileSet(mongoId,
FileSets.prepareRequest(storage,Paths.RELAZIONE,new File(TestModel.getBaseFolder(),"relazione.pdf")));

for(int i=0;i<numImgs;i++)
client.registerFileSet(mongoId,
FileSets.build(Paths.imgByIndex(i)).
add(storage.putOntoStorage(new File(TestModel.getBaseFolder(),"immagine.png"),
i+"San Mauro_drone totale.JPG")).getTheRequest());


client.registerFileSet(mongoId,
FileSets.prepareRequest(storage,Paths.POSIZIONAMENTO,new File(TestModel.getBaseFolder(),"pos.shp")));

client.registerFileSet(mongoId,
FileSets.prepareRequest(storage,Paths.piantaByIndex(0),new File(TestModel.getBaseFolder(),"pianta.shp")));


c=client.publish(mongoId);
return c;
}

// UTILS

public static long count(Iterator<?> iterator){
@@ -15,7 +15,7 @@ public class ContextUtils {
public static String getCurrentScope(){
try{
String token=SecurityTokenProvider.instance.get();
log.debug("Token is : "+token);
log.debug("Token is : "+token.substring(0,2)+"..."+token.substring(token.length()-3));
if(token==null) throw new Exception("Security Token is null");
AuthorizationEntry entry = authorizationService().get(token);
return entry.getContext();

@@ -29,7 +29,7 @@ public class ContextUtils {
public static String getCurrentCaller(){
try{
String token=SecurityTokenProvider.instance.get();
log.debug("Token is : "+token);
log.debug("Token is : "+token.substring(0,2)+"..."+token.substring(token.length()-3));
if(token==null) throw new Exception("Security Token is null");
AuthorizationEntry entry = authorizationService().get(token);
return entry.getClientInfo().getId();
@@ -1,13 +1,60 @@
package org.gcube.application.geoportal.common.utils;

import lombok.extern.slf4j.Slf4j;

import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.nio.charset.Charset;
import java.nio.file.Paths;
import java.util.*;

@Slf4j
public class Files {


public static Map<String, List<File>> getAllShapeSet(File baseFolder,boolean recursive) throws IOException {
return clusterizeFilesByExtension(baseFolder,".shp",recursive);
}

/*
Map shpAbsolutePath -> fileset
*/
private static Map<String, List<File>> clusterizeFilesByExtension(File base,String extension,Boolean recursive) throws IOException {
HashMap<String,List<File>> toReturn = new HashMap<>();
log.debug("Clustering "+base.getAbsolutePath());
List<File> targetFiles=new ArrayList<>();

// Identify shps
if(base.isDirectory()){
// Get all shps
targetFiles.addAll(Arrays.asList(base.listFiles((dir, name)->{return name.endsWith(extension);})));
// recursive
if(recursive)
for(File f : base.listFiles((dir,name)-> {return new File(dir,name).isDirectory();}))
toReturn.putAll(clusterizeFilesByExtension(f,extension,recursive));
}else {
targetFiles.add(base);
}

// Group files by shps
targetFiles.forEach(f->{
String basename=f.getName().substring(0,f.getName().lastIndexOf("."));
toReturn.put(f.getAbsolutePath(),getSiblings(f.getParentFile(),basename));
});

return toReturn;
}


private static List<File> getSiblings(File location,String baseName){
List<File> fileset=new ArrayList<>();
for (File shpSet : location.listFiles((dir, name) -> {return name.startsWith(baseName);}))
fileset.add(shpSet);
return fileset;
}

public static File getFileFromResources(String fileName) {

ClassLoader classLoader =Files.class.getClassLoader();
@@ -40,7 +40,7 @@ public class StorageUtils {
public TempFile putOntoStorage(File source, String filename) throws RemoteBackendException, FileNotFoundException{
if(source.exists() && source.canRead())
return putOntoStorage(new FileInputStream(source),filename);
else throw new FileNotFoundException("Unable to read "+filename);
else throw new FileNotFoundException("Unable to read "+source.getAbsolutePath()+" ("+filename+")");
}

public TempFile[] putOntoStorage(File... source) throws RemoteBackendException, FileNotFoundException{
@@ -5,15 +5,20 @@ import org.gcube.application.geoportal.common.utils.Files;
import org.junit.Test;

import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.Map;

import static org.junit.Assert.assertTrue;

@Slf4j
public class FilesTests {

static File baseFolder=new File("../test-data/concessioni");

@Test
public void testNames(){
for(String name : new File("../test-data/concessioni").list()){
for(String name : baseFolder.list()){
if(name.contains(".")) {
String originalExtension = name.substring(name.indexOf("."));
String obtained = Files.fixFilename(name);

@@ -23,4 +28,16 @@ public class FilesTests {
}
}

@Test
public void testClustering() throws IOException {
Map<String, List<File>> map=Files.getAllShapeSet(baseFolder.getParentFile(),true);
map.forEach((k, v)->{
System.out.println(k);
v.forEach(f->{System.out.println(f.getName());});
});

assertTrue(map.get(new File(baseFolder,"pos.shp").getAbsolutePath()).size()==5);
assertTrue(map.get(new File(baseFolder,"pianta.shp").getAbsolutePath()).size()==8);
}

}
@@ -59,7 +59,7 @@ public class WorkspaceManager {
public FolderContainer createFolder(FolderOptions opts) throws StorageHubException {
if(opts.getParent()==null)
opts.setParent(appBase);
return createFolder(opts,sgClient);
return createFolderRoutine(opts);
}

public FileContainer getFileById(String id) throws StorageHubException {

@@ -88,13 +88,13 @@ public class WorkspaceManager {
targetName=path.substring(path.lastIndexOf("/")+1);
}
log.debug("Creating "+targetName);
return createFolder(new FolderOptions(targetName,"",targetParent),sgClient);
return createFolder(new FolderOptions(targetName,"",targetParent));
}
}


public WorkspaceContent storeToWS(FileOptions opts) throws FileNotFoundException, StorageHubException {
FileContainer item=createFile(opts,sgClient);
FileContainer item=createFileRoutine(opts);
item=sgClient.open(item.getId()).asFile();

WorkspaceContent content=new WorkspaceContent();

@@ -126,13 +126,13 @@ public class WorkspaceManager {
}

@Synchronized
private static FolderContainer createFolder(FolderOptions opts, StorageHubClient sgClient) throws StorageHubException {
private static FolderContainer createFolderRoutine(FolderOptions opts) throws StorageHubException {
opts.setFolderName(Files.fixFilename(opts.getFolderName()));
return opts.getParent().newFolder(opts.getFolderName(),opts.getFolderDescription());
}

@Synchronized
private static FileContainer createFile(FileOptions opts, StorageHubClient sgClient) throws StorageHubException {
private static FileContainer createFileRoutine(FileOptions opts) throws StorageHubException {
opts.setFileName(Files.fixFilename(opts.getFileName()));
return opts.getParent().uploadFile(opts.getIs(), opts.getFileName(), opts.getFileDescription());
}
@@ -1,4 +1,4 @@
package org.gcube.application.geoportal.service.engine;
package org.gcube.application.geoportal.service.engine.materialization;

import it.geosolutions.geoserver.rest.GeoServerRESTPublisher;
import it.geosolutions.geoserver.rest.GeoServerRESTPublisher.UploadMethod;

@@ -13,6 +13,7 @@ import lombok.extern.slf4j.Slf4j;
import org.gcube.application.geoportal.common.model.legacy.*;
import org.gcube.application.geoportal.common.model.rest.DatabaseConnection;
import org.gcube.application.geoportal.common.utils.Files;
import org.gcube.application.geoportal.service.engine.WorkspaceManager;
import org.gcube.application.geoportal.service.model.internal.db.PostgisTable;
import org.gcube.application.geoportal.service.model.internal.faults.SDIInteractionException;
import org.gcube.common.storagehub.client.dsl.FileContainer;

@@ -28,6 +29,7 @@ import org.gcube.spatial.data.gis.is.AbstractGeoServerDescriptor;
import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.function.Consumer;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -101,75 +103,90 @@ public class SDIManager {

log.debug("Publishing "+currentElement+" files to geoserver @ "+geoserverHostName);

if(currentElement.getActualContent()==null||currentElement.getActualContent().isEmpty())
throw new SDIInteractionException("Nothing to publish");

GeoServerContent content=new GeoServerContent();
content.setGeoserverHostName(geoserverHostName);
content.setWorkspace(workspace);
WorkspaceManager wsManager=new WorkspaceManager();


currentElement.getActualContent().forEach((PersistedContent c)->{
try {
if(c instanceof WorkspaceContent) {
WorkspaceContent wc=(WorkspaceContent) c;
FileContainer fc=wsManager.getFileById(wc.getStorageID());

String completeFilename=Files.fixFilename(fc.get().getName());

String filename=
completeFilename.contains(".")?
completeFilename.substring(0, completeFilename.indexOf(".")):completeFilename;

Destination destination=new Destination(completeFilename);
destination.setCreateSubfolders(true);
destination.setOnExistingFileName(DestinationClashPolicy.REWRITE);
destination.setOnExistingSubFolder(DestinationClashPolicy.APPEND);

destination.setPersistenceId("geoserver");
destination.setSubFolder("GNA/"+projectId+"/"+
currentElement.getMongo_id()+"/"+filename);

log.debug("Sending "+wc+" to "+destination);
TransferResult result=dtGeoServer.httpSource(fc.getPublicLink(), destination);
log.debug("Transferred "+result);

content.getFileNames().add(completeFilename);
// ******** IDENTIFY LAYER NAME correct layer name
// Must be unique under same WS
// equal to shp base name

content.setGeoserverPath(result.getRemotePath().substring(0,result.getRemotePath().lastIndexOf("/")));
String baseName= "";

// Chose layer name first identifying filename
for(PersistedContent p:currentElement.getActualContent()){
if(p instanceof WorkspaceContent) {
WorkspaceContent w= (WorkspaceContent) p;
if(w.getName().endsWith(".shp")) {
log.debug("SHP is {}",w.getName());
baseName=Files.fixFilename(w.getName().substring(0,w.getName().lastIndexOf('.')));
break;
}
}
}catch(Throwable t) {
log.warn("Unable to transfer Persisted content"+c,t);
}

});


if(content.getFileNames().isEmpty())
throw new SDIInteractionException("No Persisted content found in "+currentElement);

String completeFileName=content.getFileNames().get(0);
String filename=completeFileName.contains(".")?completeFileName.substring(0, completeFileName.lastIndexOf(".")):completeFileName;

String remoteFolder=content.getGeoserverPath();

String toSetLayerName=filename;
}

String toSetLayerName=baseName;

//Check if layer already exists
int count=0;
GeoServerRESTReader gsReader=currentGeoserver.getReader();
while(gsReader.getLayer(workspace,toSetLayerName)!=null){
count++;
toSetLayerName=filename+"_"+count;
log.debug("layer for "+filename+" already existing, trying "+toSetLayerName);
toSetLayerName=baseName+"_"+count;
log.debug("layer for "+baseName+" already existing, trying "+toSetLayerName);
}


String folderRelativePath="GNA/" + projectId + "/" +
currentElement.getMongo_id() + "/" + toSetLayerName;


for (PersistedContent c : currentElement.getActualContent()) {
if (c instanceof WorkspaceContent) {
WorkspaceContent wc = (WorkspaceContent) c;
FileContainer fc = wsManager.getFileById(wc.getStorageID());

String completeFilename = Files.fixFilename(fc.get().getName());

completeFilename=completeFilename.replaceAll(baseName, toSetLayerName);

Destination destination = new Destination(completeFilename);
destination.setCreateSubfolders(true);
destination.setOnExistingFileName(DestinationClashPolicy.REWRITE);
destination.setOnExistingSubFolder(DestinationClashPolicy.APPEND);

destination.setPersistenceId("geoserver");
destination.setSubFolder(folderRelativePath);

log.debug("Sending " + wc + " to " + destination);
TransferResult result = SDIManager.this.getDtGeoServer().httpSource(fc.getPublicLink(), destination);
log.debug("Transferred " + result);

content.getFileNames().add(completeFilename);

content.setGeoserverPath(result.getRemotePath().substring(0, result.getRemotePath().lastIndexOf("/")));
}
}


// String completeFileName=content.getFileNames().get(0);
// String filename=completeFileName.contains(".")?completeFileName.substring(0, completeFileName.lastIndexOf(".")):completeFileName;
//
// String remoteFolder=content.getGeoserverPath();


String storeName=toSetLayerName+"_store";
content.setStore(storeName);
content.setFeatureType(toSetLayerName);

@@ -177,10 +194,11 @@ public class SDIManager {
GeoServerRESTPublisher publisher=currentGeoserver.getPublisher();
log.debug("Trying to create remote workspace : "+workspace);
createWorkspace(workspace);

log.debug("Publishing remote folder "+remoteFolder);

URL directoryPath=new URL("file:"+remoteFolder+"/"+filename+".shp");
String folderAbsolutePath=content.getGeoserverPath();
log.debug("Publishing remote folder "+folderAbsolutePath);

URL directoryPath=new URL("file:"+folderAbsolutePath+"/"+toSetLayerName+".shp");


//TODO Evaluate SRS

@@ -410,5 +428,7 @@ public class SDIManager {
dtGeoServer.getWebClient().delete(path);
}


}
@ -16,12 +16,11 @@ import org.gcube.application.geoportal.common.rest.TempFile;
|
|||
import org.gcube.application.geoportal.common.utils.Files;
|
||||
import org.gcube.application.geoportal.common.utils.StorageUtils;
|
||||
import org.gcube.application.geoportal.service.engine.ImplementationProvider;
|
||||
import org.gcube.application.geoportal.service.engine.SDIManager;
|
||||
import org.gcube.application.geoportal.service.engine.materialization.SDIManager;
|
||||
import org.gcube.application.geoportal.service.engine.WorkspaceManager;
|
||||
import org.gcube.application.geoportal.service.engine.WorkspaceManager.FileOptions;
|
||||
import org.gcube.application.geoportal.service.engine.WorkspaceManager.FolderOptions;
|
||||
import org.gcube.application.geoportal.service.engine.postgis.PostgisIndex;
|
||||
import org.gcube.application.geoportal.service.engine.providers.StorageClientProvider;
|
||||
import org.gcube.application.geoportal.service.model.internal.faults.*;
|
||||
import org.gcube.application.geoportal.service.utils.Serialization;
|
||||
import org.gcube.common.storagehub.client.dsl.FolderContainer;
|
||||
|
|
|
@ -5,7 +5,7 @@ import org.gcube.application.geoportal.common.model.legacy.Concessione;
|
|||
import org.gcube.application.geoportal.common.model.rest.DatabaseConnection;
|
||||
import org.gcube.application.geoportal.common.model.rest.PostgisIndexDescriptor;
|
||||
import org.gcube.application.geoportal.service.engine.ImplementationProvider;
|
||||
import org.gcube.application.geoportal.service.engine.SDIManager;
|
||||
import org.gcube.application.geoportal.service.engine.materialization.SDIManager;
|
||||
import org.gcube.application.geoportal.service.model.internal.db.DBConstants;
|
||||
import org.gcube.application.geoportal.service.model.internal.db.PostgisTable;
|
||||
import org.gcube.application.geoportal.service.model.internal.db.PostgisTable.Field;
|
||||
|
|
|
@@ -13,4 +13,15 @@ public class MongoConnection {
private String database;
private List<String> hosts=new ArrayList<String>();
private int port;

@Override
public String toString() {
return "MongoConnection{" +
"user='" + user + '\'' +
", password=***" +
", database='" + database + '\'' +
", hosts=" + hosts +
", port=" + port +
'}';
}
}
@ -1,5 +1,7 @@
|
|||
package org.gcube.application.geoportal.service;
|
||||
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.gcube.application.cms.tests.TokenSetter;
|
||||
import org.gcube.application.geoportal.service.rest.GuardedMethod;
|
||||
import org.gcube.application.geoportal.service.utils.Serialization;
|
||||
import org.glassfish.jersey.test.JerseyTest;
|
||||
|
@ -7,7 +9,7 @@ import org.junit.BeforeClass;
|
|||
|
||||
import javax.ws.rs.core.Application;
|
||||
import javax.ws.rs.core.Response;
|
||||
|
||||
@Slf4j
|
||||
public class BasicServiceTestUnit extends JerseyTest {
|
||||
|
||||
|
||||
|
@ -25,6 +27,7 @@ public class BasicServiceTestUnit extends JerseyTest {
|
|||
GuardedMethod.addPreoperation(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
log.debug("TEST IMPL : Setting context "+scope+" in received call");
|
||||
TokenSetter.set(scope);
|
||||
}
|
||||
});
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
package org.gcube.application.geoportal.service;
|
||||
|
||||
import org.gcube.application.cms.tests.TokenSetter;
|
||||
import org.gcube.application.cms.tests.model.TestFilters;
|
||||
import org.gcube.application.cms.tests.model.TestQueries;
|
||||
import org.gcube.application.cms.tests.model.TestModel;
|
||||
|
@ -9,6 +10,7 @@ import org.gcube.application.geoportal.common.model.legacy.LayerConcessione;
|
|||
import org.gcube.application.geoportal.common.model.legacy.report.ValidationReport.ValidationStatus;
|
||||
import org.gcube.application.geoportal.common.rest.InterfaceConstants;
|
||||
import org.gcube.application.geoportal.common.utils.FileSets;
|
||||
import org.gcube.application.geoportal.common.utils.Files;
|
||||
import org.gcube.application.geoportal.common.utils.StorageUtils;
|
||||
import org.gcube.application.geoportal.service.utils.Serialization;
|
||||
import org.json.JSONObject;
|
||||
|
@ -23,6 +25,7 @@ import javax.ws.rs.core.Response;
|
|||
import java.io.File;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
|
||||
import static org.junit.Assert.*;
|
||||
|
@ -63,12 +66,21 @@ public class ConcessioniOverMongoTest extends BasicServiceTestUnit{
|
|||
post(Entity.entity(Serialization.write(builder.getTheRequest()),
|
||||
MediaType.APPLICATION_JSON)),Concessione.class);
|
||||
}
|
||||
|
||||
private static Concessione publish(WebTarget target, Concessione conc) throws Exception {
|
||||
Response resp=target.path(PUBLISH_PATH).path(conc.getMongo_id()).request(MediaType.APPLICATION_JSON).
|
||||
put(Entity.entity(Serialization.write(conc), MediaType.APPLICATION_JSON));
|
||||
|
||||
private static Concessione update(Concessione c, WebTarget target) throws Exception {
|
||||
|
||||
return check(target.path(c.getMongo_id()).request(MediaType.APPLICATION_JSON).
|
||||
put(Entity.entity(Serialization.write(c),
|
||||
MediaType.APPLICATION_JSON)),Concessione.class);
|
||||
}
|
||||
|
||||
private static Concessione publish(WebTarget target, String id) throws Exception {
|
||||
Response resp=target.path(PUBLISH_PATH).path(id).request(MediaType.APPLICATION_JSON).
|
||||
put(Entity.entity(Serialization.write(id), MediaType.APPLICATION_JSON));
|
||||
return check(resp,Concessione.class);
|
||||
}
|
||||
|
||||
|
||||
private static Concessione unpublish(WebTarget target, String id) throws Exception {
|
||||
Response resp=target.path(PUBLISH_PATH).path(id).request(MediaType.APPLICATION_JSON).
|
||||
delete();
|
||||
|
@ -178,10 +190,11 @@ public class ConcessioniOverMongoTest extends BasicServiceTestUnit{
|
|||
@Test
|
||||
public void republish() throws Exception{
|
||||
WebTarget target=target(PATH);
|
||||
Concessione published=getFullPublished(target);
|
||||
// Concessione published=getFullPublished(target);
|
||||
Concessione published=getById(target,"6155ba6002ad3d2c23b72b5a");
|
||||
published = unpublish(target,published.getMongo_id());
|
||||
System.out.println("Republishing..");
|
||||
published=publish(target,published);
|
||||
published=publish(target,published.getMongo_id());
|
||||
Assert.assertEquals(published.getReport().getStatus(),ValidationStatus.PASSED);
|
||||
}
|
||||
|
||||
|
@ -191,7 +204,7 @@ public class ConcessioniOverMongoTest extends BasicServiceTestUnit{
|
|||
//Republishing
|
||||
WebTarget target=target(PATH);
|
||||
String id="610415af02ad3d05b5f81ee3";
|
||||
publish(target,unpublish(target,id));
|
||||
publish(target,unpublish(target,id).getMongo_id());
|
||||
target.path(id).queryParam(InterfaceConstants.Parameters.FORCE,true).request(MediaType.APPLICATION_JSON).delete();
|
||||
}
|
||||
|
||||
|
@ -279,6 +292,7 @@ public class ConcessioniOverMongoTest extends BasicServiceTestUnit{
|
|||
public void publish() throws Exception {
|
||||
WebTarget target=target(PATH);
|
||||
Concessione published=getFullPublished(target);
|
||||
|
||||
System.out.println("Published : "+published);
|
||||
System.out.println("Report is : "+published.getReport());
|
||||
assertNotNull(published.getReport());
|
||||
|
@ -295,9 +309,14 @@ public class ConcessioniOverMongoTest extends BasicServiceTestUnit{
|
|||
}
|
||||
|
||||
private Concessione getFullPublished(WebTarget target) throws Exception {
|
||||
Concessione c=TestModel.prepareConcessione(1,2);
|
||||
|
||||
c.setNome("Concessione : publish test");
|
||||
File layerFolder=new File(
|
||||
"/Users/fabioisti/Documents/Concessioni 04-03/UsiniTomestighes");
|
||||
Map<String, List<File>> layers = Files.getAllShapeSet(layerFolder,true);
|
||||
|
||||
Concessione c=TestModel.prepareConcessione(1,1);
|
||||
|
||||
c.setNome("Concessione : publish test ");
|
||||
StorageUtils storage=new StorageUtils();
|
||||
|
||||
|
||||
|
@ -305,27 +324,34 @@ public class ConcessioniOverMongoTest extends BasicServiceTestUnit{
|
|||
c=register(target,c);
|
||||
|
||||
//Upload files
|
||||
upload(storage,target,c.getMongo_id(),Paths.RELAZIONE,"relazione.pdf");
|
||||
upload(storage,target,c.getMongo_id(),Paths.ABSTRACT_RELAZIONE,"relazione.pdf");
|
||||
c=upload(storage,target,c.getMongo_id(),Paths.RELAZIONE,"relazione.pdf");
|
||||
c=upload(storage,target,c.getMongo_id(),Paths.ABSTRACT_RELAZIONE,"relazione.pdf");
|
||||
|
||||
|
||||
String[] keys=layers.keySet().toArray(new String [0]);
|
||||
c=upload(storage,target,c.getMongo_id(),Paths.POSIZIONAMENTO,
|
||||
TestModel.getBaseFolder().list((file,name)->{return name.startsWith("pianta.shp");}));
|
||||
// layers.get(keys[0]).toArray(new File[0]));
|
||||
|
||||
|
||||
upload(storage,target,c.getMongo_id(),Paths.POSIZIONAMENTO,
|
||||
// TestModel.getBaseFolder().list((file,name)->{return name.startsWith("pos");}));
|
||||
new File("/Users/fabioisti/Documents/invio_08_05/Montalto di Castro (VT)_Vulci_Indagini non invasive_Doc. paragr._Va/CONSEGNA_WGS84")
|
||||
.listFiles((file,name)->{return name.startsWith("Mag_area");}));
|
||||
// Clash on workspaces
|
||||
upload(storage,target,c.getMongo_id(),Paths.piantaByIndex(0),
|
||||
// TestModel.getBaseFolder().list((file,name)->{return name.startsWith("pianta");}));
|
||||
|
||||
new File("/Users/fabioisti/Documents/invio_08_05/Montalto di Castro (VT)_Vulci_Indagini non invasive_Doc. paragr._Va/CONSEGNA_WGS84")
|
||||
.listFiles((file,name)->{return name.startsWith("Mag_anomalies_WGS84");}));
|
||||
upload(storage,target,c.getMongo_id(),Paths.imgByIndex(0),"immagine.png");
|
||||
upload(storage,target,c.getMongo_id(),Paths.imgByIndex(1),"immagine2.png");
|
||||
|
||||
|
||||
for(int i=0;i<c.getPianteFineScavo().size();i++) {
|
||||
String key=keys[0];
|
||||
String path=Paths.piantaByIndex(i);
|
||||
c.getContentByPath(path).setTitolo("Pianta from "+key.replace(layerFolder.getAbsolutePath(),""));
|
||||
c=update(c,target);
|
||||
c=upload(storage, target, c.getMongo_id(), path,
|
||||
TestModel.getBaseFolder().list((file,name)->{return name.startsWith("pianta.shp");}));
|
||||
// layers.get(key).toArray(new File[0]));
|
||||
}
|
||||
|
||||
// Immagini
|
||||
Concessione published=publish(target, c);
|
||||
return published;
|
||||
for (int i = 0; i <c.getImmaginiRappresentative().size() ; i++) {
|
||||
c=upload(storage,target,c.getMongo_id(),Paths.imgByIndex(0),"immagine"+(i+1)+".png");
|
||||
}
|
||||
|
||||
|
||||
return publish(target, c.getMongo_id());
|
||||
}
|
||||
|
||||
public static long count(Iterator<?> iterator){
|
||||
|
|
|
@ -0,0 +1,43 @@
|
|||
package org.gcube.application.geoportal.service;
|
||||
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.gcube.application.cms.tests.TokenSetter;
|
||||
import org.gcube.data.transfer.library.DataTransferClient;
|
||||
import org.gcube.data.transfer.library.faults.RemoteServiceException;
|
||||
import org.gcube.data.transfer.library.faults.ServiceNotFoundException;
|
||||
import org.gcube.data.transfer.library.faults.UnreachableNodeException;
|
||||
import org.gcube.data.transfer.model.RemoteFileDescriptor;
|
||||
|
||||
|
||||
|
||||
@Slf4j
|
||||
public class DescribeGSFolder {
|
||||
|
||||
public static void main(String[] args) throws UnreachableNodeException, ServiceNotFoundException, RemoteServiceException {
|
||||
|
||||
TokenSetter.set("/gcube/devsec/devVRE");
|
||||
|
||||
String gsEndpoint="https://geoserver-218.dev.d4science.org/";
|
||||
String gsBasePath="geoserver/GNA/6157089e596924b380a174b7_1633101414296/";
|
||||
DataTransferClient client=DataTransferClient.getInstanceByEndpoint(gsEndpoint);
|
||||
// System.out.println(getFileDescriptor(gsBasePath,client.getWebClient()));
|
||||
print(client,gsBasePath,"");
|
||||
}
|
||||
|
||||
|
||||
public static void print(DataTransferClient client, String path, String pad) throws RemoteServiceException {
|
||||
RemoteFileDescriptor desc=client.getWebClient().getInfo(path);
|
||||
System.out.println(pad+desc.getFilename() + "["+desc.getSize()+"]");
|
||||
if(desc.isDirectory())
|
||||
desc.getChildren().forEach(f->{
|
||||
try {
|
||||
print(client,path+"/"+f,pad+"\t");
|
||||
} catch (RemoteServiceException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
|
||||
}
|
|
@ -7,6 +7,7 @@ import com.mongodb.client.MongoCollection;
|
|||
import com.mongodb.client.MongoDatabase;
|
||||
import com.mongodb.client.model.Projections;
|
||||
import org.bson.Document;
|
||||
import org.gcube.application.cms.tests.TokenSetter;
|
||||
import org.gcube.application.geoportal.service.engine.ImplementationProvider;
|
||||
import org.gcube.application.geoportal.service.engine.mongo.MongoManager;
|
||||
import org.gcube.application.geoportal.service.engine.providers.MongoClientProvider;
|
||||
|
|
|
@ -1,48 +0,0 @@
|
|||
package org.gcube.application.geoportal.service;
|
||||
|
||||
|
||||
import org.gcube.application.geoportal.service.engine.SDIManager;
|
||||
import org.gcube.application.geoportal.service.engine.postgis.PostgisIndex;
|
||||
import org.gcube.application.geoportal.service.model.internal.faults.ConfigurationException;
|
||||
import org.gcube.application.geoportal.service.model.internal.faults.SDIInteractionException;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.sql.SQLException;
|
||||
import java.util.regex.Matcher;
|
||||
|
||||
public class SDITests {
|
||||
|
||||
|
||||
@Before
|
||||
public void init(){
|
||||
TokenSetter.set("/gcube/devsec/devVRE");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void registerCentroidsLayer() throws SDIInteractionException, SQLException, ConfigurationException {
|
||||
PostgisIndex index=new PostgisIndex();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testRegexp(){
|
||||
Matcher hostMatcher=SDIManager.HOSTNAME_PATTERN.matcher("jdbc:postgresql://postgresql-srv-dev.d4science.org:5432/geoserver_dev_db");
|
||||
Assert.assertTrue(hostMatcher.find());
|
||||
System.out.println("HOST :\t"+hostMatcher.group());
|
||||
|
||||
Matcher portMatcher=SDIManager.PORT_PATTERN.matcher("jdbc:postgresql://postgresql-srv-dev.d4science.org:5432/geoserver_dev_db");
|
||||
Assert.assertTrue(portMatcher.find());
|
||||
System.out.println("PORT :\t"+portMatcher.group());
|
||||
|
||||
Matcher dbMatcher=SDIManager.DB_NAME_PATTERN.matcher("jdbc:postgresql://postgresql-srv-dev.d4science.org:5432/geoserver_dev_db");
|
||||
Assert.assertTrue(dbMatcher.find());
|
||||
System.out.println("DB :\t"+dbMatcher.group());
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void registerFileSet(){
|
||||
|
||||
}
|
||||
}
|
|
@ -1,5 +1,6 @@
|
|||
package org.gcube.application.geoportal.service;
|
||||
|
||||
import org.gcube.application.cms.tests.TokenSetter;
|
||||
import org.gcube.application.geoportal.common.utils.StorageUtils;
|
||||
import org.gcube.application.geoportal.service.engine.ImplementationProvider;
|
||||
import org.gcube.application.geoportal.service.engine.providers.StorageClientProvider;
|
||||
|
|
|
@ -1,35 +0,0 @@
|
|||
package org.gcube.application.geoportal.service;
|
||||
|
||||
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
|
||||
import org.gcube.common.scope.api.ScopeProvider;
|
||||
|
||||
import java.util.Properties;
|
||||
|
||||
public class TokenSetter {
|
||||
|
||||
|
||||
|
||||
private static Properties props=new Properties();
|
||||
|
||||
static{
|
||||
try {
|
||||
props.load(TokenSetter.class.getResourceAsStream("/tokens.properties"));
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException("YOU NEED TO SET TOKEN FILE IN CONFIGURATION");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public static void set(String scope){
|
||||
try{
|
||||
if(!props.containsKey(scope)) throw new RuntimeException("No token found for scope : "+scope);
|
||||
SecurityTokenProvider.instance.set(props.getProperty(scope));
|
||||
}catch(Exception e){
|
||||
e.printStackTrace(System.err);
|
||||
throw e;
|
||||
}
|
||||
ScopeProvider.instance.set(scope);
|
||||
}
|
||||
|
||||
|
||||
}
|
|
@ -0,0 +1,79 @@
|
|||
package org.gcube.application.geoportal.service.engine.materialization;
|
||||
|
||||
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.gcube.application.cms.tests.TokenSetter;
|
||||
import org.gcube.application.cms.tests.model.TestModel;
|
||||
import org.gcube.application.geoportal.common.model.legacy.Concessione;
|
||||
import org.gcube.application.geoportal.common.model.legacy.GeoServerContent;
|
||||
import org.gcube.application.geoportal.common.model.legacy.LayerConcessione;
|
||||
import org.gcube.application.geoportal.common.model.legacy.PersistedContent;
|
||||
import org.gcube.application.geoportal.common.utils.Files;
|
||||
import org.gcube.application.geoportal.service.engine.materialization.SDIManager;
|
||||
import org.gcube.application.geoportal.service.engine.postgis.PostgisIndex;
|
||||
import org.gcube.application.geoportal.service.model.internal.faults.ConfigurationException;
|
||||
import org.gcube.application.geoportal.service.model.internal.faults.SDIInteractionException;
|
||||
import org.gcube.application.geoportal.service.utils.Serialization;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.sql.SQLException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.regex.Matcher;
|
||||
|
||||
@Slf4j
|
||||
public class SDITests {
|
||||
|
||||
|
||||
@Before
|
||||
public void init(){
|
||||
TokenSetter.set("/gcube/devsec/devVRE");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void registerCentroidsLayer() throws SDIInteractionException, SQLException, ConfigurationException {
|
||||
PostgisIndex index=new PostgisIndex();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testRegexp(){
|
||||
Matcher hostMatcher=SDIManager.HOSTNAME_PATTERN.matcher("jdbc:postgresql://postgresql-srv-dev.d4science.org:5432/geoserver_dev_db");
|
||||
Assert.assertTrue(hostMatcher.find());
|
||||
System.out.println("HOST :\t"+hostMatcher.group());
|
||||
|
||||
Matcher portMatcher=SDIManager.PORT_PATTERN.matcher("jdbc:postgresql://postgresql-srv-dev.d4science.org:5432/geoserver_dev_db");
|
||||
Assert.assertTrue(portMatcher.find());
|
||||
System.out.println("PORT :\t"+portMatcher.group());
|
||||
|
||||
Matcher dbMatcher=SDIManager.DB_NAME_PATTERN.matcher("jdbc:postgresql://postgresql-srv-dev.d4science.org:5432/geoserver_dev_db");
|
||||
Assert.assertTrue(dbMatcher.find());
|
||||
System.out.println("DB :\t"+dbMatcher.group());
|
||||
}
|
||||
|
||||
|
||||
// @Test
|
||||
//Disabled because dependant on state
|
||||
public void registerLayer() throws IOException, SDIInteractionException {
|
||||
Concessione toTest= Serialization.read(Files.readFileAsString(
|
||||
new File(TestModel.getBaseFolder(),"transferTest.json").getAbsolutePath(), Charset.defaultCharset()), Concessione.class);
|
||||
|
||||
|
||||
SDIManager manager = new SDIManager();
|
||||
List<LayerConcessione> toPush=new ArrayList<>();
|
||||
toPush.add(toTest.getPosizionamentoScavo());
|
||||
toPush.addAll(toTest.getPianteFineScavo());
|
||||
|
||||
for (LayerConcessione l :toPush){
|
||||
GeoServerContent content=manager.pushShapeLayerFileSet(l,toTest.getFolderId(),toTest.getMongo_id()+"_"+System.currentTimeMillis());
|
||||
System.out.println("Published "+content);
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
||||
}
|
|
@ -1,6 +1,6 @@
|
|||
package org.gcube.application.geoportal.service.ws;
|
||||
|
||||
import org.gcube.application.geoportal.service.TokenSetter;
|
||||
import org.gcube.application.cms.tests.TokenSetter;
|
||||
import org.gcube.common.storagehub.client.dsl.FolderContainer;
|
||||
import org.gcube.common.storagehub.client.dsl.StorageHubClient;
|
||||
import org.gcube.common.storagehub.model.exceptions.StorageHubException;
|
||||
|
@ -13,7 +13,7 @@ public class DescribeWSFolder {
|
|||
|
||||
public static void main(String[] args) throws StorageHubException {
|
||||
String context="/gcube/devsec/devVRE";
|
||||
String folderID="28774602-6423-4870-9afb-f8f4b585b438";
|
||||
String folderID="46b376db-32d7-4411-ad04-ca3dadab5f5b";
|
||||
Boolean recursive = true;
|
||||
|
||||
TokenSetter.set(context);
|
||||
|
@ -25,19 +25,11 @@ public class DescribeWSFolder {
|
|||
|
||||
System.out.println("Listing... ");
|
||||
print(folder,"");
|
||||
// for (Item i : folder.list().includeHidden().getItems()) {
|
||||
// System.out.println("name:" + i.getName() + " type " + i.getPrimaryType());
|
||||
//
|
||||
//// System.out.println("name:" + i.getName() + " is a File?: " + (i instanceof AbstractFileItem));
|
||||
//// System.out.println("name:" + i.getName() + " is a folder?: " + (i instanceof FolderItem));
|
||||
//// System.out.println("name:" + i.getName() + " is a shared folder?: " + (i instanceof SharedFolder));
|
||||
//// System.out.println("name:" + i.getName() + " is a VRE folder?: " + (i instanceof VreFolder));
|
||||
// }
|
||||
}
|
||||
|
||||
private static final void print(FolderContainer folder,String pad) throws StorageHubException {
|
||||
for (Item i : folder.list().includeHidden().getItems()) {
|
||||
System.out.println(pad+"name:" + i.getName() + " type " + i.getPrimaryType());
|
||||
System.out.println(pad+ i.getName() + " [" + i.getPrimaryType()+"]");
|
||||
if(i instanceof FolderItem){
|
||||
print(shc.open(i.getId()).asFolder(),pad+"\t");
|
||||
}
|
||||
|
|
|
@ -1,15 +0,0 @@
|
|||
package org.gcube.application.geoportal.service.ws;
|
||||
|
||||
public class DisplayWorkspaceTree {
|
||||
|
||||
public static void main(String[] args) {
|
||||
String context="/gcube/devsec/devVRE";
|
||||
|
||||
String folderId=null;
|
||||
|
||||
|
||||
// NB null ==
|
||||
|
||||
}
|
||||
|
||||
}
|
|
@ -1,6 +1,6 @@
|
|||
package org.gcube.application.geoportal.service.ws;
|
||||
|
||||
import org.gcube.application.geoportal.service.TokenSetter;
|
||||
import org.gcube.application.cms.tests.TokenSetter;
|
||||
import org.gcube.application.geoportal.service.engine.WorkspaceManager;
|
||||
import org.gcube.common.storagehub.client.dsl.FolderContainer;
|
||||
import org.gcube.common.storagehub.client.dsl.StorageHubClient;
|
||||
|
|
|
@@ -3,13 +3,19 @@

<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss} | %-5p | [%thread] %logger{5}:%L - %msg%n</pattern>
<pattern>%d{HH:mm:ss} | %logger{0}:%L - %msg%n</pattern>
</encoder>
</appender>

<root level="DEBUG">
<logger name="org.gcube.application" level="DEBUG">
<appender-ref ref="ACCESS_FILE" />
</logger>

<root level="ERROR">
<appender-ref ref="STDOUT" />

</root>


</configuration>
@ -1,18 +0,0 @@
|
|||
"Acquacadda_Nuxis_2019_def","Acquacadda_Nuxis_2019_def/D_posizionamento saggi_NUXIS 2019","Acquacadda_Nuxis_2019_def/E_piante fine scavo vettoriali_NUXIS 2019"
|
||||
"Ariano nel Polisine (Ro) - Loc. San Basilio ( Unvi. di Padova)","Ariano nel Polisine (Ro) - Loc. San Basilio ( Unvi. di Padova)/GNA_SAN BASILIO_2019/GNA_topografia_San Basilio/POSIZIONAMENTO SAGGI E AREE DELLE INDAGINI","Ariano nel Polisine (Ro) - Loc. San Basilio ( Unvi. di Padova)/GNA_SAN BASILIO_2019/GNA_topografia_San Basilio/PLANIMETRIE SAGGI AREE DELLE INDAGINI"
|
||||
"Ariano Polesine (Ro) - Loc. San Basilio (Uni. di Venezia)","Ariano Polesine (Ro) - Loc. San Basilio (Uni. di Venezia)/S.Basilio-UniVe_GNA/D.GNA_Posizionamento_San Basilio","Ariano Polesine (Ro) - Loc. San Basilio (Uni. di Venezia)/S.Basilio-UniVe_GNA/E-F.GNA_Pianta fine scavo_San Basilio/GNA_Pianta di fine scavo_San Basilio"
|
||||
"C_F_GNA_Oscurusciuto_2020","C_F_GNA_Oscurusciuto_2020/D-E-F_GNA_topografia_Oscurusciuto/Posizionamento_limiti_saggi_indagati_Oscurusciuto_vettoriale","C_F_GNA_Oscurusciuto_2020/D-E-F_GNA_topografia_Oscurusciuto/Pianta_fine_scavo_Oscurusciuto_2019_vettoriale"
|
||||
"Cerchiara-Damale_rev","Cerchiara-Damale_rev/D. Posizionamento saggio","Cerchiara-Damale_rev/D. Posizionamento saggio/E. Pianta fine scavo_vettoriale"
|
||||
"Cerveteri (RM)_loc. Monte Abatone_documentazione fine scavo 2019 (paragr. IVa)","Cerveteri (RM)_loc. Monte Abatone_documentazione fine scavo 2019 (paragr. IVa)/IVa_E_Tav.1_Shape Posizionamento area scavo","Cerveteri (RM)_loc. Monte Abatone_documentazione fine scavo 2019 (paragr. IVa)/IVa_E_Tav.1_Shape Posizionamento area scavo"
|
||||
"Cervia Vecchia_rev","Cervia Vecchia_rev/D_GNA_topografia_Cervia","Cervia Vecchia_rev/D_GNA_topografia_Cervia"
|
||||
"Civitavecchia (RM)_loc. Ficoncella_Aquae Tauri_docum. IVa_2019","Civitavecchia (RM)_loc. Ficoncella_Aquae Tauri_docum. IVa_2019/Civitavecchia (RM)_Ficoncella_documentazione fine scavo 2019_IVa/IVa_D_E_F_topografia_Aquae_Tauri/Posizionamento saggi 2019","Civitavecchia (RM)_loc. Ficoncella_Aquae Tauri_docum. IVa_2019/Civitavecchia (RM)_Ficoncella_documentazione fine scavo 2019_IVa/IVa_D_E_F_topografia_Aquae_Tauri/Pianta di fine scavo 2019"
|
||||
"Frascineto Timpone delle Fave_rev","Frascineto Timpone delle Fave_rev/D. Posizionamento dell'area","Frascineto Timpone delle Fave_rev/D. Posizionamento dell'area"
|
||||
"GNA_AQUILEIAcomellimoro_2019_rev","GNA_AQUILEIAcomellimoro_2019_rev/GNA_topografia_AQUILEIAcomellimoro_2019/D.Posizionamento_AQU19COM","GNA_AQUILEIAcomellimoro_2019_rev/GNA_topografia_AQUILEIAcomellimoro_2019/E.Planimetria generale_AQU19COM"
|
||||
"GNA_Poggio Pimperiale_Poggibonsi","GNA_Poggio Pimperiale_Poggibonsi/5591449/GNA_topografia_PoggioImperiale/POSIZIONAMENTO","GNA_Poggio Pimperiale_Poggibonsi/5591449/GNA_topografia_PoggioImperiale/PLANIMETRIE SAGGI/SHAPE A22"
|
||||
"Jesolo (Ve) - Loc. San Mauro e Torre del Caligo","Jesolo (Ve) - Loc. San Mauro e Torre del Caligo/GNA_topografia_Jesolo/GIS_shp","Jesolo (Ve) - Loc. San Mauro e Torre del Caligo/GNA_topografia_Jesolo/GIS_shp"
|
||||
"Monterotondo (RM)_loc. Tor Mancina_documentazione fine scavo 2019 (paragr. IVa)","Monterotondo (RM)_loc. Tor Mancina_documentazione fine scavo 2019 (paragr. IVa)/IVa_D_E_F_GNA_topografia_Via Nomentum-Eretum","Monterotondo (RM)_loc. Tor Mancina_documentazione fine scavo 2019 (paragr. IVa)/IVa_D_E_F_GNA_topografia_Via Nomentum-Eretum"
|
||||
"Quarto d'Altino -Loc. Fornace","Quarto d'Altino -Loc. Fornace/Cupitò/GNA_Altino_posizionamento_shp","Quarto d'Altino -Loc. Fornace/Cupitò/GNA_Altino_posizionamento_shp"
|
||||
"Tolfa (RM)_Bufalareccia_documentazione IVa 2019","Tolfa (RM)_Bufalareccia_documentazione IVa 2019/IVa_D_GNA_posizionamento dei limiti in formato vectoriale_Bufalareccia 2019","Tolfa (RM)_Bufalareccia_documentazione IVa 2019/IVa_E_GNA_pianta di fine scavo multipolygon_Bufalareccia 2019"
|
||||
"UNIME_ LAINO_REV","UNIME_ LAINO_REV/D. Limiti saggi/laino 2019 limiti saggi","UNIME_ LAINO_REV/D. Limiti saggi/laino 2019 limiti saggi"
|
||||
"UNIME_TORTORA_rev","UNIME_TORTORA_rev/tortora 2019 limiti saggi","UNIME_TORTORA_rev/tortora 2019 pianta fine scavo"
|
||||
"UsiniTomestighes","UsiniTomestighes/GNA_TOMESTIGHES_2019/GNA_Topografia_Tomestighes/D_GNA_Posizionamento_limiti_aree_indagate","UsiniTomestighes/GNA_TOMESTIGHES_2019/GNA_Topografia_Tomestighes/E_GNA_Piante_fine_scavo_Tomestighes/Area A_est"
|
|
|
@ -1,18 +0,0 @@
|
|||
"Acquacadda_Nuxis_2019_def","Acquacadda_Nuxis_2019_def/D_posizionamento saggi_NUXIS 2019","Acquacadda_Nuxis_2019_def/E_piante fine scavo vettoriali_NUXIS 2019"
|
||||
"Ariano nel Polisine (Ro) - Loc. San Basilio ( Unvi. di Padova)","Ariano nel Polisine (Ro) - Loc. San Basilio ( Unvi. di Padova)/GNA_SAN BASILIO_2019/GNA_topografia_San Basilio/POSIZIONAMENTO SAGGI E AREE DELLE INDAGINI","Ariano nel Polisine (Ro) - Loc. San Basilio ( Unvi. di Padova)/GNA_SAN BASILIO_2019/GNA_topografia_San Basilio/PLANIMETRIE SAGGI AREE DELLE INDAGINI"
|
||||
"Ariano Polesine (Ro) - Loc. San Basilio (Uni. di Venezia)","Ariano Polesine (Ro) - Loc. San Basilio (Uni. di Venezia)/S.Basilio-UniVe_GNA/D.GNA_Posizionamento_San Basilio","Ariano Polesine (Ro) - Loc. San Basilio (Uni. di Venezia)/S.Basilio-UniVe_GNA/E-F.GNA_Pianta fine scavo_San Basilio/GNA_Pianta di fine scavo_San Basilio"
|
||||
"C_F_GNA_Oscurusciuto_2020","C_F_GNA_Oscurusciuto_2020/D-E-F_GNA_topografia_Oscurusciuto/Posizionamento_limiti_saggi_indagati_Oscurusciuto_vettoriale","C_F_GNA_Oscurusciuto_2020/D-E-F_GNA_topografia_Oscurusciuto/Pianta_fine_scavo_Oscurusciuto_2019_vettoriale"
|
||||
"Cerchiara-Damale_rev","Cerchiara-Damale_rev/D. Posizionamento saggio","Cerchiara-Damale_rev/D. Posizionamento saggio/E. Pianta fine scavo_vettoriale"
|
||||
"Cerveteri (RM)_loc. Monte Abatone_documentazione fine scavo 2019 (paragr. IVa)","Cerveteri (RM)_loc. Monte Abatone_documentazione fine scavo 2019 (paragr. IVa)/IVa_E_Tav.1_Shape Posizionamento area scavo","Cerveteri (RM)_loc. Monte Abatone_documentazione fine scavo 2019 (paragr. IVa)/IVa_E_Tav.1_Shape Posizionamento area scavo"
|
||||
"Cervia Vecchia_rev","Cervia Vecchia_rev/D_GNA_topografia_Cervia","Cervia Vecchia_rev/D_GNA_topografia_Cervia"
|
||||
"Civitavecchia (RM)_loc. Ficoncella_Aquae Tauri_docum. IVa_2019","Civitavecchia (RM)_loc. Ficoncella_Aquae Tauri_docum. IVa_2019/Civitavecchia (RM)_Ficoncella_documentazione fine scavo 2019_IVa/IVa_D_E_F_topografia_Aquae_Tauri/Posizionamento saggi 2019","Civitavecchia (RM)_loc. Ficoncella_Aquae Tauri_docum. IVa_2019/Civitavecchia (RM)_Ficoncella_documentazione fine scavo 2019_IVa/IVa_D_E_F_topografia_Aquae_Tauri/Pianta di fine scavo 2019"
|
||||
"Frascineto Timpone delle Fave_rev","Frascineto Timpone delle Fave_rev/D. Posizionamento dell'area","Frascineto Timpone delle Fave_rev/D. Posizionamento dell'area"
|
||||
"GNA_AQUILEIAcomellimoro_2019_rev","GNA_AQUILEIAcomellimoro_2019_rev/GNA_topografia_AQUILEIAcomellimoro_2019/D.Posizionamento_AQU19COM","GNA_AQUILEIAcomellimoro_2019_rev/GNA_topografia_AQUILEIAcomellimoro_2019/E.Planimetria generale_AQU19COM"
|
||||
"GNA_Poggio Pimperiale_Poggibonsi","GNA_Poggio Pimperiale_Poggibonsi/5591449/GNA_topografia_PoggioImperiale/POSIZIONAMENTO","GNA_Poggio Pimperiale_Poggibonsi/5591449/GNA_topografia_PoggioImperiale/PLANIMETRIE SAGGI/SHAPE A22"
|
||||
"Jesolo (Ve) - Loc. San Mauro e Torre del Caligo","Jesolo (Ve) - Loc. San Mauro e Torre del Caligo/GNA_topografia_Jesolo/GIS_shp","Jesolo (Ve) - Loc. San Mauro e Torre del Caligo/GNA_topografia_Jesolo/GIS_shp"
|
||||
"Monterotondo (RM)_loc. Tor Mancina_documentazione fine scavo 2019 (paragr. IVa)","Monterotondo (RM)_loc. Tor Mancina_documentazione fine scavo 2019 (paragr. IVa)/IVa_D_E_F_GNA_topografia_Via Nomentum-Eretum","Monterotondo (RM)_loc. Tor Mancina_documentazione fine scavo 2019 (paragr. IVa)/IVa_D_E_F_GNA_topografia_Via Nomentum-Eretum"
|
||||
"Quarto d'Altino -Loc. Fornace","Quarto d'Altino -Loc. Fornace/Cupitò/GNA_Altino_posizionamento_shp","Quarto d'Altino -Loc. Fornace/Cupitò/GNA_Altino_posizionamento_shp"
|
||||
"Tolfa (RM)_Bufalareccia_documentazione IVa 2019","Tolfa (RM)_Bufalareccia_documentazione IVa 2019/IVa_D_GNA_posizionamento dei limiti in formato vectoriale_Bufalareccia 2019","Tolfa (RM)_Bufalareccia_documentazione IVa 2019/IVa_E_GNA_pianta di fine scavo multipolygon_Bufalareccia 2019"
|
||||
"UNIME_ LAINO_REV","UNIME_ LAINO_REV/D. Limiti saggi/laino 2019 limiti saggi","UNIME_ LAINO_REV/D. Limiti saggi/laino 2019 limiti saggi"
|
||||
"UNIME_TORTORA_rev","UNIME_TORTORA_rev/tortora 2019 limiti saggi","UNIME_TORTORA_rev/tortora 2019 pianta fine scavo"
|
||||
"UsiniTomestighes","UsiniTomestighes/GNA_TOMESTIGHES_2019/GNA_Topografia_Tomestighes/D_GNA_Posizionamento_limiti_aree_indagate","UsiniTomestighes/GNA_TOMESTIGHES_2019/GNA_Topografia_Tomestighes/E_GNA_Piante_fine_scavo_Tomestighes/Area A_est"
|
|
[binary image changed: 5.7 KiB before, 5.7 KiB after]
@ -0,0 +1,17 @@
|
|||
"Acquacadda_Nuxis_2019_def","Acquacadda_Nuxis_2019_def/D_posizionamento saggi_NUXIS 2019/Areale saggi Nuxis - Grotta di Acquacadda.shp"
|
||||
"Ariano nel Polisine (Ro) - Loc. San Basilio ( Unvi. di Padova)","Ariano nel Polisine (Ro) - Loc. San Basilio ( Unvi. di Padova)/GNA_SAN BASILIO_2019/GNA_topografia_San Basilio/POSIZIONAMENTO SAGGI E AREE DELLE INDAGINI/San Basilio 2019_EPSG32633.shp"
|
||||
"Ariano Polesine (Ro) - Loc. San Basilio (Uni. di Venezia)","Ariano Polesine (Ro) - Loc. San Basilio (Uni. di Venezia)/S.Basilio-UniVe_GNA/D.GNA_Posizionamento_San Basilio/limiti di scavo.shp"
|
||||
"C_F_GNA_Oscurusciuto_2020","C_F_GNA_Oscurusciuto_2020/D-E-F_GNA_topografia_Oscurusciuto/Posizionamento_limiti_saggi_indagati_Oscurusciuto_vettoriale/Oscurusciuto_area_scavo_2019.shp"
|
||||
"Cerchiara-Damale_rev","Cerchiara-Damale_rev/D. Posizionamento saggio/RB228_trench2.shp"
|
||||
"Cerveteri (RM)_loc. Monte Abatone_documentazione fine scavo 2019 (paragr. IVa)","Cerveteri (RM)_loc. Monte Abatone_documentazione fine scavo 2019 (paragr. IVa)/IVa_E_Tav.1_Shape Posizionamento area scavo/Posizionamento Area di scavo Monte Abatone 2019.shp"
|
||||
"Cervia Vecchia_rev","Cervia Vecchia_rev/D_GNA_topografia_Cervia/CE19_09.shp"
|
||||
"Frascineto Timpone delle Fave_rev","Frascineto Timpone delle Fave_rev/D. Posizionamento dell'area/TDF2019_ricognizione.shp"
|
||||
"GNA_AQUILEIAcomellimoro_2019_rev","GNA_AQUILEIAcomellimoro_2019_rev/GNA_topografia_AQUILEIAcomellimoro_2019/D.Posizionamento_AQU19COM/Tav01_line_posizionamento_EPSG4326.shp"
|
||||
"GNA_Poggio Pimperiale_Poggibonsi","GNA_Poggio Pimperiale_Poggibonsi/5591449/GNA_topografia_PoggioImperiale/POSIZIONAMENTO/POSIZIONAMENTO A22.shp"
|
||||
"Jesolo (Ve) - Loc. San Mauro e Torre del Caligo","Jesolo (Ve) - Loc. San Mauro e Torre del Caligo/GNA_topografia_Jesolo/GIS_shp/Area di Scavo 2019.shp"
|
||||
"Monterotondo (RM)_loc. Tor Mancina_documentazione fine scavo 2019 (paragr. IVa)","Monterotondo (RM)_loc. Tor Mancina_documentazione fine scavo 2019 (paragr. IVa)/IVa_D_E_F_GNA_topografia_Via Nomentum-Eretum/pianta fine scavo fasi.dxf"
|
||||
"Quarto d'Altino -Loc. Fornace","Quarto d'Altino -Loc. Fornace/Cupitò/GNA_Altino_posizionamento_shp/ALT19_area survey_EPSG4326.shp"
|
||||
"Tolfa (RM)_Bufalareccia_documentazione IVa 2019","Tolfa (RM)_Bufalareccia_documentazione IVa 2019/IVa_D_GNA_posizionamento dei limiti in formato vectoriale_Bufalareccia 2019/BUFFA_POLYGON_.shp"
|
||||
"UNIME_ LAINO_REV","UNIME_ LAINO_REV/D. Limiti saggi/laino 2019 limiti saggi/SAGGI.shp"
|
||||
"UNIME_TORTORA_rev","UNIME_TORTORA_rev/tortora 2019 limiti saggi/SAGGI 2019.shp"
|
||||
"UsiniTomestighes","UsiniTomestighes/GNA_TOMESTIGHES_2019/GNA_Topografia_Tomestighes/D_GNA_Posizionamento_limiti_aree_indagate/Limiti.shp"
|
|
|
@ -0,0 +1 @@
|
|||
"UsiniTomestighes","UsiniTomestighes/GNA_TOMESTIGHES_2019/GNA_Topografia_Tomestighes/D_GNA_Posizionamento_limiti_aree_indagate/Limiti.shp"
|
|
File diff suppressed because one or more lines are too long
|
@ -23,6 +23,27 @@
|
|||
<url>${gitBaseUrl}/${project.artifactId}.git</url>
|
||||
</scm>
|
||||
|
||||
|
||||
|
||||
<dependencyManagement>
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>org.gcube.distribution</groupId>
|
||||
<artifactId>gcube-bom</artifactId>
|
||||
<version>2.0.1</version>
|
||||
<type>pom</type>
|
||||
<scope>import</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.gcube.distribution</groupId>
|
||||
<artifactId>gcube-smartgears-bom</artifactId>
|
||||
<version>2.1.0</version>
|
||||
<type>pom</type>
|
||||
<scope>import</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
</dependencyManagement>
|
||||
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>org.gcube.application</groupId>
|
||||
|
|
|
@ -29,8 +29,8 @@ public class ClearConcessioni {
|
|||
|
||||
|
||||
Iterator<Concessione> it=null;
|
||||
it=manager.getList();
|
||||
// it=manager.search("{\"centroidLat\" : 0}");
|
||||
// it=manager.getList();
|
||||
it=manager.search("{\"nome\" : {\"$regex\" : \"Mock.*\"}}");
|
||||
|
||||
it.forEachRemaining((Concessione c)->{
|
||||
try{
|
|
@ -18,7 +18,7 @@ public class EditFileSet {
|
|||
|
||||
public static void main(String[] args) throws Exception {
|
||||
// params
|
||||
String context="/gcube/devsec/devVRE";
|
||||
String context= "/org/gcube/devsec/devVRE";
|
||||
String publishOption="true";
|
||||
|
||||
String toUpdateId="6131f42502ad3d2580412da7";
|
|
@ -35,7 +35,7 @@ public class Export {
|
|||
|
||||
File dir= new File("/Users/fabioisti/git/geoportal-client/import1628178107083");
|
||||
// String targetContext="/pred4s/preprod/preVRE";
|
||||
String targetContext="/gcube/devsec/devVRE";
|
||||
String targetContext= "/org/gcube/devsec/devVRE";
|
||||
|
||||
|
||||
|
|
@ -6,6 +6,7 @@ import org.gcube.application.geoportal.client.legacy.ConcessioniManagerI;
|
|||
import org.gcube.application.geoportal.common.model.legacy.Concessione;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
|
||||
import static org.gcube.application.geoportal.client.GeoportalAbstractPlugin.statefulMongoConcessioni;
|
||||
|
@ -24,7 +25,11 @@ public class RepublishAll {
|
|||
AtomicLong count=new AtomicLong(0);
|
||||
AtomicLong nullCount=new AtomicLong(0);
|
||||
AtomicLong errCount=new AtomicLong(0);
|
||||
manager.getList().forEachRemaining((Concessione c)->{
|
||||
Iterator<Concessione> it=null;
|
||||
// it=manager.getList();
|
||||
it=manager.search("{\"report.status\" : {$eq : \"WARNING\"}}");
|
||||
|
||||
it.forEachRemaining((Concessione c)->{
|
||||
try{
|
||||
String currentId=c.getMongo_id();
|
||||
if(currentId==null) {
|
|
@ -1,7 +1,6 @@
|
|||
package org.gcube.application.cms.usecases;
|
||||
|
||||
import org.gcube.application.cms.tests.TokenSetter;
|
||||
import org.gcube.application.geoportal.client.legacy.ConcessioniManagerI;
|
||||
import org.gcube.application.geoportal.common.model.legacy.Concessione;
|
||||
import org.gcube.application.geoportal.common.rest.MongoConcessioni;
|
||||
|
||||
|
@@ -13,7 +12,7 @@ public class RepublishSingle {
    public static void main(String[] args) throws Exception {
        TokenSetter.set("/gcube/devsec/devVRE");

        String id="6138c3a002ad3d1f0cd659f4";
        String id="6137497102ad3d1f0cd6586b";

        MongoConcessioni manager=mongoConcessioni().build();
@@ -4,11 +4,13 @@ import com.opencsv.CSVReader;
import lombok.extern.slf4j.Slf4j;
import org.gcube.application.cms.tests.model.TestModel;
import org.gcube.application.cms.tests.TokenSetter;
import org.gcube.application.geoportal.client.utils.Serialization;
import org.gcube.application.geoportal.common.utils.FileSets;

import org.gcube.application.geoportal.common.model.legacy.Concessione;
import org.gcube.application.geoportal.common.model.legacy.report.ValidationReport;
import org.gcube.application.geoportal.common.rest.MongoConcessioni;
import org.gcube.application.geoportal.common.utils.Files;
import org.gcube.application.geoportal.common.utils.StorageUtils;

import java.io.*;
@@ -22,28 +24,29 @@ public class MockFromFolder {

    public static void main(String[] args) throws Exception {
        //PARAMS
        String context="/gcube/devsec/devVRE";
        String context= "/gcube/devsec/devVRE";

        File descriptorsBaseFolder=new File(TestModel.getBaseFolder(),"packages");
        //Concessioni 04-03
        //String packageBaseDir="/Users/fabioisti/Documents/Concessioni 04-03/";
        //String csvDescriptor="src/test/resources/concessioni/concessioni04-03.csv";
        String packageBaseDir="/Users/fabioisti/Documents/Concessioni 04-03/";
        String csvDescriptor="concessioni04-03_filtered.csv";

        //DATASET_GNA_01
        // String packageBaseDir="/Users/fabioisti/Documents/DATASET_GNA_01";
        // String csvDescriptor="src/test/resources/concessioni/DATASET_GNA_01.csv";
        // String csvDescriptor="DATASET_GNA_01.csv";

        //DATASET_GNA_02
        //String packageBaseDir="/Users/fabioisti/Documents/DATASET_GNA_02";
        //String csvDescriptor="src/test/resources/concessioni/DATASET_GNA_02.csv";
        //String csvDescriptor="DATASET_GNA_02.csv";

        // invio_08_02
        // String packageBaseDir="/Users/fabioisti/Documents/invio_08_05";
        // String csvDescriptor="src/test/resources/concessioni/invio_08_05.csv";
        // String csvDescriptor="invio_08_05.csv";

        // concessioni 23_04
        String packageBaseDir="/Users/fabioisti/Documents/Concessioni_23_04";
        String csvDescriptor="src/test/resources/concessioni/concessioni_23_04.csv";
        // String packageBaseDir="/Users/fabioisti/Documents/Concessioni_23_04";
        // String csvDescriptor="concessioni_23_04.csv";
@@ -62,7 +65,7 @@ public class MockFromFolder {
        File baseDir=new File(packageBaseDir);
        ArrayList<Concessione> pushed=new ArrayList<>();

        CSVReader reader = new CSVReader(new FileReader(csvDescriptor));
        CSVReader reader = new CSVReader(new FileReader(new File(descriptorsBaseFolder,csvDescriptor)));
        String [] nextLine;
        //reads one line at a time
        while ((nextLine = reader.readNext()) != null)
@@ -71,26 +74,30 @@ public class MockFromFolder {
            //Create new
            String projectName = nextLine[0];
            String positionPath = nextLine[1];
            String piantePath = nextLine[2];

            try {
                //NB: group by file
                Map.Entry<String,List<File>> posSets = clusterizeFiles(positionPath, baseDir).entrySet().stream().findFirst().get();
                Map<String, List<File>> pianteSets = clusterizeFiles(piantePath, baseDir);
                Map.Entry<String,List<File>> posSets = Files.getAllShapeSet(new File(baseDir,positionPath),true).
                        entrySet().stream().findFirst().get();
                Map<String, List<File>> pianteSets = Files.getAllShapeSet(new File(baseDir,projectName),true);

                // Sometimes they are the same
                if(positionPath.equals(piantePath))
                    if(pianteSets.size()>1)
                        pianteSets.remove(posSets.getKey());

                log.debug("Entry {} pos Size {} piante {} ",projectName,posSets.getValue().size(),pianteSets.size());

                Concessione c = createMock(projectName, pianteSets, posSets.getValue(), client, storage);
                Concessione c = createMock(projectName,baseDir.getName(), pianteSets, posSets.getValue(), client, storage);

                publishedCount++;
                if (c.getReport().getStatus().equals(ValidationReport.ValidationStatus.PASSED))
                    successcount++;
                pushed.add(c);

            }catch(Throwable t){
                System.err.println("Problematic entry "+projectName);
                t.printStackTrace(System.err);
@@ -111,46 +118,40 @@ public class MockFromFolder {

    private static Map<String,List<File>> clusterizeFiles(String basePath,File packageFolder) throws IOException {
        log.debug("Clusterizing "+basePath);

        HashMap<String,List<File>> toReturn = new HashMap<>();
        File baseDir=new File(packageFolder,basePath);
        for(File shp:baseDir.listFiles((dir,name)->{return name.endsWith(".shp");})) {
            String basename=shp.getName().substring(0,shp.getName().lastIndexOf("."));
            List<File> fileset=new ArrayList<>();
            for (File shpSet : baseDir.listFiles((dir, name) -> {return name.startsWith(basename);}))
                fileset.add(shpSet);
            log.debug("SHP {} Set size {} ",basename,fileset.size());
            toReturn.put(basename,fileset);
        }
        return toReturn;
    }

    private static Concessione createMock(String baseName,Map<String,List<File>> piante, List<File> pos,
    private static Concessione createMock(String baseName,String packageName,Map<String,List<File>> piante, List<File> pos,
            MongoConcessioni client, StorageUtils storage) throws Exception {

        Concessione c= TestModel.prepareConcessione(piante.size(), 2);
        c.setNome("Mock for "+baseName);
        c.setNome("Mock for "+baseName+" ("+packageName+")");
        c= client.createNew(c);
        String mongoId=c.getMongo_id();

        // TEST DATA, DO NOT CARE
        client.registerFileSet(mongoId, FileSets.prepareRequest(storage,
                Concessione.Paths.RELAZIONE,new File ("src/test/resources/concessioni/relazione.pdf")));
                Concessione.Paths.RELAZIONE,new File (TestModel.getBaseFolder(),"relazione.pdf")));

        client.registerFileSet(mongoId, FileSets.prepareRequest(storage,
                Concessione.Paths.imgByIndex(0),new File("src/test/resources/concessioni/immagine.png")));
                Concessione.Paths.imgByIndex(0),new File(TestModel.getBaseFolder(),"immagine.png")));

        // POSIZIONAMENTO
        client.registerFileSet(mongoId, FileSets.prepareRequest(storage,
                Concessione.Paths.POSIZIONAMENTO,pos.toArray(new File[pos.size()])));

        // PIANTE
        Map.Entry<String,List<File>>[] entries= piante.entrySet().toArray(new Map.Entry[0]);
        for( int i= 0; i< piante.size();i++)
            client.registerFileSet(mongoId, FileSets.prepareRequest(storage,
                Concessione.Paths.piantaByIndex(i),entries[i].getValue().toArray(new File[0])));
        for( int i= 0; i< piante.size();i++) {
            // Set layer name
            c=client.getById(mongoId);
            String path=Concessione.Paths.piantaByIndex(i);
            c.getContentByPath(path).setTitolo(" Pianta from "+entries[i].getKey());
            client.update(mongoId, Serialization.write(c));

            //Set fileset
            client.registerFileSet(mongoId, FileSets.prepareRequest(storage,path, entries[i].getValue().toArray(new File[0])));
        }

        c=client.publish(mongoId);
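The second loop above is the substance of this commit: each pianta layer now takes its title from the same shapefile base name that keys its fileset, instead of keeping the default title, so layer and fileset stay coherent. Below is a minimal sketch of that pattern in isolation, assuming client and storage are the MongoConcessioni and StorageUtils used by MockFromFolder; the wrapper class and method names are illustrative and not part of this commit.

import java.io.File;
import java.util.List;
import java.util.Map;

import org.gcube.application.geoportal.client.utils.Serialization;
import org.gcube.application.geoportal.common.model.legacy.Concessione;
import org.gcube.application.geoportal.common.rest.MongoConcessioni;
import org.gcube.application.geoportal.common.utils.FileSets;
import org.gcube.application.geoportal.common.utils.StorageUtils;

public class PianteRegistration {

    // Illustrative helper: registers each shapefile set as a pianta whose title
    // matches the fileset base name, mirroring the loop introduced by this commit.
    static void registerPiante(MongoConcessioni client, StorageUtils storage, String mongoId,
                               Map<String, List<File>> pianteByBaseName) throws Exception {
        int i = 0;
        for (Map.Entry<String, List<File>> entry : pianteByBaseName.entrySet()) {
            String path = Concessione.Paths.piantaByIndex(i++);

            // Derive the layer title from the shapefile base name before registering the fileset
            Concessione c = client.getById(mongoId);
            c.getContentByPath(path).setTitolo("Pianta from " + entry.getKey());
            client.update(mongoId, Serialization.write(c));

            // Register the shapefile set under the same path
            client.registerFileSet(mongoId, FileSets.prepareRequest(
                    storage, path, entry.getValue().toArray(new File[0])));
        }
    }
}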