Bulk reupload concessione

This commit is contained in:
Fabio Sinibaldi 2022-05-13 18:42:11 +02:00
parent 2bd5336eee
commit e0daa09d6f
3 changed files with 304 additions and 195 deletions

View File

@ -19,9 +19,9 @@ import static org.gcube.application.geoportal.client.plugins.GeoportalAbstractPl
public class ClearConcessioni {
public static void main(String[] args) throws Exception {
// String context="/gcube/devsec/devVRE";
String context="/gcube/devsec/devVRE";
String context="/pred4s/preprod/preVRE";
//String context="/pred4s/preprod/preVRE";
TokenSetter.set(context);
@ -38,8 +38,8 @@ public class ClearConcessioni {
AtomicLong found=new AtomicLong(0);
Iterator<Concessione> it=null;
// it=manager.getList();
it=manager.search("{\"nome\" : {$regex : \"Mock .*\"}, \"creationTime\" :{$gt : \"2021-10-18T13:58:53.326\"}}");
it=manager.getList();
// it=manager.search("{\"nome\" : {$regex : \"Mock .*\"}, \"creationTime\" :{$gt : \"2021-10-18T13:58:53.326\"}}");
ExecutorService service = Executors.newFixedThreadPool(3);

View File

@ -0,0 +1,90 @@
package org.gcube.application.cms.usecases;
import lombok.extern.slf4j.Slf4j;
import org.gcube.application.cms.tests.TokenSetter;
import org.gcube.application.geoportal.client.legacy.ConcessioniManagerI;
import org.gcube.application.geoportal.common.model.document.Project;
import org.gcube.application.geoportal.common.model.legacy.Concessione;
import org.gcube.application.geoportal.common.model.rest.QueryRequest;
import org.gcube.application.geoportal.common.rest.Projects;
import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import static org.gcube.application.geoportal.client.plugins.GeoportalAbstractPlugin.projects;
import static org.gcube.application.geoportal.client.plugins.GeoportalAbstractPlugin.statefulMongoConcessioni;
@Slf4j
public class ClearProjects {

    /**
     * Utility entry point that bulk-deletes every project returned by an
     * unrestricted {@link QueryRequest} against the "profiledConcessioni"
     * collection, skipping any id listed in {@code toSkipIds}.
     *
     * Deletions run on a fixed pool of 3 workers; progress is polled once a
     * minute until the number of processed items matches the number found.
     *
     * @throws RemoteException      if the client query fails
     * @throws InterruptedException if the main thread is interrupted while
     *                              waiting for the pool to drain
     */
    public static void main(String[] args) throws RemoteException, InterruptedException {
        String context = "/gcube/devsec/devVRE";
        //String context="/pred4s/preprod/preVRE";
        TokenSetter.set(context);

        // Fixed stray double semicolon after build()
        Projects<Project> client = projects("profiledConcessioni").build();

        // Ids to preserve; add entries here to protect specific projects.
        ArrayList<String> toSkipIds = new ArrayList<>();
        // toSkipIds.add("6102c8dd02ad3d05b5f81df4");
        // toSkipIds.add("610415af02ad3d05b5f81ee3");

        // count     : items fully processed (deleted, skipped or failed)
        // nullCount : items with a null id
        // errCount  : items whose deletion threw
        // found     : items returned by the query
        AtomicLong count = new AtomicLong(0);
        AtomicLong nullCount = new AtomicLong(0);
        AtomicLong errCount = new AtomicLong(0);
        AtomicLong found = new AtomicLong(0);

        Iterator<Project> it = null;
        it = client.query(new QueryRequest());
        // it=manager.search("{\"nome\" : {$regex : \"Mock .*\"}, \"creationTime\" :{$gt : \"2021-10-18T13:58:53.326\"}}");

        ExecutorService service = Executors.newFixedThreadPool(3);
        it.forEachRemaining((Project c) -> {
            found.incrementAndGet();
            service.submit(new Runnable() {
                @Override
                public void run() {
                    try {
                        // Worker threads need their own security token.
                        TokenSetter.set(context);
                        String currentId = c.getId();
                        if (currentId == null) {
                            System.out.println("ID IS NULL " + c);
                            nullCount.incrementAndGet();
                        } else if (toSkipIds.contains(currentId)) {
                            System.out.println("Skipping " + currentId);
                        } else {
                            System.out.println("Deleting " + currentId);
                            client.deleteById(currentId, true);
                        }
                    } catch (Throwable throwable) {
                        System.err.println(throwable);
                        errCount.incrementAndGet();
                        // Brief back-off after a failure; restore the interrupt
                        // flag instead of silently swallowing it.
                        try {
                            Thread.sleep(1000);
                        } catch (InterruptedException i) {
                            Thread.currentThread().interrupt();
                        }
                    } finally {
                        count.incrementAndGet();
                    }
                }
            });
        });

        // Poll once a minute; shut the pool down once every found item has
        // been processed, which lets awaitTermination return true.
        while (!service.awaitTermination(1, TimeUnit.MINUTES)) {
            log.info("Waiting .. completed {}, out of {} ", count.get(), found.get());
            if (found.get() == count.get()) service.shutdown();
        }
        System.out.println("Done " + count.get() + " [null : " + nullCount.get() + ", err : " + errCount.get() + "]");
    }
}

View File

@ -4,7 +4,6 @@ import lombok.extern.slf4j.Slf4j;
import org.bson.Document;
import org.gcube.application.cms.custom.gna.concessioni.model.ProfiledConcessione;
import org.gcube.application.cms.tests.TokenSetter;
import org.gcube.application.geoportal.client.legacy.ConcessioniManagerI;
import org.gcube.application.geoportal.client.utils.Serialization;
import org.gcube.application.geoportal.common.model.document.Project;
import org.gcube.application.geoportal.common.model.legacy.*;
@ -14,20 +13,21 @@ import org.gcube.application.geoportal.common.rest.Projects;
import org.gcube.application.geoportal.common.utils.FileSets;
import org.gcube.application.geoportal.common.utils.Files;
import org.gcube.application.geoportal.common.utils.StorageUtils;
import org.gcube.common.storagehub.client.dsl.StorageHubClient;
import javax.print.Doc;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Consumer;
import static org.gcube.application.geoportal.client.plugins.GeoportalAbstractPlugin.projects;
import static org.gcube.application.geoportal.client.plugins.GeoportalAbstractPlugin.statefulMongoConcessioni;
@Slf4j
public class ExportConcessioniAsProjects {
@ -37,7 +37,7 @@ public class ExportConcessioniAsProjects {
public static void main(String[] args) {
public static void main(String[] args) throws InterruptedException {
// read from imported folder
File dir= new File("/Users/fabioisti/git/gcube-cms-suite/import1652276569045");
String targetContext="/gcube/devsec/devVRE";
@ -68,12 +68,45 @@ public class ExportConcessioniAsProjects {
AtomicLong warnCount = new AtomicLong(0);
AtomicLong errCount = new AtomicLong(0);
ExecutorService service = Executors.newFixedThreadPool(3);
long startProcess = System.currentTimeMillis();
found.forEach(c -> {
service.submit(new Runnable(){
@Override
public void run() {
publish(c,client,storage,errCount,count);
}
} );
});
while (!service.awaitTermination(1, TimeUnit.MINUTES)) {
log.info("Waiting .. completed {}, out of {} ",count.get(),found.size());
if(found.size()==count.get()) service.shutdown();
}
System.out.println("Completed "+count.get()+" [elapsedTime = "+(System.currentTimeMillis()-startProcess)/1000+" sec] [warn : "+warnCount.get()+", err : "+errCount.get()+"]");
}
/**
 * Copies every workspace-backed content item onto storage and returns the
 * resulting temp files. Non-workspace {@link PersistedContent} entries are
 * silently ignored.
 *
 * @param contents persisted contents to transfer (only WorkspaceContent used)
 * @param storage  destination storage facade
 * @return one TempFile per WorkspaceContent successfully uploaded
 * @throws IOException if a content link is malformed or its stream fails
 */
public static List<TempFile> upload(List<PersistedContent> contents, StorageUtils storage) throws IOException {
    // Typed list instead of the original raw ArrayList.
    List<TempFile> toReturn = new ArrayList<>();
    for (PersistedContent content : contents) {
        if (content instanceof WorkspaceContent) {
            WorkspaceContent wc = (WorkspaceContent) content;
            // try-with-resources closes the URL stream, which the original
            // leaked. NOTE(review): assumes putOntoStorage consumes the
            // stream eagerly — confirm against StorageUtils.
            try (InputStream in = new URL(wc.getLink()).openStream()) {
                toReturn.add(storage.putOntoStorage(in, wc.getName()));
            }
        }
    }
    return toReturn;
}
private static void publish(Concessione c, Projects<Project> client, StorageUtils storage, AtomicLong errCount,AtomicLong count){
try {
long startTime = System.currentTimeMillis();
FileSets.RequestBuilder req=null;
for (Concessione c : found) {
try {
log.info("Using {} {}",c.getNome(),c.getMongo_id());
//Copy core fields
@ -237,8 +270,8 @@ public class ExportConcessioniAsProjects {
project = client.updateDocument(project.getId(),updated);
// FileSets
for (int i = 0; i < c.getImmaginiRappresentative().size(); i++) {
UploadedImage img=c.getImmaginiRappresentative().get(i);
for (int i = 0; i < c.getPianteFineScavo().size(); i++) {
LayerConcessione img=c.getPianteFineScavo().get(i);
List<PersistedContent> content=img.getActualContent();
if(content!=null && !content.isEmpty()) {
req = FileSets.build("$." + ProfiledConcessione.PIANTE_FINE_SCAVO+"["+i+"]", "fileset",
@ -250,6 +283,7 @@ public class ExportConcessioniAsProjects {
}
}
System.out.println("Done "+c.getId()+" in "+(System.currentTimeMillis()-startTime)/1000+" sec");
} catch (Throwable throwable) {
System.err.println(throwable);
errCount.incrementAndGet();
@ -258,19 +292,4 @@ public class ExportConcessioniAsProjects {
}
}
System.out.println("Done "+count.get()+" [warn : "+warnCount.get()+", err : "+errCount.get()+"]");
}
// NOTE(review): this is the pre-move copy of upload() shown by the diff (the
// method was relocated earlier in the class in this commit). Known issues in
// this version: raw ArrayList instead of List<TempFile>, and the InputStream
// from URL.openStream() is never closed (resource leak).
public static List<TempFile> upload(List<PersistedContent> contents,StorageUtils storage) throws IOException {
// Raw type — should be List<TempFile>.
ArrayList toReturn = new ArrayList();
for (PersistedContent content : contents)
// Only workspace-backed contents are transferred; others are skipped.
if (content instanceof WorkspaceContent) {
WorkspaceContent wc = (WorkspaceContent) content;
// Stream from openStream() is not closed here — leak.
toReturn.add(storage.putOntoStorage(new URL(wc.getLink()).openStream(), wc.getName()));
}
return toReturn;
}
}
}