Switched from HTTP requests to the old HomeLibrary Webapp to the storage hub client

Luca Frosini 2020-03-16 15:10:43 +01:00
parent 3434b1e208
commit 5f7455da9a
6 changed files with 43 additions and 38 deletions
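
In essence, WorkSpaceManagement stops going through the old HomeLibrary Webapp over HTTP and obtains the workspace root through the storage hub client. A minimal sketch of the new access path, assuming a no-arg StorageHubClient constructor (the dsl types and the getWSRoot() call are the ones this diff already uses):

import org.gcube.common.storagehub.client.dsl.FolderContainer;
import org.gcube.common.storagehub.client.dsl.StorageHubClient;

public class StorageHubAccessSketch {

	public static FolderContainer workspaceRoot() throws Exception {
		// Replaces the HomeLibrary chain: HomeManagerFactory -> HomeManager -> Home -> Workspace
		StorageHubClient storageHubClient = new StorageHubClient(); // assumed no-arg constructor
		return storageHubClient.getWSRoot();
	}

}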

View File

@ -167,7 +167,6 @@ public class Elaborator {
FileSystemDirectoryStructure fileSystemDirectoryStructure = new FileSystemDirectoryStructure();
File elaborationDirectory = fileSystemDirectoryStructure.getTargetFolder(aggregationType, aggregationStartDate);
Bucket srcBucket = CouchBaseConnector.getInstance().getBucket(recordType, aggregationInfo.getAggregationType(), SUFFIX.src);
Bucket dstBucket = CouchBaseConnector.getInstance().getBucket(recordType, aggregationInfo.getAggregationType(), SUFFIX.dst);
@ -179,7 +178,6 @@ public class Elaborator {
aggregator.aggregate();
Calendar now = Utility.getUTCCalendarInstance();
/*
* now is passed as an argument to the isTimeElapsed function to avoid situation

View File

@ -1,7 +1,6 @@
package org.gcube.accounting.aggregator.persist;
import java.io.File;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
@ -10,6 +9,7 @@ import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import org.gcube.accounting.aggregator.elaboration.Elaborator;
import org.gcube.accounting.aggregator.persistence.CouchBaseConnector;
import org.gcube.accounting.aggregator.status.AggregationState;
import org.gcube.accounting.aggregator.status.AggregationStatus;
@ -26,18 +26,7 @@ import com.couchbase.client.java.document.json.JsonObject;
*/
public class InsertDocument extends DocumentElaboration {
public static final String CSV_FILENAME;
static {
StringWriter stringWriter = new StringWriter();
stringWriter.append(ServiceUsageRecord.SERVICE_CLASS);
stringWriter.append("_");
stringWriter.append(ServiceUsageRecord.SERVICE_NAME);
stringWriter.append("_");
stringWriter.append(ServiceUsageRecord.CALLED_METHOD);
stringWriter.append(".csv");
CSV_FILENAME = stringWriter.toString();
}
public static final String CSV_FILENAME_SUFFIX = "." + ServiceUsageRecord.CALLED_METHOD + "s.csv";
/**
* This is used to save a CSV file which allows post-analysis of the calledMethods
@ -52,6 +41,7 @@ public class InsertDocument extends DocumentElaboration {
protected List<String> unparsableLines;
protected boolean serviceUsageRecordElaboration;
protected File csvFile;
public InsertDocument(AggregationStatus aggregationStatus, File file, Bucket bucket){
@ -63,7 +53,7 @@ public class InsertDocument extends DocumentElaboration {
protected String getKey(JsonObject jsonObject) {
String serviceClass = jsonObject.getString(ServiceUsageRecord.SERVICE_CLASS);
String serviceName = jsonObject.getString(ServiceUsageRecord.SERVICE_CLASS);
String serviceName = jsonObject.getString(ServiceUsageRecord.SERVICE_NAME);
return serviceClass + "," + serviceName;
}
@ -102,7 +92,7 @@ public class InsertDocument extends DocumentElaboration {
protected void afterElaboration() {
if(serviceUsageRecordElaboration) {
File destinationFolder = file.getParentFile();
File csvFile = new File(destinationFolder, CSV_FILENAME);
csvFile = new File(destinationFolder, file.getName().replace(Elaborator.AGGREGATED_SUFFIX, CSV_FILENAME_SUFFIX));
for(String key : serviceClassName_calledMethods.keySet()) {
Set<String> setOfCalledMethods = serviceClassName_calledMethods.get(key);
for(String calledMethod : setOfCalledMethods) {
@ -137,5 +127,9 @@ public class InsertDocument extends DocumentElaboration {
public void setServiceUsageRecordElaboration(boolean serviceUsageRecordElaboration) {
this.serviceUsageRecordElaboration = serviceUsageRecordElaboration;
}
public File getCSVFile() {
return csvFile;
}
}
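
Net effect on the CSV used for the calledMethods post-analysis: instead of one fixed CSV_FILENAME built in a static block, each aggregation gets its own CSV whose name is derived from the aggregated backup file. A hedged, runnable example of the derivation (the real values of Elaborator.AGGREGATED_SUFFIX and ServiceUsageRecord.CALLED_METHOD are not visible in this diff, so the strings below are illustrative stand-ins):

public class CsvNameSketch {

	public static void main(String[] args) {
		String aggregatedSuffix = ".aggregated";         // stands in for Elaborator.AGGREGATED_SUFFIX
		String csvFilenameSuffix = ".calledMethods.csv"; // stands in for CSV_FILENAME_SUFFIX
		String backupName = "ServiceUsageRecord-2020-03-01.aggregated";
		System.out.println(backupName.replace(aggregatedSuffix, csvFilenameSuffix));
		// prints: ServiceUsageRecord-2020-03-01.calledMethods.csv
	}

}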

View File

@ -1,7 +1,9 @@
package org.gcube.accounting.aggregator.persist;
import java.io.File;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import org.gcube.accounting.aggregator.directory.WorkSpaceDirectoryStructure;
import org.gcube.accounting.aggregator.elaboration.Elaborator;
@ -60,7 +62,6 @@ public class Persist {
}
public void recover() throws Exception{
if(aggregationStatus.getAggregatedRecordsNumber()==aggregationStatus.getOriginalRecordsNumber()){
if(originalRecordBucket.name().compareTo(aggregatedRecordBucket.name())==0 || aggregationStatus.getAggregatedRecordsNumber()==0){
Calendar now = Utility.getUTCCalendarInstance();
@ -77,7 +78,6 @@ public class Persist {
}
}
if(AggregationState.canContinue(aggregationStatus.getAggregationState(),AggregationState.AGGREGATED)){
// For each original row stored on file, remove it from the Bucket.
// At the end of the elaboration, set AggregationStatus to DELETED
@ -85,11 +85,12 @@ public class Persist {
DeleteDocument deleteDocument = new DeleteDocument(aggregationStatus, originalRecordsbackupFile, originalRecordBucket);
deleteDocument.elaborate();
}
InsertDocument insertDocument = null;
boolean serviceUsageRecordElaboration = false;
if(AggregationState.canContinue(aggregationStatus.getAggregationState(),AggregationState.DELETED)){
// For each aggregated row stored on file, add it to the Bucket. At the end of the elaboration, set AggregationStatus to ADDED
InsertDocument insertDocument = new InsertDocument(aggregationStatus, aggregateRecordsBackupFile, aggregatedRecordBucket);
boolean serviceUsageRecordElaboration = recordType.compareTo(ServiceUsageRecord.class.getSimpleName())==0 ? true : false;
insertDocument = new InsertDocument(aggregationStatus, aggregateRecordsBackupFile, aggregatedRecordBucket);
serviceUsageRecordElaboration = recordType.compareTo(ServiceUsageRecord.class.getSimpleName())==0 ? true : false;
insertDocument.setServiceUsageRecordElaboration(serviceUsageRecordElaboration);
insertDocument.elaborate();
}
@ -99,15 +100,25 @@ public class Persist {
WorkSpaceDirectoryStructure workspaceDirectoryStructure = new WorkSpaceDirectoryStructure();
FolderContainer targetFolder = workspaceDirectoryStructure.getTargetFolder(aggregationStatus.getAggregationInfo().getAggregationType(), aggregationStatus.getAggregationInfo().getAggregationStartDate());
List<File> files = new ArrayList<>();
files.add(originalRecordsbackupFile);
files.add(aggregateRecordsBackupFile);
String zipFilename = originalRecordsbackupFile.getName().replace(Elaborator.ORIGINAL_SUFFIX, "");
File malformedRecords = Utility.getMalformatedFile(aggregateRecordsBackupFile);
if(malformedRecords.exists()){
WorkSpaceManagement.getInstance().zipAndBackupFiles(targetFolder,
originalRecordsbackupFile.getName().replace(Elaborator.ORIGINAL_SUFFIX, "-with-malformed"), originalRecordsbackupFile, aggregateRecordsBackupFile, malformedRecords);
}else{
WorkSpaceManagement.getInstance().zipAndBackupFiles(targetFolder,
originalRecordsbackupFile.getName().replace(Elaborator.ORIGINAL_SUFFIX, ""), originalRecordsbackupFile, aggregateRecordsBackupFile);
files.add(malformedRecords);
zipFilename = originalRecordsbackupFile.getName().replace(Elaborator.ORIGINAL_SUFFIX, "-with-malformed");
}
if(serviceUsageRecordElaboration) {
files.add(insertDocument.getCSVFile());
}
WorkSpaceManagement.getInstance().zipAndBackupFiles(targetFolder, zipFilename, files);
setAggregationStateToCompleted(now);
}
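
Hoisting insertDocument and serviceUsageRecordElaboration out of the DELETED branch is what lets the CSV reach the zip step; a condensed sketch of the pattern, using the names from this diff (canContinueToAdded is shorthand for the AggregationState.canContinue(...) check):

// Declared before the conditional so the backup step below can see them.
InsertDocument insertDocument = null;
boolean serviceUsageRecordElaboration = false;
if (canContinueToAdded) {
	insertDocument = new InsertDocument(aggregationStatus, aggregateRecordsBackupFile, aggregatedRecordBucket);
	serviceUsageRecordElaboration = recordType.compareTo(ServiceUsageRecord.class.getSimpleName()) == 0;
	insertDocument.setServiceUsageRecordElaboration(serviceUsageRecordElaboration);
	insertDocument.elaborate();
}
if (serviceUsageRecordElaboration) {
	// Safe: this flag is only true when insertDocument was created above.
	files.add(insertDocument.getCSVFile());
}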

View File

@ -9,12 +9,6 @@ import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import org.gcube.accounting.aggregator.utility.Utility;
import org.gcube.common.homelibrary.home.Home;
import org.gcube.common.homelibrary.home.HomeLibrary;
import org.gcube.common.homelibrary.home.HomeManager;
import org.gcube.common.homelibrary.home.HomeManagerFactory;
import org.gcube.common.homelibrary.home.User;
import org.gcube.common.homelibrary.home.workspace.Workspace;
import org.gcube.common.storagehub.client.dsl.FileContainer;
import org.gcube.common.storagehub.client.dsl.FolderContainer;
import org.gcube.common.storagehub.client.dsl.ItemContainer;
@ -66,13 +60,13 @@ public class WorkSpaceManagement {
return zipFileName;
}
public boolean zipAndBackupFiles(FolderContainer targetFolder, String name, File... files) throws Exception {
public boolean zipAndBackupFiles(FolderContainer targetFolder, String name, List<File> files) throws Exception {
try {
String zipFileName = getZipFileName(name);
File zipFile = new File(files[0].getParentFile(), zipFileName);
zipFile.delete();
File zipFile = new File(files.get(0).getParentFile(), zipFileName);
zipFile.delete(); // Removing old zip file if any
logger.trace("Going to save {} into workspace", zipFile.getAbsolutePath());
FileOutputStream fos = new FileOutputStream(zipFile);
@ -104,6 +98,9 @@ public class WorkSpaceManagement {
return storageHubClient.getWSRoot();
} catch(Exception e) {
String username = Utility.getUsername();
logger.info("Unable to create the Workspace Root for {}.", username);
throw e;
/*
logger.info("Unable to obtain the Workspace Root for {}. Going to create it.", username);
try {
HomeManagerFactory factory = HomeLibrary.getHomeManagerFactory();
@ -118,6 +115,7 @@ public class WorkSpaceManagement {
logger.info("Unable to create the Workspace Root for {}.", username);
throw e;
}
*/
}
}
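
For callers, the zipAndBackupFiles change is varargs to List<File>, so migration is mechanical; a minimal caller sketch with names taken from this diff (the updated test below does the same):

List<File> files = new ArrayList<>();
files.add(originalRecordsbackupFile);
files.add(aggregateRecordsBackupFile);
WorkSpaceManagement.getInstance().zipAndBackupFiles(targetFolder, zipFilename, files);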

View File

@ -1,8 +1,10 @@
package org.gcube.accounting.aggregator.file;
import java.io.File;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import org.gcube.accounting.aggregator.aggregation.AggregationType;
import org.gcube.accounting.aggregator.directory.WorkSpaceDirectoryStructure;
@ -29,7 +31,9 @@ public class WorkSpaceDirectoryStructureTest extends ContextTest {
FolderContainer targetFolder = workSpaceDirectoryStructure.getTargetFolder(aggregationType, date);
File file = new File(Constant.ROOT_DIRECTORY, "aux.txt");
List<File> files = new ArrayList<>();
files.add(file);
WorkSpaceManagement.getInstance().zipAndBackupFiles(targetFolder, "Test", file);
WorkSpaceManagement.getInstance().zipAndBackupFiles(targetFolder, "Test", files);
}
}

View File

@ -10,7 +10,7 @@
<logger name="org.gcube" level="INFO" />
<!-- logger name="org.gcube.accounting.aggregator" level="TRACE" / -->
<logger name="org.gcube.accounting.aggregator" level="TRACE" />
<root level="WARN">
<appender-ref ref="STDOUT" />