merge from private folder on SVN

git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/content-management/storage-manager-trigger@93065 82a268e6-3cf1-43bd-a215-b396298e98cf
21214
roberto.cirillo 10 years ago
parent b3cd12bbe2
commit 6001d5a0b4

@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" output="target/classes" path="src/main/java"/>
<classpathentry excluding="**" kind="src" output="target/classes" path="src/main/resources"/>
<classpathentry kind="src" output="target/test-classes" path="src/test/java"/>
<classpathentry excluding="**" kind="src" output="target/test-classes" path="src/test/resources"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.7"/>
<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER"/>
<classpathentry kind="output" path="target/classes"/>
</classpath>

@@ -0,0 +1,23 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>storage-manager-trigger</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.m2e.core.maven2Builder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.jdt.core.javanature</nature>
<nature>org.eclipse.m2e.core.maven2Nature</nature>
</natures>
</projectDescription>

@@ -0,0 +1,5 @@
eclipse.preferences.version=1
encoding//src/main/java=UTF-8
encoding//src/main/resources=UTF-8
encoding//src/test/java=UTF-8
encoding//src/test/resources=UTF-8

@@ -0,0 +1,5 @@
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.7
org.eclipse.jdt.core.compiler.compliance=1.7
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
org.eclipse.jdt.core.compiler.source=1.7

@@ -0,0 +1,4 @@
activeProfiles=
eclipse.preferences.version=1
resolveWorkspaceProjects=true
version=1

2392
log.txt

File diff suppressed because it is too large

@@ -0,0 +1,121 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>maven-parent</artifactId>
<groupId>org.gcube.tools</groupId>
<version>1.0.0</version>
<relativePath />
</parent>
<groupId>org.gcube.contentmanagement</groupId>
<artifactId>storage-manager-trigger</artifactId>
<version>0.0.1-SNAPSHOT</version>
<scm>
<connection>scm:svn:http://svn.d4science.research-infrastructures.eu/gcube/trunk/content-management/${project.artifactId}</connection>
<developerConnection>scm:svn:https://svn.d4science.research-infrastructures.eu/gcube/trunk/content-management/${project.artifactId}</developerConnection>
<url>http://svn.d4science.research-infrastructures.eu/gcube/trunk/content-management/${project.artifactId}</url>
</scm>
<properties>
<distroDirectory>${project.basedir}/distro</distroDirectory>
</properties>
<dependencies>
<dependency>
<groupId>org.mongodb</groupId>
<artifactId>mongo-java-driver</artifactId>
<version>[2.6.2,)</version>
</dependency>
<!-- <dependency> -->
<!-- <groupId>org.slf4j</groupId> -->
<!-- <artifactId>slf4j-nop</artifactId> -->
<!-- <version>1.6.4</version> -->
<!-- </dependency> -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>1.6.4</version>
</dependency>
<!-- <dependency> -->
<!-- <groupId>org.gcube.resources</groupId> -->
<!-- <artifactId>common-gcore-resources</artifactId> -->
<!-- <version>[1.0.0-SNAPSHOT,2.0.0-SNAPSHOT)</version> -->
<!-- </dependency> -->
<!-- <dependency> -->
<!-- <groupId>org.gcube.core</groupId> -->
<!-- <artifactId>common-gcore-stubs</artifactId> -->
<!-- <version>[1.0.0-SNAPSHOT,2.0.0-SNAPSHOT)</version> -->
<!-- </dependency> -->
<!-- <dependency> -->
<!-- <groupId>org.gcube.resources.discovery</groupId> -->
<!-- <artifactId>ic-client</artifactId> -->
<!-- <version>[1.0.0-SNAPSHOT, 2.0.0-SNAPSHOT)</version> -->
<!-- </dependency> -->
<!-- <dependency> -->
<!-- <groupId>org.gcube.core</groupId> -->
<!-- <artifactId>common-encryption</artifactId> -->
<!-- <version>[1.0.0-SNAPSHOT, 2.0.0-SNAPSHOT)</version> -->
<!-- </dependency> -->
<!-- <dependency> -->
<!-- <groupId>org.gcube.core</groupId> -->
<!-- <artifactId>common-scope-maps</artifactId> -->
<!-- <version>[1.0.0-SNAPSHOT, 2.0.0-SNAPSHOT)</version> -->
<!-- </dependency> -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.11</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.gcube.accounting</groupId>
<artifactId>common-accounting-lib</artifactId>
<version>[1.0.0-SNAPSHOT,2.0.0-SNAPSHOT)</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-resources-plugin</artifactId>
<version>2.5</version>
<executions>
<execution>
<id>copy-profile</id>
<phase>install</phase>
<goals>
<goal>copy-resources</goal>
</goals>
<configuration>
<outputDirectory>target</outputDirectory>
<resources>
<resource>
<directory>${distroDirectory}</directory>
<filtering>true</filtering>
<includes>
<include>profile.xml</include>
</includes>
</resource>
</resources>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<configuration>
<descriptors>
<descriptor>${distroDirectory}/descriptor.xml</descriptor>
</descriptors>
</configuration>
<executions>
<execution>
<id>servicearchive</id>
<phase>install</phase>
<goals>
<goal>single</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

@@ -0,0 +1,15 @@
log4j.rootLogger=INFO, A1, stdout
log4j.appender.A1=org.apache.log4j.RollingFileAppender
log4j.appender.A1.File=log.txt
log4j.appender.A1.layout=org.apache.log4j.PatternLayout
log4j.appender.A1.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n
# ***** Max file size is set to 100MB
log4j.appender.A1.MaxFileSize=100MB
# ***** Keep one backup file
log4j.appender.A1.MaxBackupIndex=1
#CONSOLE
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.Threshold=INFO
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=[%t] %-5p %c %d{dd MMM yyyy ;HH:mm:ss.SSS} - %m%n

@@ -0,0 +1,31 @@
package org.gcube.contentmanager.storageserver.accounting;
public interface Report {
/**
* Set the generic properties of the report
* @param consumerId
* @param resourceScope
*/
public void init(String consumerId, String resourceScope);
/**
* Set the start time of the operation
*/
public void timeUpdate();
/**
* Set the end time of the operation and the record-specific properties
*/
public void ultimate(String owner, String uri, String operation, String size, String filePath, String id);
/**
* Send the report
*/
public void send();
}
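The interface is meant to be driven in a fixed order, as JsonParser does further down in this commit: init, then timeUpdate, then ultimate, then send. A minimal sketch of that sequence (error handling omitted; the consumer id, scope, size, path and object id below are placeholder values):

Report report = ReportFactory.getReport(ReportConfig.ACCOUNTING_TYPE);
report.init("consumer-id", "/gcube/devsec");   // generic properties of the usage record
report.timeUpdate();                           // start time of the operation
// ... storage operation observed on the oplog ...
report.ultimate("owner", null, "INSERT", "1024", "/gcube/devsec/home/user/file", "507f1f77bcf86cd799439011");
report.send();                                 // ship the raw usage record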

@@ -0,0 +1,109 @@
package org.gcube.contentmanager.storageserver.accounting;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import org.gcube.accounting.datamodel.RawUsageRecord;
import org.gcube.accounting.exception.InvalidValueException;
import org.gcube.accounting.messaging.ResourceAccounting;
import org.gcube.accounting.messaging.ResourceAccountingFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ReportAccountingImpl implements Report {
final Logger logger = LoggerFactory.getLogger(ReportAccountingImpl.class);
public RawUsageRecord ur;
public ResourceAccounting raFactory;
@Override
public void init(String consumerId, String resourceScope) {
logger.info("set accounting properties: consumerId "+consumerId+" scope: "+resourceScope);
raFactory = null;
try {
raFactory = ResourceAccountingFactory.getResourceAccountingInstance();
}
catch (IOException e) {
logger.error("unable to get the accounting instance", e);
}
catch (Exception e) {
logger.error("unexpected error while getting the accounting instance", e);
}
this.ur = new RawUsageRecord();
//generic properties
ur.setResourceType("storage-usage");
if(consumerId!=null) ur.setConsumerId(consumerId);
// ur.setResourceOwner("paolo.fabriani");
if(resourceScope !=null) ur.setResourceScope(resourceScope);
//set creation time
Calendar createTime = new GregorianCalendar();
ur.setCreateTime(createTime.getTime());
}
@Override
public void timeUpdate() {
// setIpAddress();
setStartTime();
}
@Override
public void ultimate(String owner, String uri, String operation, String size, String filePath, String id) {
setEndTime();
logger.info("set accounting properties: owner "+owner+" uri: "+uri+" operation: "+operation+" size: "+size);
//specific properties TODO
if(owner != null) ur.setResourceOwner(owner);
if(uri != null) ur.setResourceSpecificProperty("objectURI", uri);
if (operation!=null) ur.setResourceSpecificProperty("operationType",operation);
ur.setResourceSpecificProperty("dataType","STORAGE");
if(size!= null) ur.setResourceSpecificProperty("dataVolume", size);
if(filePath != null) ur.setResourceSpecificProperty("remotePath", filePath);
if(id!= null) ur.setResourceSpecificProperty("id", id);
}
@Override
public void send() {
logger.info("report sending ");
raFactory.sendAccountingMessage(ur);
}
// private void setIpAddress() {
// String address=null;
// try {
// address=InetAddress.getLocalHost().toString();
// } catch (UnknownHostException e) {
//
// }
// logger.info("caller ip: "+address);
// if (address!=null) ur.setResourceSpecificProperty("callerIP", address);;
// }
private void setStartTime() {
//set start time
Calendar startTime = new GregorianCalendar();
Date time=startTime.getTime();
logger.info("set start time: "+time);
try {
ur.setStartTime(time);
}
catch (InvalidValueException e) {
logger.error("invalid start time", e);
}
}
private void setEndTime() {
// set end time
Calendar endTime = new GregorianCalendar();
Date time=endTime.getTime();
logger.info("set end time: "+time);
try {
ur.setEndTime(time);
}
catch (InvalidValueException e) {
logger.error("invalid end time", e);
}
}
}

@@ -0,0 +1,8 @@
package org.gcube.contentmanager.storageserver.accounting;
public class ReportConfig {
/** Report type, used by the ReportFactory class */
public static final int ACCOUNTING_TYPE = 1;
}

@@ -0,0 +1,24 @@
package org.gcube.contentmanager.storageserver.accounting;
public class ReportException extends Exception {
/** The no-arg constructor */
public ReportException() {
}
/**
* Construct a ReportException with an error message
* @param message the error message
*/
public ReportException(String message) {
super(message);
}
/**
* Construct a ReportException wrapping the original exception
* @param e the original exception
*/
public ReportException(Exception e) {
super(e);
}
}

@@ -0,0 +1,37 @@
package org.gcube.contentmanager.storageserver.accounting;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ReportFactory {
final static Logger logger=LoggerFactory.getLogger(ReportFactory.class);
/**
* <p> Instantiate the Report implementation specified by the caller </p>
* @return the Report implementation for the given type
* @throws ReportException if the type is unknown or the instantiation fails
*/
public static Report getReport(int reportType) throws ReportException {
Report report = null;
try {
switch(reportType) {
case ReportConfig.ACCOUNTING_TYPE :
report = new ReportAccountingImpl();
break;
default :
throw new ReportException("ReportFactory.getReport: ["+reportType+"] is an unknown report type");
}
} catch (Exception e) {
e.printStackTrace();
throw new ReportException("ReportFactory.getReport: exception while getting the Report instance: \n" + e.getMessage());
}
logger.trace("ReportFactory.getReport: returning class ["+report.getClass().getName()+"]...");
return report;
}
}

@@ -0,0 +1,46 @@
package org.gcube.contentmanager.storageserver.data;
import java.util.Vector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.mongodb.DBObject;
public class CubbyHole {
private Vector<DBObject> requestQueue = new Vector<DBObject>();
final static Logger logger=LoggerFactory.getLogger(CubbyHole.class);
private boolean available;
public synchronized DBObject get() {
while (requestQueue.size() == 0){
try {
wait();
}
catch (InterruptedException e){
logger.error("getRequest()", e);
}
}
DBObject value=requestQueue.remove(0);
logger.debug("get element from queue: "+value);
available = false;
notifyAll();
return value;
}
public synchronized void put(DBObject value) {
// while (available == true) {
// try {
// wait();
// } catch (InterruptedException e) {
// }
// }
logger.debug("put element to queue: "+value);
requestQueue.addElement(value);
available = true;
notifyAll();
}
}
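CubbyHole is the blocking handoff between the oplog reader and the parser: get() waits while the queue is empty, put() appends an element and wakes any waiting consumer. A minimal sketch of that handoff with plain Java 7 threads (imports for CubbyHole and com.mongodb.BasicDBObject assumed; the document content is made up for illustration):

final CubbyHole queue = new CubbyHole();
new Thread(new Runnable() {
public void run() { queue.put(new BasicDBObject("op", "i")); }      // producer side
}).start();
new Thread(new Runnable() {
public void run() { System.out.println("got: " + queue.get()); }    // blocks until put() has run
}).start();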

@@ -0,0 +1,102 @@
package org.gcube.contentmanager.storageserver.data;
import java.net.UnknownHostException;
import java.util.Arrays;
import java.util.List;
import com.mongodb.BasicDBObject;
import com.mongodb.Bytes;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import com.mongodb.DBObject;
import com.mongodb.MongoClient;
import com.mongodb.ServerAddress;
import org.bson.types.BSONTimestamp;
import org.gcube.contentmanager.storageserver.parse.JsonParser;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ReadingMongoOplog extends Thread{
final static Logger logger=LoggerFactory.getLogger(ReadingMongoOplog.class);
private ServerAddress[] server;
private MongoClient mongoClient;
private DB local;
private DBCollection oplog;
private CubbyHole c;
private int number;
public ReadingMongoOplog(List<String> srvs, CubbyHole c, int number){
this.c=c;
this.number=number;
try {
if(srvs.size() > 0){
server=new ServerAddress[srvs.size()];
int i=0;
for(String s : srvs){
server[i]=new ServerAddress(s);
i++;
}
}else{
logger.error("MongoDB server not Setted. Please set one or more servers");
throw new RuntimeException("MongoDB server not Setted. Please set one or more servers");
}
} catch (UnknownHostException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
init();
}
private void init() {
mongoClient = new MongoClient(Arrays.asList(server));//"146.48.123.71"
local = mongoClient.getDB("local");
oplog = local.getCollection("oplog.rs");
}
public void run() {
DBCursor lastCursor = oplog.find().sort(new BasicDBObject("$natural", -1)).limit(1);
if (!lastCursor.hasNext()) {
logger.error("no oplog!");
return;
}
DBObject last = lastCursor.next();
BSONTimestamp ts = (BSONTimestamp) last.get("ts");
while (true) {
logger.debug("starting at ts: " + ts);
DBCursor cursor = oplog.find(new BasicDBObject("ts", new BasicDBObject("$gt", ts)));
cursor.addOption(Bytes.QUERYOPTION_TAILABLE);
cursor.addOption(Bytes.QUERYOPTION_AWAITDATA);
while (cursor.hasNext()) {
DBObject x = cursor.next();
ts = (BSONTimestamp) x.get("ts");
if(x.get("o2")!=null){
if(x.containsField("o")){
// parser.jsonRecordParser(x);
c.put(x);
logger.info("Producer #" + this.number + " put: " + x);
}else{
logger.info("operation is not accounted");
}
}else{
logger.debug("record discarded: \t"+x);
}
}
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
}

@@ -0,0 +1,103 @@
package org.gcube.contentmanager.storageserver.parse;
import org.bson.types.ObjectId;
import org.gcube.contentmanager.storageserver.accounting.Report;
import org.gcube.contentmanager.storageserver.accounting.ReportConfig;
import org.gcube.contentmanager.storageserver.accounting.ReportException;
import org.gcube.contentmanager.storageserver.accounting.ReportFactory;
import org.gcube.contentmanager.storageserver.data.CubbyHole;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.mongodb.DBObject;
public class JsonParser extends Thread{
final static Logger logger=LoggerFactory.getLogger(JsonParser.class);
private CubbyHole c;
private int number;
private Report report;
public JsonParser(CubbyHole c, int number){
this.c=c;
this.number=number;
// init the accounting report
try {
init();
} catch (ReportException e) {
throw new RuntimeException("Accounting report Exception initialization");
}
}
private void init() throws ReportException{
report=ReportFactory.getReport(ReportConfig.ACCOUNTING_TYPE);
}
public void run() {
while(true){
DBObject x=c.get();
logger.info("Consumer #" + this.number + " got: " + x);
DBObject obj=(DBObject)x.get("o");
String op=(String) x.get("op");
String filename=(String) obj.get("filename");
String type=(String) obj.get("type");
String name=(String) obj.get("name");
String owner=(String) obj.get("owner");
ObjectId objectId=(ObjectId)obj.get("_id");
String id = objectId.toString();
long length=((Number) obj.get("length")).longValue();
logger.debug("[recordCheck] operation: "+op+" name: "+name+" type: "+type+" path: "+filename+" length: "+length+" owner: "+owner+ " id: "+id);
if((length >0) && (name!=null)){
//call to the accounting library
String scope=retrieveScopeFromFilename(filename);
report.init(owner, scope);
report.timeUpdate();
String operation=mappingOperationField(op);
report.ultimate(owner, null, operation, length+"", filename, id);
report.send();
logger.debug("[accountingCall] operation: "+op+" name: "+name+" type: "+type+" path: "+filename+" length: "+length+" owner: "+owner);
}else{
logger.debug("operation is not accounted");
}
}
}
private String mappingOperationField(String op) {
if(op.equals("u")){
return "UPDATE";
}else if(op.equals("i")){
return "INSERT";
}else if(op.equals("d")){
return "DELETE";
}
return op;
}
private String retrieveScopeFromFilename(String filename) {
String[] split=filename.split("/");
if(split.length>1){
String scope=null;
int i=1;
if(split[1].equals("VOLATILE")){
i=2;
// scope="/"+split[1];
// if(split[2] != null && (!split[2].equals("home")) && (!split[2].equals("public"))){
// scope=scope+"/"+split[2];
// }
}
scope="/"+split[i];
i++;
while(i < split.length && (!split[i].equals("home")) && (!split[i].equals("public"))){
scope=scope+"/"+split[i];
i++;
}
logger.info("retrieved scope: "+scope);
return scope;
}else logger.error("Scope bad format: scope not retrieved from string: "+filename);
return null;
}
}
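retrieveScopeFromFilename derives the accounting scope from the remote path: it keeps the leading path segments up to the first "home" or "public" segment, skipping an initial VOLATILE segment if present. Illustrative inputs and the scope the current logic would produce (both paths are made up):

// "/gcube/devsec/home/rob/file.txt"      -> "/gcube/devsec"
// "/VOLATILE/gcube/public/tmp/file.txt"  -> "/gcube"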

@@ -0,0 +1,27 @@
package org.gcube.contentmanager.storageserver.startup;
import java.util.Arrays;
import org.gcube.contentmanager.storageserver.data.CubbyHole;
import org.gcube.contentmanager.storageserver.data.ReadingMongoOplog;
import org.gcube.contentmanager.storageserver.parse.JsonParser;
public class Startup {
public static void main(String[] args) {
for (int i=0; i<args.length;i++)
System.out.println("param N." +i + ": " + args[i]);
if(args.length != 1){
System.out.println("Usage:");
System.out.println("\tjava Startup ip \n\n");
System.out.println("Example:");
System.out.println("\tjava Startup 127.0.0.1 \n\n");
return;
}
CubbyHole c = new CubbyHole();
ReadingMongoOplog producer=new ReadingMongoOplog( Arrays.asList(args[0]), c, 1 );
JsonParser consumer=new JsonParser(c, 1);
producer.start();
consumer.start();
}
}