forked from D-Net/dnet-hadoop

commit d4ad740c98
merge branch with master
@@ -3,6 +3,9 @@ package eu.dnetlib.dhp.schema.common;
 
 import static com.google.common.base.Preconditions.checkArgument;
 
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Optional;

@@ -473,4 +476,25 @@ public class ModelSupport {
     private static <T extends Oaf> String idFnForOafEntity(T t) {
         return ((OafEntity) t).getId();
     }
+
+    public static final String ISO8601FORMAT = "yyyy-MM-dd'T'HH:mm:ssZ";
+
+    public static String oldest(String dateA, String dateB) throws ParseException {
+
+        if (StringUtils.isBlank(dateA)) {
+            return dateB;
+        }
+        if (StringUtils.isBlank(dateB)) {
+            return dateA;
+        }
+        if (StringUtils.isNotBlank(dateA) && StringUtils.isNotBlank(dateB)) {
+
+            final Date a = new SimpleDateFormat(ISO8601FORMAT).parse(dateA);
+            final Date b = new SimpleDateFormat(ISO8601FORMAT).parse(dateB);
+
+            return a.before(b) ? dateA : dateB;
+        } else {
+            return null;
+        }
+    }
 }
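Illustrative usage sketch of the new ModelSupport.oldest helper (not part of the commit; the wrapper class and the timestamp values below are hypothetical, the pattern is the ISO8601FORMAT constant declared above):

import java.text.ParseException;

import eu.dnetlib.dhp.schema.common.ModelSupport;

public class OldestExample {

    public static void main(String[] args) throws ParseException {
        // a blank argument makes the other one win without any parsing
        System.out.println(ModelSupport.oldest(null, "2021-02-01T10:00:00+0000"));
        // otherwise both values are parsed with "yyyy-MM-dd'T'HH:mm:ssZ" and the earlier one is returned
        System.out.println(ModelSupport.oldest("2021-01-05T10:00:00+0000", "2021-02-01T10:00:00+0000"));
        // a value that does not match the pattern (e.g. a bare "2016-04-05") raises ParseException
    }
}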
@@ -1,8 +1,11 @@
 
 package eu.dnetlib.dhp.schema.oaf;
 
+import eu.dnetlib.dhp.schema.common.ModelSupport;
+
 import static com.google.common.base.Preconditions.checkArgument;
 
+import java.text.ParseException;
 import java.util.*;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;

@@ -106,7 +109,7 @@ public class Relation extends Oaf {
     }
 
     public Boolean getValidated() {
-        return validated;
+        return Objects.nonNull(validated) && validated;
     }
 
     public void setValidated(Boolean validated) {

@@ -130,6 +133,13 @@ public class Relation extends Oaf {
                 Objects.equals(getSubRelType(), r.getSubRelType()), "subRelType(s) must be equal");
         checkArgument(Objects.equals(getRelClass(), r.getRelClass()), "relClass(es) must be equal");
 
+        setValidated(getValidated() || r.getValidated());
+        try {
+            setValidationDate(ModelSupport.oldest(getValidationDate(), r.getValidationDate()));
+        } catch (ParseException e) {
+            throw new IllegalArgumentException(String.format("invalid validation date format in relation [s:%s, t:%s]: %s", getSource(), getTarget(), getValidationDate()));
+        }
+
         super.mergeFrom(r);
     }
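A minimal sketch of the changed getter semantics (illustrative only, not part of the commit; the wrapper class below is hypothetical):

import eu.dnetlib.dhp.schema.oaf.Relation;

public class ValidatedFlagExample {

    public static void main(String[] args) {
        Relation r = new Relation();
        // an unset flag used to be returned as null; it now reads as false
        System.out.println(r.getValidated()); // false
        r.setValidated(true);
        System.out.println(r.getValidated()); // true
    }
}

mergeFrom now also ORs the validated flags of the two relations and keeps the older of the two validation dates, turning an unparsable date into an IllegalArgumentException (exercised by the MergeTest changes further down).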
@@ -323,13 +323,13 @@ public class Result extends OafEntity implements Serializable {
         if (a.size() == b.size()) {
             int msa = a
                 .stream()
-                .filter(i -> i.getValue() != null)
+                .filter(i -> i != null && i.getValue() != null)
                 .map(i -> i.getValue().length())
                 .max(Comparator.naturalOrder())
                 .orElse(0);
             int msb = b
                 .stream()
-                .filter(i -> i.getValue() != null)
+                .filter(i -> i != null && i.getValue() != null)
                 .map(i -> i.getValue().length())
                 .max(Comparator.naturalOrder())
                 .orElse(0);
@@ -63,6 +63,51 @@ public class MergeTest {
         assertEquals(3, a.getSubject().size());
     }
 
+    @Test
+    public void mergeRelationTest() {
+
+        Relation a = createRel(null, null);
+        Relation b = createRel(null, null);
+        a.mergeFrom(b);
+        assertEquals(a, b);
+
+        a = createRel(true, null);
+        b = createRel(null, null);
+        a.mergeFrom(b);
+        assertEquals(true, a.getValidated());
+
+        a = createRel(true, null);
+        b = createRel(false, null);
+        a.mergeFrom(b);
+        assertEquals(true, a.getValidated());
+
+        a = createRel(true, null);
+        b = createRel(true, "2016-04-05T12:41:19.202Z");
+        a.mergeFrom(b);
+        assertEquals("2016-04-05T12:41:19.202Z", a.getValidationDate());
+    }
+
+    @Test
+    public void mergeRelationTestParseException() {
+        assertThrows(IllegalArgumentException.class, () -> {
+            Relation a = createRel(true, "2016-04-05");
+            Relation b = createRel(true, "2016-04-05");
+            a.mergeFrom(b);
+        });
+    }
+
+    private Relation createRel(Boolean validated, String validationDate) {
+        Relation rel = new Relation();
+        rel.setSource("1");
+        rel.setTarget("2");
+        rel.setRelType("reltype");
+        rel.setSubRelType("subreltype");
+        rel.setRelClass("relclass");
+        rel.setValidated(validated);
+        rel.setValidationDate(validationDate);
+        return rel;
+    }
+
     private KeyValue setKV(final String key, final String value) {
 
         KeyValue k = new KeyValue();
@@ -136,7 +136,17 @@
         </configuration>
     </global>
 
-    <start to="ensure_output_dir"/>
+    <start to="resume_from"/>
 
+    <decision name="resume_from">
+        <switch>
+            <case to="ensure_output_dir">${wf:conf('resumeFrom') eq 'ensure_output_dir'}</case>
+            <case to="index_event_subset">${wf:conf('resumeFrom') eq 'index_event_subset'}</case>
+            <case to="stats">${wf:conf('resumeFrom') eq 'stats'}</case>
+            <case to="index_notifications">${wf:conf('resumeFrom') eq 'index_notifications'}</case>
+            <default to="ensure_output_dir"/>
+        </switch>
+    </decision>
+
     <kill name="Kill">
         <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
@@ -505,6 +515,33 @@
             <arg>--maxEventsForTopic</arg><arg>${maxIndexedEventsForDsAndTopic}</arg>
             <arg>--brokerApiBaseUrl</arg><arg>${brokerApiBaseUrl}</arg>
         </spark>
+        <ok to="stats"/>
+        <error to="Kill"/>
+    </action>
+
+    <action name="stats">
+        <spark xmlns="uri:oozie:spark-action:0.2">
+            <master>yarn</master>
+            <mode>cluster</mode>
+            <name>GenerateStatsJob</name>
+            <class>eu.dnetlib.dhp.broker.oa.GenerateStatsJob</class>
+            <jar>dhp-broker-events-${projectVersion}.jar</jar>
+            <spark-opts>
+                --executor-cores=${sparkExecutorCores}
+                --executor-memory=${sparkExecutorMemory}
+                --driver-memory=${sparkDriverMemory}
+                --conf spark.extraListeners=${spark2ExtraListeners}
+                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
+                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
+                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
+                --conf spark.sql.shuffle.partitions=3840
+            </spark-opts>
+            <arg>--outputDir</arg><arg>${outputDir}</arg>
+            <arg>--dbUrl</arg><arg>${brokerDbUrl}</arg>
+            <arg>--dbUser</arg><arg>${brokerDbUser}</arg>
+            <arg>--dbPassword</arg><arg>${brokerDbPassword}</arg>
+            <arg>--brokerApiBaseUrl</arg><arg>${brokerApiBaseUrl}</arg>
+        </spark>
         <ok to="index_notifications"/>
         <error to="Kill"/>
     </action>
@@ -535,33 +572,6 @@
             <arg>--esNodesWanOnly</arg><arg>${esNodesWanOnly}</arg>
             <arg>--brokerApiBaseUrl</arg><arg>${brokerApiBaseUrl}</arg>
         </spark>
-        <ok to="stats"/>
-        <error to="Kill"/>
-    </action>
-
-    <action name="stats">
-        <spark xmlns="uri:oozie:spark-action:0.2">
-            <master>yarn</master>
-            <mode>cluster</mode>
-            <name>GenerateStatsJob</name>
-            <class>eu.dnetlib.dhp.broker.oa.GenerateStatsJob</class>
-            <jar>dhp-broker-events-${projectVersion}.jar</jar>
-            <spark-opts>
-                --executor-cores=${sparkExecutorCores}
-                --executor-memory=${sparkExecutorMemory}
-                --driver-memory=${sparkDriverMemory}
-                --conf spark.extraListeners=${spark2ExtraListeners}
-                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
-                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
-                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
-                --conf spark.sql.shuffle.partitions=3840
-            </spark-opts>
-            <arg>--outputDir</arg><arg>${outputDir}</arg>
-            <arg>--dbUrl</arg><arg>${brokerDbUrl}</arg>
-            <arg>--dbUser</arg><arg>${brokerDbUser}</arg>
-            <arg>--dbPassword</arg><arg>${brokerDbPassword}</arg>
-            <arg>--brokerApiBaseUrl</arg><arg>${brokerApiBaseUrl}</arg>
-        </spark>
         <ok to="End"/>
         <error to="Kill"/>
     </action>
@@ -0,0 +1,18 @@
+<configuration>
+    <property>
+        <name>jobTracker</name>
+        <value>yarnRM</value>
+    </property>
+    <property>
+        <name>nameNode</name>
+        <value>hdfs://nameservice1</value>
+    </property>
+    <property>
+        <name>oozie.use.system.libpath</name>
+        <value>true</value>
+    </property>
+    <property>
+        <name>oozie.action.sharelib.for.spark</name>
+        <value>spark2</value>
+    </property>
+</configuration>
@@ -0,0 +1,114 @@
+<workflow-app name="create broker events" xmlns="uri:oozie:workflow:0.5">
+
+    <parameters>
+        <property>
+            <name>outputDir</name>
+            <description>the path where the the generated data will be stored</description>
+        </property>
+        <property>
+            <name>brokerApiBaseUrl</name>
+            <description>the url of the broker service api</description>
+        </property>
+        <property>
+            <name>brokerDbUrl</name>
+            <description>the url of the broker database</description>
+        </property>
+        <property>
+            <name>brokerDbUser</name>
+            <description>the user of the broker database</description>
+        </property>
+        <property>
+            <name>brokerDbPassword</name>
+            <description>the password of the broker database</description>
+        </property>
+        <property>
+            <name>sparkDriverMemory</name>
+            <description>memory for driver process</description>
+        </property>
+        <property>
+            <name>sparkExecutorMemory</name>
+            <description>memory for individual executor</description>
+        </property>
+        <property>
+            <name>sparkExecutorCores</name>
+            <description>number of cores used by single executor</description>
+        </property>
+        <property>
+            <name>oozieActionShareLibForSpark2</name>
+            <description>oozie action sharelib for spark 2.*</description>
+        </property>
+        <property>
+            <name>spark2ExtraListeners</name>
+            <value>com.cloudera.spark.lineage.NavigatorAppListener</value>
+            <description>spark 2.* extra listeners classname</description>
+        </property>
+        <property>
+            <name>spark2SqlQueryExecutionListeners</name>
+            <value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
+            <description>spark 2.* sql query execution listeners classname</description>
+        </property>
+        <property>
+            <name>spark2YarnHistoryServerAddress</name>
+            <description>spark 2.* yarn history server address</description>
+        </property>
+        <property>
+            <name>spark2EventLogDir</name>
+            <description>spark 2.* event log dir location</description>
+        </property>
+    </parameters>
+
+    <global>
+        <job-tracker>${jobTracker}</job-tracker>
+        <name-node>${nameNode}</name-node>
+        <configuration>
+            <property>
+                <name>mapreduce.job.queuename</name>
+                <value>${queueName}</value>
+            </property>
+            <property>
+                <name>oozie.launcher.mapred.job.queue.name</name>
+                <value>${oozieLauncherQueueName}</value>
+            </property>
+            <property>
+                <name>oozie.action.sharelib.for.spark</name>
+                <value>${oozieActionShareLibForSpark2}</value>
+            </property>
+        </configuration>
+    </global>
+
+    <start to="stats"/>
+
+    <kill name="Kill">
+        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
+    </kill>
+
+    <action name="stats">
+        <spark xmlns="uri:oozie:spark-action:0.2">
+            <master>yarn</master>
+            <mode>cluster</mode>
+            <name>GenerateStatsJob</name>
+            <class>eu.dnetlib.dhp.broker.oa.GenerateStatsJob</class>
+            <jar>dhp-broker-events-${projectVersion}.jar</jar>
+            <spark-opts>
+                --executor-cores=${sparkExecutorCores}
+                --executor-memory=${sparkExecutorMemory}
+                --driver-memory=${sparkDriverMemory}
+                --conf spark.extraListeners=${spark2ExtraListeners}
+                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
+                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
+                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
+                --conf spark.sql.shuffle.partitions=3840
+            </spark-opts>
+            <arg>--outputDir</arg><arg>${outputDir}</arg>
+            <arg>--dbUrl</arg><arg>${brokerDbUrl}</arg>
+            <arg>--dbUser</arg><arg>${brokerDbUser}</arg>
+            <arg>--dbPassword</arg><arg>${brokerDbPassword}</arg>
+            <arg>--brokerApiBaseUrl</arg><arg>${brokerApiBaseUrl}</arg>
+        </spark>
+        <ok to="End"/>
+        <error to="Kill"/>
+    </action>
+
+    <end name="End"/>
+
+</workflow-app>
@@ -15,7 +15,7 @@ import eu.dnetlib.dhp.schema.oaf.*;
 
 public class CleaningFunctions {
 
-    public static final String DOI_URL_PREFIX_REGEX = "(^http(s?):\\/\\/)(((dx\\.)?doi\\.org)|(handle\\.test\\.datacite\\.org))\\/";
+    public static final String DOI_PREFIX_REGEX = "^10\\.";
 
     public static final String ORCID_CLEANING_REGEX = ".*([0-9]{4}).*[-–—−=].*([0-9]{4}).*[-–—−=].*([0-9]{4}).*[-–—−=].*([0-9x]{4})";
     public static final int ORCID_LEN = 19;

@@ -59,13 +59,19 @@ public class CleaningFunctions {
             }
         }
         if (Objects.nonNull(r.getAuthor())) {
-            r.getAuthor().forEach(a -> {
-                if (Objects.nonNull(a.getPid())) {
-                    a.getPid().forEach(p -> {
-                        fixVocabName(p.getQualifier(), ModelConstants.DNET_PID_TYPES);
-                    });
-                }
-            });
+            r
+                .getAuthor()
+                .stream()
+                .filter(Objects::nonNull)
+                .forEach(a -> {
+                    if (Objects.nonNull(a.getPid())) {
+                        a
+                            .getPid()
+                            .stream()
+                            .filter(Objects::nonNull)
+                            .forEach(p -> fixVocabName(p.getQualifier(), ModelConstants.DNET_PID_TYPES));
+                    }
+                });
         }
         if (value instanceof Publication) {

@@ -104,6 +110,16 @@ public class CleaningFunctions {
                 .setLanguage(
                     qualifier("und", "Undetermined", ModelConstants.DNET_LANGUAGES));
         }
+        if (Objects.nonNull(r.getCountry())) {
+            r
+                .setCountry(
+                    r
+                        .getCountry()
+                        .stream()
+                        .filter(Objects::nonNull)
+                        .filter(c -> StringUtils.isNotBlank(c.getClassid()))
+                        .collect(Collectors.toList()));
+        }
         if (Objects.nonNull(r.getSubject())) {
             r
                 .setSubject(

@@ -184,6 +200,16 @@ public class CleaningFunctions {
             }
         }
         if (Objects.nonNull(r.getAuthor())) {
+            r
+                .setAuthor(
+                    r
+                        .getAuthor()
+                        .stream()
+                        .filter(a -> Objects.nonNull(a))
+                        .filter(a -> StringUtils.isNotBlank(a.getFullname()))
+                        .filter(a -> StringUtils.isNotBlank(a.getFullname().replaceAll("[\\W]", "")))
+                        .collect(Collectors.toList()));
+
             boolean nullRank = r
                 .getAuthor()
                 .stream()

@@ -204,6 +230,7 @@ public class CleaningFunctions {
                     a
                         .getPid()
                         .stream()
+                        .filter(Objects::nonNull)
                        .filter(p -> Objects.nonNull(p.getQualifier()))
                        .filter(p -> StringUtils.isNotBlank(p.getValue()))
                        .map(p -> {

@@ -308,7 +335,7 @@ public class CleaningFunctions {
 
         // TODO add cleaning for more PID types as needed
         case "doi":
-            pid.setValue(value.toLowerCase().replaceAll(DOI_URL_PREFIX_REGEX, ""));
+            pid.setValue(value.toLowerCase().replaceAll(DOI_PREFIX_REGEX, "10."));
             break;
         }
         return pid;
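Illustrative only (hypothetical class name and input value): the new doi branch lowercases the pid value and keeps the canonical "10." prefix, instead of stripping resolver URL prefixes as the removed DOI_URL_PREFIX_REGEX did; resolver prefixes are presumably handled elsewhere in the pipeline.

public class DoiCleaningExample {

    public static void main(String[] args) {
        // same expression as the new doi branch, applied to a hypothetical raw value
        String value = "10.1234/ABCD.5678";
        String cleaned = value.toLowerCase().replaceAll("^10\\.", "10.");
        System.out.println(cleaned); // 10.1234/abcd.5678
    }
}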
@@ -51,31 +51,34 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
             final Node n = (Node) o;
             final Author author = new Author();
             final String fullname = n.valueOf("./datacite:creatorName");
-            author.setFullname(fullname);
-
-            final PacePerson pp = new PacePerson(fullname, false);
             final String name = n.valueOf("./datacite:givenName");
-            if (StringUtils.isBlank(name) & pp.isAccurate()) {
-                author.setName(pp.getNormalisedFirstName());
-            } else {
-                author.setName(name);
-            }
-
             final String surname = n.valueOf("./datacite:familyName");
-            if (StringUtils.isBlank(surname) & pp.isAccurate()) {
-                author.setSurname(pp.getNormalisedSurname());
-            } else {
-                author.setSurname(surname);
-            }
-
-            if (StringUtils.isBlank(author.getFullname())) {
-                author.setFullname(String.format("%s, %s", author.getSurname(), author.getName()));
-            }
-
-            author.setAffiliation(prepareListFields(n, "./datacite:affiliation", info));
-            author.setPid(preparePids(n, info));
-            author.setRank(pos++);
-            res.add(author);
+            if (StringUtils.isNotBlank(fullname) || StringUtils.isNotBlank(name) || StringUtils.isNotBlank(surname)) {
+                author.setFullname(fullname);
+
+                final PacePerson pp = new PacePerson(fullname, false);
+
+                if (StringUtils.isBlank(name) & pp.isAccurate()) {
+                    author.setName(pp.getNormalisedFirstName());
+                } else {
+                    author.setName(name);
+                }
+
+                if (StringUtils.isBlank(surname) & pp.isAccurate()) {
+                    author.setSurname(pp.getNormalisedSurname());
+                } else {
+                    author.setSurname(surname);
+                }
+
+                if (StringUtils.isBlank(author.getFullname())) {
+                    author.setFullname(String.format("%s, %s", author.getSurname(), author.getName()));
+                }
+
+                author.setAffiliation(prepareListFields(n, "./datacite:affiliation", info));
+                author.setPid(preparePids(n, info));
+                author.setRank(pos++);
+                res.add(author);
+            }
         }
         return res;
     }
@@ -19,6 +19,7 @@ import org.junit.jupiter.api.extension.ExtendWith;
 import org.mockito.Mock;
 import org.mockito.junit.jupiter.MockitoExtension;
 
+import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 
 import eu.dnetlib.dhp.oa.graph.clean.CleaningFunctionTest;

@@ -333,6 +334,7 @@ public class MappersTest {
         assertValidId(p.getCollectedfrom().get(0).getKey());
         System.out.println(p.getTitle().get(0).getValue());
         assertTrue(StringUtils.isNotBlank(p.getTitle().get(0).getValue()));
+
     }
 
     @Test

@@ -379,6 +381,16 @@ public class MappersTest {
         System.out.println(p.getPid().get(0).getValue());
     }
 
+    @Test
+    void testTextGridNoAuthor() throws IOException {
+        final String xml = IOUtils.toString(getClass().getResourceAsStream("textgrid-noauthor.xml"));
+        final List<Oaf> list = new OdfToOafMapper(vocs, false).processMdRecord(xml);
+
+        System.out.println("***************");
+        System.out.println(new ObjectMapper().writeValueAsString(list));
+        System.out.println("***************");
+    }
+
     @Test
     void testBologna() throws IOException {
         final String xml = IOUtils.toString(getClass().getResourceAsStream("oaf-bologna.xml"));

@@ -396,6 +408,34 @@ public class MappersTest {
         System.out.println(p.getTitle().get(0).getValue());
     }
 
+    @Test
+    void testEUOpenDataPortal() throws IOException {
+        final String xml = IOUtils.toString(getClass().getResourceAsStream("eu_odp.xml"));
+        final List<Oaf> list = new OdfToOafMapper(vocs, false).processMdRecord(xml);
+
+        System.out.println("***************");
+        System.out.println(new ObjectMapper().writeValueAsString(list));
+        System.out.println("***************");
+        final Dataset p = (Dataset) list.get(0);
+        assertValidId(p.getId());
+        assertValidId(p.getCollectedfrom().get(0).getKey());
+        assertTrue(StringUtils.isNotBlank(p.getTitle().get(0).getValue()));
+        assertEquals(0, p.getAuthor().size());
+        assertEquals(1, p.getPid().size());
+        assertEquals("OPEN", p.getBestaccessright().getClassid());
+        assertEquals("dataset", p.getResulttype().getClassname());
+        assertEquals(1, p.getInstance().size());
+        assertEquals("OPEN", p.getInstance().get(0).getAccessright().getClassid());
+        assertValidId(p.getInstance().get(0).getCollectedfrom().getKey());
+        assertValidId(p.getInstance().get(0).getHostedby().getKey());
+        assertEquals(
+            "CC_BY_4_0", p.getInstance().get(0).getLicense().getValue());
+        assertEquals(1, p.getInstance().get(0).getUrl().size());
+        assertEquals(1, p.getInstance().size());
+        System.out.println(p.getInstance().get(0).getUrl().get(0));
+        System.out.println(p.getInstance().get(0).getHostedby().getValue());
+    }
+
     private void assertValidId(final String id) {
         assertEquals(49, id.length());
         assertEquals('|', id.charAt(2));
@@ -1047,6 +1047,7 @@ dnet:pid_types @=@ dnet:pid_types @=@ urn @=@ urn
 dnet:pid_types @=@ dnet:pid_types @=@ who @=@ WHO Identifier
 dnet:pid_types @=@ dnet:pid_types @=@ drks @=@ DRKS Identifier
 dnet:pid_types @=@ dnet:pid_types @=@ handle @=@ Handle
+dnet:pid_types @=@ dnet:pid_types @=@ data.europa.eu @=@ EU Persistent URL
 dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MISSING/SUBJECT/ACM @=@ An ACM classification term that can be associated to your publications
 dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MISSING/SUBJECT/ARXIV @=@ An ARXIV classification term that can be associated to your publications
 dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MISSING/SUBJECT/DDC @=@ A Dewey Decimal classification term (DDC) that can be associated to your publications
@@ -0,0 +1,368 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<oai:record xmlns:dr="http://www.driver-repository.eu/namespace/dr"
+  xmlns:dri="http://www.driver-repository.eu/namespace/dri"
+  xmlns:oaf="http://namespace.openaire.eu/oaf" xmlns:oai="http://www.openarchives.org/OAI/2.0/">
+  <oai:header>
+    <dri:objIdentifier>r3c4b2081b22::0007d64b38bb2b399120f9993f95d911</dri:objIdentifier>
+    <dri:recordIdentifier>60a0a5b0-b63c-473f-b8bc-207ea037eb3b</dri:recordIdentifier>
+    <dri:dateOfCollection>2021-01-28T17:24:33.095+01:00</dri:dateOfCollection>
+    <oaf:datasourceprefix>r3c4b2081b22</oaf:datasourceprefix>
+    <dr:dateOfTransformation>2021-02-03T16:57:03.099+01:00</dr:dateOfTransformation>
+  </oai:header>
+  <oai:metadata>
+    <datacite:resource
+      xmlns:datacite="http://datacite.org/schema/kernel-4"
+      xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://datacite.org/schema/kernel-4 http://schema.datacite.org/meta/kernel-4/metadata.xsd">
+      <datacite:identifier identifierType="URL">http://data.europa.eu/88u/dataset/60a0a5b0-b63c-473f-b8bc-207ea037eb3b</datacite:identifier>
+      <datacite:creators/>
+      <datacite:titles>
+        <datacite:title xml:lang="en">GMIS - Favourable feeding habitat of adult Atlantic bluefin tuna (ABFT) Monthly 1998-2017 (frequency of occurence, %)</datacite:title>
+      </datacite:titles>
+      <datacite:publisher>JRC</datacite:publisher>
+      <datacite:publicationYear>2019</datacite:publicationYear>
+      <datacite:dates>
+        <datacite:date dateType="Issued">2019-07-09</datacite:date>
+      </datacite:dates>
+      <datacite:resourceType resourceTypeGeneral="Dataset">Dataset</datacite:resourceType>
+      <datacite:descriptions>
+        <datacite:description descriptionType="Abstract" xml:lang="en">The favourable feeding habitat of the Atlantic bluefin tuna is daily identified linking their ecological traits with environmental variables from satellite remote sensing and physical ocean models. The feeding habitat is mostly related to the occurrence of productive oceanic features (such as eddies) that are detected by satellite sensors of ocean colour (chlorophyll-a fronts). The physical variables used are sea surface temperature and sea surface height anomaly. More information: https://fishreg.jrc.ec.europa.eu/fish-habitat, Peer-reviewed publication: http://www.sciencedirect.com/science/article/pii/S0079661116000070</datacite:description>
+      </datacite:descriptions>
+      <datacite:subjects>
+        <datacite:subject>protected area</datacite:subject>
+        <datacite:subject>environmental monitoring</datacite:subject>
+        <datacite:subject>oceanography</datacite:subject>
+        <datacite:subject>ocean</datacite:subject>
+        <datacite:subject>Environmental Monitoring Facilities</datacite:subject>
+        <datacite:subject>Environment</datacite:subject>
+        <datacite:subject>Protected Sites</datacite:subject>
+        <datacite:subject>Oceanographic Geographical Features</datacite:subject>
+        <datacite:subject>environmental data</datacite:subject>
+        <datacite:subject classid="eu-data-theme"
+          classname="EU Data Theme"
+          schemeid="dnet:subject_classification_typologies" schemename="dnet:subject_classification_typologies">http://publications.europa.eu/resource/authority/data-theme/TECH</datacite:subject>
+        <datacite:subject classid="eu-data-theme"
+          classname="EU Data Theme"
+          schemeid="dnet:subject_classification_typologies" schemename="dnet:subject_classification_typologies">http://publications.europa.eu/resource/authority/data-theme/ENVI</datacite:subject>
+        <datacite:subject classid="eurovoc" classname="EuroVoc"
+          schemeid="dnet:subject_classification_typologies" schemename="dnet:subject_classification_typologies">http://eurovoc.europa.eu/2114</datacite:subject>
+        <datacite:subject classid="eurovoc" classname="EuroVoc"
+          schemeid="dnet:subject_classification_typologies" schemename="dnet:subject_classification_typologies">http://eurovoc.europa.eu/2107</datacite:subject>
+        <datacite:subject classid="eurovoc" classname="EuroVoc"
+          schemeid="dnet:subject_classification_typologies" schemename="dnet:subject_classification_typologies">http://eurovoc.europa.eu/4801</datacite:subject>
+        <datacite:subject classid="eurovoc" classname="EuroVoc"
+          schemeid="dnet:subject_classification_typologies" schemename="dnet:subject_classification_typologies">http://eurovoc.europa.eu/3140</datacite:subject>
+      </datacite:subjects>
+      <datacite:formats>
+        <datacite:format>http://publications.europa.eu/resource/authority/file-type/OP_DATPRO</datacite:format>
+      </datacite:formats>
+      <datacite:geoLocations>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/ESH</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/ESP</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/EST</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/ETH</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/DNK</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/DZA</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/EGY</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/ERI</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/FIN</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/FRA</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/1A0</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/AUT</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/BGR</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/MDA</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/MCO</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/LUX</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/LTU</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/MAR</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/LVA</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/LBN</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/JOR</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/LIE</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/LBY</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/TUN</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/TUR</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/SVK</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/SVN</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/SRB</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/SSD</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/TCD</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/TGO</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/SWE</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/SYR</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/AND</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/BFA</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/GRC</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/GNB</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/GGY</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/GEO</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/GBR</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/FRO</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/GMB</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/GIN</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/GIB</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/GHA</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/MKD</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/MLI</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/MLT</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/MNE</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/MRT</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/NER</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/NGA</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/NLD</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/NOR</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/POL</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/ALB</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/BEN</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/VAT</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/UKR</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/CAF</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/BLR</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/CIV</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/CHE</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/CPV</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/CMR</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/CZE</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/CYP</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/DJI</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/DEU</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/ITA</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/JEY</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/ISL</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/ISR</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/IRL</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/IRQ</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/HUN</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/IMN</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/GRL</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/HRV</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/ALA</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/BEL</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/BIH</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/SLE</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/SEN</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/SDN</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/SAU</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/RUS</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/ROU</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/PSE</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/PRT</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/SOM</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+        <datacite:geoLocation>
+          <datacite:geoLocationPlace>http://publications.europa.eu/resource/authority/country/SMR</datacite:geoLocationPlace>
+        </datacite:geoLocation>
+      </datacite:geoLocations>
+    </datacite:resource>
+    <oaf:identifier identifierType="data.europa.eu">http://data.europa.eu/88u/dataset/60a0a5b0-b63c-473f-b8bc-207ea037eb3b</oaf:identifier>
+    <oaf:country>EU</oaf:country>
+    <oaf:accessrights>OPEN</oaf:accessrights>
+    <dr:CobjCategory type="dataset">0021</dr:CobjCategory>
+    <oaf:dateAccepted>2019-07-09</oaf:dateAccepted>
+    <oaf:hostedBy id="re3data_____::r3d100011728" name="European Union Open Data Portal"/>
+    <oaf:collectedFrom id="re3data_____::r3d100011728" name="European Union Open Data Portal"/>
+    <oaf:license>CC_BY_4_0</oaf:license>
+    <oaf:language>ENG</oaf:language>
+  </oai:metadata>
+  <about xmlns:dc="http://purl.org/dc/elements/1.1/"
+    xmlns:prov="http://www.openarchives.org/OAI/2.0/provenance" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+    <provenance xmlns="http://www.openarchives.org/OAI/2.0/provenance" xsi:schemaLocation="http://www.openarchives.org/OAI/2.0/provenance http://www.openarchives.org/OAI/2.0/provenance.xsd">
+      <originDescription altered="true" harvestDate="2021-01-28T17:24:33.095+01:00">
+        <baseURL>https%3A%2F%2Fdata.europa.eu%2Feuodp%2Fdata%2Fapiodp%2Faction%2Fpackage_search</baseURL>
+        <identifier/>
+        <datestamp/>
+        <metadataNamespace/>
+      </originDescription>
+    </provenance>
+    <oaf:datainfo>
+      <oaf:inferred>false</oaf:inferred>
+      <oaf:deletedbyinference>false</oaf:deletedbyinference>
+      <oaf:trust>0.9</oaf:trust>
+      <oaf:inferenceprovenance/>
+      <oaf:provenanceaction classid="sysimport:crosswalk"
+        classname="sysimport:crosswalk"
+        schemeid="dnet:provenanceActions" schemename="dnet:provenanceActions"/>
+    </oaf:datainfo>
+  </about>
+</oai:record>
@@ -46,6 +46,7 @@
   <oaf:collectedFrom id="openaire____::crossref" name="Crossref"/>
   <oaf:identifier identifierType="doi">10.1080/23744235.2020.1774644</oaf:identifier>
   <oaf:journal eissn="2374-4243" ep="3" iss="" issn="2374-4235" sp="1" vol="">Infectious Diseases</oaf:journal>
+  <oaf:projectid validationDate="2020-12-07T11:15:59.627Z">corda__h2020::814530</oaf:projectid>
 </metadata>
 <about xmlns:oai="http://www.openarchives.org/OAI/2.0/">
   <provenance xmlns="http://www.openarchives.org/OAI/2.0/provenance" xsi:schemaLocation="http://www.openarchives.org/OAI/2.0/provenance http://www.openarchives.org/OAI/2.0/provenance.xsd">
@@ -0,0 +1,117 @@
<?xml version="1.0" encoding="UTF-8"?>
<record xmlns:datacite="http://datacite.org/schema/kernel-3"
    xmlns:dr="http://www.driver-repository.eu/namespace/dr"
    xmlns:oaf="http://namespace.openaire.eu/oaf" xmlns:oai="http://www.openarchives.org/OAI/2.0/">
    <oai:header xmlns="http://namespace.openaire.eu/"
        xmlns:dc="http://purl.org/dc/elements/1.1/"
        xmlns:dri="http://www.driver-repository.eu/namespace/dri"
        xmlns:prov="http://www.openarchives.org/OAI/2.0/provenance" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
        <dri:objIdentifier>r3f52792889d::00002412cb25f2f3047712d00ab2c8eb</dri:objIdentifier>
        <dri:recordIdentifier>hdl:11858/00-1734-0000-0003-EE73-2</dri:recordIdentifier>
        <dri:dateOfCollection>2020-12-16T10:04:03.148Z</dri:dateOfCollection>
        <oaf:datasourceprefix>r3f52792889d</oaf:datasourceprefix>
        <identifier xmlns="http://www.openarchives.org/OAI/2.0/">textgrid:rn8z.0</identifier>
        <datestamp xmlns="http://www.openarchives.org/OAI/2.0/">2012-01-29T20:54:12Z</datestamp>
        <dr:dateOfTransformation>2020-12-16T16:02:37.562Z</dr:dateOfTransformation>
    </oai:header>
    <metadata>
        <datacite:resource xmlns="http://www.openarchives.org/OAI/2.0/"
            xmlns:dc="http://purl.org/dc/elements/1.1/"
            xmlns:dri="http://www.driver-repository.eu/namespace/dri"
            xmlns:prov="http://www.openarchives.org/OAI/2.0/provenance" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
            <datacite:identifier identifierType="Handle">hdl:11858/00-1734-0000-0003-EE73-2</datacite:identifier>
            <datacite:creators>
                <datacite:creator>
                    <datacite:creatorName></datacite:creatorName>
                    <datacite:nameIdentifier nameIdentifierScheme="pnd" schemeURI="https://ref.de.dariah.eu/pndsearch/pndquery.xql?id="></datacite:nameIdentifier>
                </datacite:creator>
            </datacite:creators>
            <datacite:titles>
                <datacite:title titleType="Other">Auf dem Trocknen</datacite:title>
                <datacite:title titleType="Other">Detlev von Liliencron: Gute Nacht. Hinterlassene Gedichte, Berlin: Schuster & Loeffler, 1909.</datacite:title>
            </datacite:titles>
            <datacite:publisher>TextGrid</datacite:publisher>
            <datacite:publicationYear>2012</datacite:publicationYear>
            <datacite:contributors>
                <datacite:contributor contributorType="DataManager">
                    <datacite:contributorName>tvitt@textgrid.de</datacite:contributorName>
                </datacite:contributor>
                <datacite:contributor contributorType="Other">
                    <datacite:contributorName>Digitale Bibliothek</datacite:contributorName>
                    <datacite:nameIdentifier nameIdentifierScheme="textgrid" schemeURI="http://www.textgridlab.org/schema/textgrid-metadata_2010.xsd">TGPR-372fe6dc-57f2-6cd4-01b5-2c4bbefcfd3c</datacite:nameIdentifier>
                </datacite:contributor>
            </datacite:contributors>
            <datacite:dates>
                <datacite:date dateType="Created">2012-01-29T20:54:12Z</datacite:date>
                <datacite:date dateType="Issued">2012-01-29T20:54:12Z</datacite:date>
                <datacite:date dateType="Updated">2012-01-29T20:54:12Z</datacite:date>
            </datacite:dates>
            <datacite:resourceType resourceTypeGeneral="Dataset"/>
            <alternateIdentifiers xmlns="http://datacite.org/schema/kernel-3">
                <datacite:alternateIdentifier alternateIdentifierType="URI" xmlns="http://www.openarchives.org/OAI/2.0/">textgrid:rn8z.0</datacite:alternateIdentifier>
                <alternateIdentifier alternateIdentifierType="URL">http://hdl.handle.net/hdl:11858/00-1734-0000-0003-EE73-2</alternateIdentifier>
            </alternateIdentifiers>
            <datacite:relatedIdentifiers>
                <datacite:relatedIdentifier relatedIdentifierType="Handle" relationType="IsPartOf">hdl:11858/00-1734-0000-0003-EE72-4</datacite:relatedIdentifier>
            </datacite:relatedIdentifiers>
            <datacite:sizes>
                <datacite:size>527 Bytes</datacite:size>
            </datacite:sizes>
            <datacite:formats>
                <datacite:format>text/tg.edition+tg.aggregation+xml</datacite:format>
            </datacite:formats>
            <datacite:version>0</datacite:version>
            <datacite:rightsList>
                <datacite:rights rightsURI="http://creativecommons.org/licenses/by/3.0/de/legalcode"> Der annotierte Datenbestand der Digitalen Bibliothek inklusive
                    Metadaten sowie davon einzeln zugängliche Teile sind eine Abwandlung
                    des Datenbestandes von www.editura.de durch TextGrid und werden
                    unter der Lizenz Creative Commons Namensnennung 3.0 Deutschland
                    Lizenz (by-Nennung TextGrid) veröffentlicht. Die Lizenz bezieht sich
                    nicht auf die der Annotation zu Grunde liegenden allgemeinfreien
                    Texte (Siehe auch Punkt 2 der Lizenzbestimmungen).</datacite:rights>
                <datacite:rights rightsURI="info:eu-repo/semantics/openAccess"/>
            </datacite:rightsList>
            <datacite:descriptions>
                <datacite:description descriptionType="Abstract"/>
            </datacite:descriptions>
            <datacite:geoLocations>
                <datacite:geoLocation>
                    <datacite:geoLocationPlace
                        xmlns:xs="http://www.w3.org/2001/XMLSchema" xsi:type="xs:string">Berlin</datacite:geoLocationPlace>
                </datacite:geoLocation>
            </datacite:geoLocations>
        </datacite:resource>
        <oaf:identifier identifierType="handle">hdl:11858/00-1734-0000-0003-EE73-2</oaf:identifier>
        <dr:CobjCategory type="dataset">0021</dr:CobjCategory>
        <oaf:refereed>0002</oaf:refereed>
        <oaf:dateAccepted>2012-01-29</oaf:dateAccepted>
        <oaf:accessrights>OPEN</oaf:accessrights>
        <oaf:license>http://creativecommons.org/licenses/by/3.0/de/legalcode</oaf:license>
        <oaf:language>und</oaf:language>
        <oaf:country>DE</oaf:country>
        <oaf:hostedBy id="re3data_____::r3d100011365" name="TextGrid Repository"/>
        <oaf:collectedFrom id="re3data_____::r3d100011365" name="TextGrid Repository"/>
    </metadata>
    <about xmlns:dc="http://purl.org/dc/elements/1.1/"
        xmlns:dri="http://www.driver-repository.eu/namespace/dri"
        xmlns:prov="http://www.openarchives.org/OAI/2.0/provenance" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
        <provenance xmlns="http://www.openarchives.org/OAI/2.0/provenance" xsi:schemaLocation="http://www.openarchives.org/OAI/2.0/provenance http://www.openarchives.org/OAI/2.0/provenance.xsd">
            <originDescription altered="true" harvestDate="2020-12-16T10:04:03.148Z">
                <baseURL>https%3A%2F%2Fdev.textgridlab.org%2F1.0%2Ftgoaipmh%2Foai</baseURL>
                <identifier>textgrid:rn8z.0</identifier>
                <datestamp>2012-01-29T20:54:12Z</datestamp>
                <metadataNamespace/>
            </originDescription>
        </provenance>
        <oaf:datainfo>
            <oaf:inferred>false</oaf:inferred>
            <oaf:deletedbyinference>false</oaf:deletedbyinference>
            <oaf:trust>0.9</oaf:trust>
            <oaf:inferenceprovenance/>
            <oaf:provenanceaction classid="sysimport:crosswalk:datasetarchive"
                classname="sysimport:crosswalk:datasetarchive"
                schemeid="dnet:provenanceActions" schemename="dnet:provenanceActions"/>
        </oaf:datainfo>
    </about>
</record>
@@ -27,6 +27,7 @@ import eu.dnetlib.dhp.common.HdfsSupport;
import eu.dnetlib.dhp.oa.provision.model.JoinedEntity;
import eu.dnetlib.dhp.oa.provision.model.ProvisionModelSupport;
import eu.dnetlib.dhp.oa.provision.model.RelatedEntityWrapper;
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.common.ModelSupport;
import eu.dnetlib.dhp.schema.oaf.*;
import scala.Tuple2;
@@ -272,11 +273,7 @@ public class CreateRelatedEntitiesJob_phase2 {
            .filter(Objects::nonNull)
            .map(Qualifier::getClassid)
            .filter(StringUtils::isNotBlank)
            .anyMatch(c -> "orcid".equals(c.toLowerCase()));
            .anyMatch(c -> c.toLowerCase().contains(ModelConstants.ORCID));
    }

    private static FilterFunction<JoinedEntity> filterEmptyEntityFn() {
        return (FilterFunction<JoinedEntity>) v -> Objects.nonNull(v.getEntity());
    }

    private static void removeOutputDir(SparkSession spark, String path) {
@@ -7,13 +7,16 @@ import java.util.Set;

import com.google.common.collect.Sets;

import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.oaf.*;

public class GraphMappingUtils {

    public static final String SEPARATOR = "_";

    public static Set<String> authorPidTypes = Sets.newHashSet("orcid", "magidentifier");
    public static Set<String> authorPidTypes = Sets
        .newHashSet(
            ModelConstants.ORCID, ModelConstants.ORCID_PENDING, "magidentifier");

    public static String removePrefix(final String s) {
        if (s.contains("|"))
@@ -73,7 +73,9 @@ public class TemplateFactory {
        final Collection<String> fields,
        final String semanticclass,
        final String semantischeme,
        final DataInfo info) {
        final DataInfo info,
        final boolean validated,
        final String validationDate) {
        return getTemplate(resources.getRel())
            .add("type", type)
            .add("objIdentifier", escapeXml(removePrefix(objIdentifier)))
@@ -86,6 +88,8 @@ public class TemplateFactory {
            .add(
                "provenanceaction",
                info.getProvenanceaction() != null ? info.getProvenanceaction().getClassid() : "")
            .add("validated", validated)
            .add("validationdate", validationDate)
            .render();
    }
@@ -1094,9 +1094,12 @@ public class XmlRecordFactory implements Serializable {
                String.format("missing scheme for: <%s - %s>", type.toString(), targetType));
        }
        final HashSet<String> fields = Sets.newHashSet(mapFields(link, contexts));
        if (rel.getValidated() == null)
            rel.setValidated(false);
        return templateFactory
            .getRel(
                targetType, rel.getTarget(), fields, rel.getRelClass(), scheme, rel.getDataInfo());
                targetType, rel.getTarget(), fields, rel.getRelClass(), scheme, rel.getDataInfo(), rel.getValidated(),
                rel.getValidationDate());
    }

    private List<String> listChildren(
@@ -1,4 +1,5 @@
<rel inferred="$inferred$" trust="$trust$" inferenceprovenance="$inferenceprovenance$" provenanceaction="$provenanceaction$">
    $if(validated)$<validated date="$validationdate$"/>$else$$endif$
    <to class="$class$" scheme="$scheme$" type="$type$">$objIdentifier$</to>
    $metadata:{ it | $it$ }$
</rel>
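For reference, a sketch of what this template renders once a relation carries the validation flag; the values are illustrative, mirroring the test fixtures added further below, and the metadata children are omitted:

<rel inferred="false" trust="0.9" inferenceprovenance="" provenanceaction="sysimport:crosswalk:repository">
    <validated date="2021-01-01"/>
    <to class="isProducedBy" scheme="dnet:result_project_relations" type="project">corda__h2020::79a0e16c122c9a18eb60e4a5e64b620d</to>
</rel>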
@@ -5,22 +5,27 @@ import static org.junit.jupiter.api.Assertions.*;

import java.io.IOException;
import java.io.StringReader;
import java.util.List;

import org.apache.commons.io.IOUtils;
import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.io.SAXReader;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;

import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Lists;

import eu.dnetlib.dhp.oa.provision.model.JoinedEntity;
import eu.dnetlib.dhp.oa.provision.model.RelatedEntity;
import eu.dnetlib.dhp.oa.provision.model.RelatedEntityWrapper;
import eu.dnetlib.dhp.oa.provision.utils.ContextMapper;
import eu.dnetlib.dhp.oa.provision.utils.XmlRecordFactory;
import eu.dnetlib.dhp.schema.oaf.Project;
import eu.dnetlib.dhp.schema.oaf.Publication;
import eu.dnetlib.dhp.schema.oaf.Relation;

public class XmlRecordFactoryTest {

@@ -58,4 +63,67 @@ public class XmlRecordFactoryTest {

        // TODO add assertions based of values extracted from the XML record
    }

    @Test
    public void testXMLRecordFactoryWithValidatedProject() throws IOException, DocumentException {

        ContextMapper contextMapper = new ContextMapper();

        XmlRecordFactory xmlRecordFactory = new XmlRecordFactory(contextMapper, false, XmlConverterJob.schemaLocation,
            otherDsTypeId);

        Publication p = OBJECT_MAPPER
            .readValue(IOUtils.toString(getClass().getResourceAsStream("publication.json")), Publication.class);
        Project pj = OBJECT_MAPPER
            .readValue(IOUtils.toString(getClass().getResourceAsStream("project.json")), Project.class);
        Relation rel = OBJECT_MAPPER
            .readValue(
                (IOUtils.toString(getClass().getResourceAsStream("relToValidatedProject.json"))), Relation.class);
        RelatedEntity relatedProject = CreateRelatedEntitiesJob_phase1.asRelatedEntity(pj, Project.class);
        List<RelatedEntityWrapper> links = Lists.newArrayList();
        RelatedEntityWrapper rew = new RelatedEntityWrapper(rel, relatedProject);
        links.add(rew);
        JoinedEntity je = new JoinedEntity<>(p);
        je.setLinks(links);

        String xml = xmlRecordFactory.build(je);

        assertNotNull(xml);

        Document doc = new SAXReader().read(new StringReader(xml));
        assertNotNull(doc);
        System.out.println(doc.asXML());
        Assertions.assertEquals("2021-01-01", doc.valueOf("//validated/@date"));
    }

    @Test
    public void testXMLRecordFactoryWithNonValidatedProject() throws IOException, DocumentException {

        ContextMapper contextMapper = new ContextMapper();

        XmlRecordFactory xmlRecordFactory = new XmlRecordFactory(contextMapper, false, XmlConverterJob.schemaLocation,
            otherDsTypeId);

        Publication p = OBJECT_MAPPER
            .readValue(IOUtils.toString(getClass().getResourceAsStream("publication.json")), Publication.class);
        Project pj = OBJECT_MAPPER
            .readValue(IOUtils.toString(getClass().getResourceAsStream("project.json")), Project.class);
        Relation rel = OBJECT_MAPPER
            .readValue((IOUtils.toString(getClass().getResourceAsStream("relToProject.json"))), Relation.class);
        RelatedEntity relatedProject = CreateRelatedEntitiesJob_phase1.asRelatedEntity(pj, Project.class);
        List<RelatedEntityWrapper> links = Lists.newArrayList();
        RelatedEntityWrapper rew = new RelatedEntityWrapper(rel, relatedProject);
        links.add(rew);
        JoinedEntity je = new JoinedEntity<>(p);
        je.setLinks(links);

        String xml = xmlRecordFactory.build(je);

        assertNotNull(xml);

        Document doc = new SAXReader().read(new StringReader(xml));
        assertNotNull(doc);
        System.out.println(doc.asXML());
        assertEquals("", doc.valueOf("//rel/validated"));
    }
}
@@ -61,7 +61,7 @@
<FIELD indexable="true" name="funderoriginalname" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='project']/fundingtree/funder/originalname"/>
<FIELD indexable="true" name="funderjurisdiction" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='project']/fundingtree/funder/jurisdiction"/><!-- RESULT FIELDS -->
<FIELD indexable="true" name="resulttitle" result="false" stat="false" xpath="//*[local-name() = 'entity']/*[local-name() ='result']/title | //*[local-name()='entity']/*[local-name()='result']/children/result/title"/>
<FIELD indexable="true" name="resultsubject" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*[local-name()='result']/subject)"/>
<FIELD indexable="true" name="resultsubject" result="false" stat="false" type="string_ci" xpath="distinct-values(//*[local-name()='entity']/*[local-name()='result']/subject)"/>
<FIELD indexable="true" name="resultsubjectclass" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*[local-name()='result']/subject/@classname)"/>
<FIELD indexable="true" multivalued="false" name="resultembargoenddate" result="false" stat="false" type="pdate" value="//*[local-name()='entity']/*[local-name()='result']/embargoenddate"/>
<FIELD indexable="true" multivalued="false" name="resultembargoendyear" result="false" stat="false" tokenizable="false" value="dnet:extractYear(//*[local-name()='entity']/*[local-name()='result']/embargoenddate)"/>
@@ -72,6 +72,7 @@
<FIELD indexable="true" name="resultdescription" result="false" stat="false" xpath="//*[local-name()='entity']/*[local-name()='result']//*[local-name()='description']"/>
<FIELD indexable="true" name="resultlicense" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='result']/children/instance/license"/>
<FIELD indexable="true" name="resultaccessright" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='result']/children/instance/accessright/@classname"/>
<FIELD indexable="true" name="resultresourcetypename" result="false" stat="false" type="string_ci" xpath="//*[local-name()='entity']/*[local-name()='result']/resourcetype/@classname"/>
<FIELD indexable="true" multivalued="false" name="resultbestaccessright" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*[local-name()='result']/bestaccessright/@classname)"/>
<FIELD indexable="true" multivalued="false" name="resultdateofacceptance" result="false" stat="false" type="pdate" value="//*[local-name()='entity']/*[local-name()='result']/dateofacceptance"/>
<FIELD indexable="true" multivalued="false" name="resultacceptanceyear" result="false" stat="false" tokenizable="false" value="dnet:extractYear(//*[local-name()='entity']/*[local-name()='result']/dateofacceptance)"/>
@@ -79,6 +80,7 @@
<FIELD indexable="true" multivalued="true" name="resultauthor_nt" result="false" stat="false" type="string_ci" xpath="//*[local-name()='entity']/*[local-name()='result']/creator"/>
<FIELD indexable="true" multivalued="true" name="authorid" result="false" stat="false" type="string_ci" xpath="//*[local-name()='entity']/*[local-name()='result']/creator/@*[local-name() != 'rank' and local-name() != 'name' and local-name() != 'surname']"/>
<FIELD indexable="true" multivalued="true" name="authoridtype" result="false" stat="false" type="string_ci" xpath="//*[local-name()='entity']/*[local-name()='result']/creator/@*[local-name() != 'rank' and local-name() != 'name' and local-name() != 'surname']/local-name()"/>
<FIELD indexable="true" multivalued="true" name="orcidtypevalue" result="false" stat="false" type="string_ci" xpath="//*[local-name()='entity']/*[local-name()='result']/creator" value="string-join((./@*[local-name() = 'orcid'], ./@*[local-name() = 'orcid']/local-name()), '||' )"/>
<FIELD indexable="true" name="resulthostingdatasource" result="false" stat="false" tokenizable="false" value="distinct-values(concat(./@id, '||', ./@name))" xpath="//*[local-name()='entity']/*[local-name()='result']/children/instance/*[local-name()='hostedby']"/>
<FIELD indexable="true" name="resulthostingdatasourceid" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*[local-name()='result']/children/instance/*[local-name()='hostedby']/@id)"/>
<FIELD indexable="true" name="resulthostingdatasourcename" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*[local-name()='result']/children/instance/*[local-name()='hostedby']/@name)"/>
@@ -0,0 +1,109 @@
{
  "id": "40|corda__h2020::79a0e16c122c9a18eb60e4a5e64b620d",
  "originalId": [],
  "pid": [],
  "dateofcollection": "2020-01-01",
  "dateoftransformation": "2020-01-01",
  "extraInfo": [],
  "oaiprovenance": null,
  "websiteurl": {"value": "https://web.site", "datainfo": null},
  "code": {"value": "79a0e", "datainfo": null},
  "acronym": {"value": "79a0e_acronym", "datainfo": null},
  "title": {"value": "79a0e_title", "datainfo": null},
  "startdate": {"value": "2019-02-01", "datainfo": null},
  "enddate": {"value": "2021-01-09", "datainfo": null},
  "callidentifier": {"value": "79a0e_callID", "datainfo": null},
  "keywords": {"value": "", "datainfo": null},
  "duration": {"value": "", "datainfo": null},
  "ecsc39": {"value": "true", "datainfo": null},
  "oamandatepublications": {"value": "true", "datainfo": null},
  "ecarticle29_3": {"value": "false", "datainfo": null},
  "optional1": {"value": "", "datainfo": null},
  "optional2": {"value": "", "datainfo": null},
  "jsonextrainfo": {"value": "", "datainfo": null},
  "contactfullname": {"value": "", "datainfo": null},
  "contactfax": {"value": "", "datainfo": null},
  "contactphone": {"value": "", "datainfo": null},
  "contactemail": {"value": "", "datainfo": null},
  "summary": {"value": "79a0e_description", "datainfo": null},
  "currency": {"value": "EUR", "datainfo": null},
  "totalcost": 120000,
  "fundedamount": 18000,
  "h2020topiccode": "",
  "h2020topicdescription": "",
  "h2020classification": [],
  "subjects": [{"value": "", "qualifier": null, "datainfo": null}],
  "fundingtree": []
}
@@ -39,6 +39,8 @@
<creator rank="4">Saykally, Jessica N.</creator>
<creator rank="5">Keeley, Kristen L.</creator>
<creator rank="6">Haris Hatic</creator>
<creator rank="7" name="Miriam" surname="Baglioni" orcid_pending="0000-0002-2273-9004">Baglioni, Miriam</creator>
<creator rank="8" name="Michele" surname="De Bonis" orcid="0000-0002-2273-9004">De Bonis, Michele</creator>
<dateofacceptance>2017-06-01</dateofacceptance>
<description>Withania somnifera has been used in traditional medicine for a variety
of neural disorders. Recently, chronic neurodegenerative conditions have been
@@ -115,7 +117,7 @@
<source>Cell Transplantation</source>
<resulttype classid="publication" classname="publication"
    schemeid="dnet:result_typologies" schemename="dnet:result_typologies"/>
<resourcetype/>
<resourcetype classid="Book" classname="Book" schemeid="dnet:dataCite_resource" schemename="dnet:dataCite_resource" />
<journal issn="0963-6897" eissn="1555-3892" ep="1201" iss="7" sp="1193" vol="26"
    >Cell Transplantation</journal>
<context id="NIH" label="National Institutes of Health" type="funding">
@@ -292,6 +294,28 @@
    >rcuk________::23feba2a5ca7f6b6016bf3a45180da50</to>
    <legalname>University of Delhi</legalname>
</rel>
<rel inferred="false" trust="0.9" inferenceprovenance="" provenanceaction="sysimport:crosswalk:repository">
    <validated date="2021-01-01">true</validated>
    <to class="isProducedBy" scheme="dnet:result_project_relations" type="project">corda_______::30c6b5ab90f30666de1d112fb93d8c77</to>
    <code>227878</code>
    <funding>
        <funder id="ec__________::EC" shortname="EC" name="European Commission" jurisdiction="EU" />
        <funding_level_0 name="FP7">ec__________::EC::FP7</funding_level_0>
        <funding_level_1 name="SP2">ec__________::EC::FP7::SP2</funding_level_1>
        <funding_level_2 name="ERC">ec__________::EC::FP7::SP2::ERC</funding_level_2>
    </funding>
    <title>Complex structure and dynamics of collective motion</title>
    <acronym>COLLMOT</acronym>
    <contracttype classid="ERC" classname="Support for frontier research (ERC)" schemeid="ec:FP7contractTypes" schemename="ec:FP7contractTypes" />
</rel>
<rel inferred="true" trust="0.72" inferenceprovenance="iis::document_referencedProjects" provenanceaction="iis">
    <to class="isProducedBy" scheme="dnet:result_project_relations" type="project">irb_hr______::2330a1d0dac71ffbe15fbcbc807288d4</to>
    <code>108-1083570-3635</code>
    <funding>
        <funder id="irb_hr______::MZOS" shortname="MZOS" name="Ministry of Science, Education and Sports of the Republic of Croatia (MSES)" jurisdiction="HR" />
    </funding>
    <title>Pentadecapeptide BPC 157 - further investigations</title>
</rel>
</rels>
<children>
<instance id="openaire____::55045bd2a65019fd8e6741a755395c8c">
@@ -0,0 +1,31 @@
{
  "collectedfrom": [{"key": "10|opendoar____::eccbc87e4b5ce2fe28308fd9f2a7baf3", "value": "AMS Acta", "dataInfo": null}],
  "dataInfo": {
    "invisible": false,
    "inferred": false,
    "deletedbyinference": false,
    "trust": "0.9",
    "inferenceprovenance": "",
    "provenanceaction": {"classid": "sysimport:crosswalk:repository", "classname": "sysimport:crosswalk:repository", "schemeid": "dnet:provenanceActions", "schemename": "dnet:provenanceActions"}
  },
  "lastupdatetimestamp": 1606898557407,
  "relType": "resultProject",
  "subRelType": "outcome",
  "relClass": "isProducedBy",
  "source": "50|CSC_________::0000ec4dd9df012feaafa77e71a0fb4c",
  "target": "40|corda__h2020::79a0e16c122c9a18eb60e4a5e64b620d",
  "validated": null,
  "validationDate": null,
  "properties": []
}
@@ -0,0 +1,31 @@
{
  "collectedfrom": [{"key": "10|opendoar____::eccbc87e4b5ce2fe28308fd9f2a7baf3", "value": "AMS Acta", "dataInfo": null}],
  "dataInfo": {
    "invisible": false,
    "inferred": false,
    "deletedbyinference": false,
    "trust": "0.9",
    "inferenceprovenance": "",
    "provenanceaction": {"classid": "sysimport:crosswalk:repository", "classname": "sysimport:crosswalk:repository", "schemeid": "dnet:provenanceActions", "schemename": "dnet:provenanceActions"}
  },
  "lastupdatetimestamp": 1606898557407,
  "relType": "resultProject",
  "subRelType": "outcome",
  "relClass": "isProducedBy",
  "source": "50|CSC_________::0000ec4dd9df012feaafa77e71a0fb4c",
  "target": "40|corda__h2020::79a0e16c122c9a18eb60e4a5e64b620d",
  "validated": true,
  "validationDate": "2021-01-01",
  "properties": []
}
@@ -0,0 +1,91 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>dhp-workflows</artifactId>
        <groupId>eu.dnetlib.dhp</groupId>
        <version>1.2.4-SNAPSHOT</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>
    <artifactId>dhp-usage-raw-data-update</artifactId>
    <build>
        <plugins>
            <plugin>
                <groupId>pl.project13.maven</groupId>
                <artifactId>git-commit-id-plugin</artifactId>
                <version>2.1.15</version>
                <executions>
                    <execution>
                        <goals>
                            <goal>revision</goal>
                        </goals>
                    </execution>
                </executions>
                <configuration>
                    <dotGitDirectory>${project.basedir}/../.git</dotGitDirectory>
                    <!-- more config here as you see fit -->
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.6.1</version>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                </configuration>
            </plugin>
        </plugins>
    </build>
    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
        <cdh.hive.version>0.13.1-cdh5.2.1</cdh.hive.version>
        <cdh.hadoop.version>2.5.0-cdh5.2.1</cdh.hadoop.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_2.11</artifactId>
            <version>2.2.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-sql_2.11</artifactId>
            <version>2.4.5</version>
        </dependency>
        <dependency>
            <groupId>com.googlecode.json-simple</groupId>
            <artifactId>json-simple</artifactId>
            <version>1.1.1</version>
        </dependency>
        <dependency>
            <groupId>org.json</groupId>
            <artifactId>json</artifactId>
            <version>20180130</version>
            <type>jar</type>
        </dependency>
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-jdbc</artifactId>
            <version>${cdh.hive.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>${cdh.hadoop.version}</version>
        </dependency>
        <dependency>
            <groupId>eu.dnetlib.dhp</groupId>
            <artifactId>dhp-common</artifactId>
            <version>${project.version}</version>
        </dependency>
        <dependency>
            <groupId>c3p0</groupId>
            <artifactId>c3p0</artifactId>
            <version>0.9.1.2</version>
            <type>jar</type>
        </dependency>
    </dependencies>
    <name>dhp-usage-raw-data-update</name>
</project>
@@ -0,0 +1 @@
mvn clean package -Poozie-package,deploy,run -Dworkflow.source.dir=eu/dnetlib/dhp/oa/graph/usagerawdata
@@ -0,0 +1,125 @@
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */

package eu.dnetlib.oa.graph.usagerawdata.export;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Properties;

import org.apache.log4j.Logger;

/**
 * @author D. Pierrakos, S. Zoupanos
 */
import com.mchange.v2.c3p0.ComboPooledDataSource;

public abstract class ConnectDB {

    public static Connection DB_HIVE_CONNECTION;
    public static Connection DB_IMPALA_CONNECTION;

    private static String dbHiveUrl;
    private static String dbImpalaUrl;
    private static String usageStatsDBSchema;
    private static String statsDBSchema;
    private final static Logger log = Logger.getLogger(ConnectDB.class);

    static void init() throws ClassNotFoundException {

        dbHiveUrl = ExecuteWorkflow.dbHiveUrl;
        dbImpalaUrl = ExecuteWorkflow.dbImpalaUrl;
        usageStatsDBSchema = ExecuteWorkflow.usageStatsDBSchema;
        statsDBSchema = ExecuteWorkflow.statsDBSchema;

        Class.forName("org.apache.hive.jdbc.HiveDriver");
    }

    public static Connection getHiveConnection() throws SQLException {
        if (DB_HIVE_CONNECTION != null && !DB_HIVE_CONNECTION.isClosed()) {
            return DB_HIVE_CONNECTION;
        } else {
            DB_HIVE_CONNECTION = connectHive();

            return DB_HIVE_CONNECTION;
        }
    }

    public static Connection getImpalaConnection() throws SQLException {
        if (DB_IMPALA_CONNECTION != null && !DB_IMPALA_CONNECTION.isClosed()) {
            return DB_IMPALA_CONNECTION;
        } else {
            DB_IMPALA_CONNECTION = connectImpala();

            return DB_IMPALA_CONNECTION;
        }
    }

    public static String getUsageStatsDBSchema() {
        return ConnectDB.usageStatsDBSchema;
    }

    public static String getStatsDBSchema() {
        return ConnectDB.statsDBSchema;
    }

    private static Connection connectHive() throws SQLException {
        /*
         * Connection connection = DriverManager.getConnection(dbHiveUrl); Statement stmt =
         * connection.createStatement(); log.debug("Opened database successfully"); return connection;
         */
        ComboPooledDataSource cpds = new ComboPooledDataSource();
        cpds.setJdbcUrl(dbHiveUrl);
        cpds.setAcquireIncrement(1);
        cpds.setMaxPoolSize(100);
        cpds.setMinPoolSize(1);
        cpds.setInitialPoolSize(1);
        cpds.setMaxIdleTime(300);
        cpds.setMaxConnectionAge(36000);

        cpds.setAcquireRetryAttempts(5);
        cpds.setAcquireRetryDelay(2000);
        cpds.setBreakAfterAcquireFailure(false);

        cpds.setCheckoutTimeout(0);
        cpds.setPreferredTestQuery("SELECT 1");
        cpds.setIdleConnectionTestPeriod(60);
        return cpds.getConnection();

    }

    private static Connection connectImpala() throws SQLException {
        /*
         * Connection connection = DriverManager.getConnection(dbImpalaUrl); Statement stmt =
         * connection.createStatement(); log.debug("Opened database successfully"); return connection;
         */
        ComboPooledDataSource cpds = new ComboPooledDataSource();
        cpds.setJdbcUrl(dbImpalaUrl);
        cpds.setAcquireIncrement(1);
        cpds.setMaxPoolSize(100);
        cpds.setMinPoolSize(1);
        cpds.setInitialPoolSize(1);
        cpds.setMaxIdleTime(300);
        cpds.setMaxConnectionAge(36000);

        cpds.setAcquireRetryAttempts(5);
        cpds.setAcquireRetryDelay(2000);
        cpds.setBreakAfterAcquireFailure(false);

        cpds.setCheckoutTimeout(0);
        cpds.setPreferredTestQuery("SELECT 1");
        cpds.setIdleConnectionTestPeriod(60);

        return cpds.getConnection();

    }

}
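A minimal usage sketch for the pooled connections above, following the pattern used by the export classes in this package; the DDL string and table name are placeholders, not part of the commit:

        // obtain the shared Hive connection, run a statement against the usage-stats schema, release resources
        Statement stmt = ConnectDB.getHiveConnection().createStatement();
        stmt.executeUpdate("DROP TABLE IF EXISTS " + ConnectDB.getUsageStatsDBSchema() + ".example_tmp"); // placeholder DDL
        stmt.close();
        ConnectDB.getHiveConnection().close();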
@@ -0,0 +1,211 @@
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */

package eu.dnetlib.oa.graph.usagerawdata.export;

import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;

import org.apache.commons.io.IOUtils;
import org.apache.log4j.BasicConfigurator;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;

/**
 * @author D. Pierrakos, S. Zoupanos
 */
public class ExecuteWorkflow {

    static String matomoAuthToken;
    static String matomoBaseURL;
    static String repoLogPath;
    static String portalLogPath;
    static String portalMatomoID;
    static String irusUKBaseURL;
    static String irusUKReportPath;
    static String sarcsReportPathArray;
    static String sarcsReportPathNonArray;
    static String lareferenciaLogPath;
    static String lareferenciaBaseURL;
    static String lareferenciaAuthToken;
    static String dbHiveUrl;
    static String dbImpalaUrl;
    static String usageStatsDBSchema;
    static String statsDBSchema;
    static boolean recreateDbAndTables;

    static boolean piwikEmptyDirs;
    static boolean downloadPiwikLogs;
    static boolean processPiwikLogs;

    static Calendar startingLogPeriod;
    static Calendar endingLogPeriod;
    static int numberOfPiwikIdsToDownload;
    static int numberOfSiteIdsToDownload;

    static boolean laReferenciaEmptyDirs;
    static boolean downloadLaReferenciaLogs;
    static boolean processLaReferenciaLogs;

    static boolean irusCreateTablesEmptyDirs;
    static boolean irusDownloadReports;
    static boolean irusProcessStats;
    static int irusNumberOfOpendoarsToDownload;

    static boolean sarcCreateTablesEmptyDirs;
    static boolean sarcDownloadReports;
    static boolean sarcProcessStats;
    static int sarcNumberOfIssnToDownload;

    static boolean finalizeStats;

    static int numberOfDownloadThreads;

    public static void main(String args[]) throws Exception {

        // Sending the logs to the console
        BasicConfigurator.configure();

        final ArgumentApplicationParser parser = new ArgumentApplicationParser(
            IOUtils
                .toString(
                    UsageStatsExporter.class
                        .getResourceAsStream(
                            "/eu/dnetlib/dhp/oa/graph/usagerawdata/export/usagerawdata_parameters.json")));
        parser.parseArgument(args);

        // Setting up the initial parameters
        matomoAuthToken = parser.get("matomoAuthToken");
        matomoBaseURL = parser.get("matomoBaseURL");
        repoLogPath = parser.get("repoLogPath");
        portalLogPath = parser.get("portalLogPath");
        portalMatomoID = parser.get("portalMatomoID");
        irusUKBaseURL = parser.get("irusUKBaseURL");
        irusUKReportPath = parser.get("irusUKReportPath");
        sarcsReportPathArray = parser.get("sarcsReportPathArray");
        sarcsReportPathNonArray = parser.get("sarcsReportPathNonArray");
        lareferenciaLogPath = parser.get("lareferenciaLogPath");
        lareferenciaBaseURL = parser.get("lareferenciaBaseURL");
        lareferenciaAuthToken = parser.get("lareferenciaAuthToken");

        dbHiveUrl = parser.get("dbHiveUrl");
        dbImpalaUrl = parser.get("dbImpalaUrl");
        usageStatsDBSchema = parser.get("usageStatsDBSchema");
        statsDBSchema = parser.get("statsDBSchema");

        recreateDbAndTables = parser.get("recreateDbAndTables").toLowerCase().equals("true");

        piwikEmptyDirs = parser.get("piwikEmptyDirs").toLowerCase().equals("true");
        downloadPiwikLogs = parser.get("downloadPiwikLogs").toLowerCase().equals("true");
        processPiwikLogs = parser.get("processPiwikLogs").toLowerCase().equals("true");

        String startingLogPeriodStr = parser.get("startingLogPeriod");
        Date startingLogPeriodDate = new SimpleDateFormat("MM/yyyy").parse(startingLogPeriodStr);
        startingLogPeriod = startingLogPeriodStr(startingLogPeriodDate);

        // String endingLogPeriodStr = parser.get("endingLogPeriod");
        // Date endingLogPeriodDate = new SimpleDateFormat("MM/yyyy").parse(endingLogPeriodStr);
        // endingLogPeriod = startingLogPeriodStr(endingLogPeriodDate);

        numberOfPiwikIdsToDownload = Integer.parseInt(parser.get("numberOfPiwikIdsToDownload"));
        numberOfSiteIdsToDownload = Integer.parseInt(parser.get("numberOfSiteIdsToDownload"));

        laReferenciaEmptyDirs = parser.get("laReferenciaEmptyDirs").toLowerCase().equals("true");
        downloadLaReferenciaLogs = parser.get("downloadLaReferenciaLogs").toLowerCase().equals("true");
        processLaReferenciaLogs = parser.get("processLaReferenciaLogs").toLowerCase().equals("true");

        irusCreateTablesEmptyDirs = parser.get("irusCreateTablesEmptyDirs").toLowerCase().equals("true");
        irusDownloadReports = parser.get("irusDownloadReports").toLowerCase().equals("true");
        irusProcessStats = parser.get("irusProcessStats").toLowerCase().equals("true");
        irusNumberOfOpendoarsToDownload = Integer.parseInt(parser.get("irusNumberOfOpendoarsToDownload"));

        sarcCreateTablesEmptyDirs = parser.get("sarcCreateTablesEmptyDirs").toLowerCase().equals("true");
        sarcDownloadReports = parser.get("sarcDownloadReports").toLowerCase().equals("true");
        sarcProcessStats = parser.get("sarcProcessStats").toLowerCase().equals("true");
        sarcNumberOfIssnToDownload = Integer.parseInt(parser.get("sarcNumberOfIssnToDownload"));

        finalizeStats = parser.get("finalizeStats").toLowerCase().equals("true");

        numberOfDownloadThreads = Integer.parseInt(parser.get("numberOfDownloadThreads"));

        UsageStatsExporter usagestatsExport = new UsageStatsExporter();
        usagestatsExport.export();
        // usagestatsExport.createdDBWithTablesOnly();
    }

    private static Calendar startingLogPeriodStr(Date date) {

        Calendar calendar = Calendar.getInstance();
        calendar.setTime(date);
        return calendar;

    }
}
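A small sketch of how the startingLogPeriod parameter above is interpreted, using a hypothetical input value; the "MM/yyyy" pattern and the Calendar wrapping mirror the code, everything else is illustrative:

        // hypothetical parameter value "03/2020" (MM/yyyy)
        Date d = new SimpleDateFormat("MM/yyyy").parse("03/2020");
        Calendar c = Calendar.getInstance();
        c.setTime(d); // startingLogPeriod now points at March 2020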
@ -0,0 +1,358 @@
|
||||||
|
|
||||||
|
package eu.dnetlib.oa.graph.usagerawdata.export;
|
||||||
|
|
||||||
|
import java.io.*;
|
||||||
|
import java.net.URL;
|
||||||
|
import java.net.URLConnection;
|
||||||
|
import java.sql.PreparedStatement;
|
||||||
|
import java.sql.ResultSet;
|
||||||
|
import java.sql.Statement;
|
||||||
|
import java.text.SimpleDateFormat;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.Calendar;
|
||||||
|
import java.util.Date;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
import org.apache.hadoop.conf.Configuration;
|
||||||
|
import org.apache.hadoop.fs.FSDataOutputStream;
|
||||||
|
import org.apache.hadoop.fs.FileSystem;
|
||||||
|
import org.apache.hadoop.fs.Path;
|
||||||
|
import org.json.simple.JSONArray;
|
||||||
|
import org.json.simple.JSONObject;
|
||||||
|
import org.json.simple.parser.JSONParser;
|
||||||
|
import org.slf4j.Logger;
|
||||||
|
import org.slf4j.LoggerFactory;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @author D. Pierrakos, S. Zoupanos
|
||||||
|
*/
|
||||||
|
public class IrusStats {
|
||||||
|
|
||||||
|
private String irusUKURL;
|
||||||
|
|
||||||
|
private static final Logger logger = LoggerFactory.getLogger(IrusStats.class);
|
||||||
|
|
||||||
|
public IrusStats(String irusUKURL) throws Exception {
|
||||||
|
this.irusUKURL = irusUKURL;
|
||||||
|
// The following may not be needed - It will be created when JSON tables are created
|
||||||
|
// createTmpTables();
|
||||||
|
}
|
||||||
|
|
||||||
|
public void reCreateLogDirs() throws Exception {
|
||||||
|
FileSystem dfs = FileSystem.get(new Configuration());
|
||||||
|
|
||||||
|
logger.info("Deleting irusUKReport directory: " + ExecuteWorkflow.irusUKReportPath);
|
||||||
|
dfs.delete(new Path(ExecuteWorkflow.irusUKReportPath), true);
|
||||||
|
|
||||||
|
logger.info("Creating irusUKReport directory: " + ExecuteWorkflow.irusUKReportPath);
|
||||||
|
dfs.mkdirs(new Path(ExecuteWorkflow.irusUKReportPath));
|
||||||
|
}
|
||||||
|
|
||||||
|
public void createTables() throws Exception {
|
||||||
|
try {
|
||||||
|
logger.info("Creating sushilog");
|
||||||
|
Statement stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
String sqlCreateTableSushiLog = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema()
|
||||||
|
+ ".sushilog(source STRING, "
|
||||||
|
+ "repository STRING, rid STRING, date STRING, metric_type STRING, count INT) clustered by (source, "
|
||||||
|
+ "repository, rid, date, metric_type) into 100 buckets stored as orc tblproperties('transactional'='true')";
|
||||||
|
stmt.executeUpdate(sqlCreateTableSushiLog);
|
||||||
|
logger.info("Created sushilog");
|
||||||
|
|
||||||
|
stmt.close();
|
||||||
|
ConnectDB.getHiveConnection().close();
|
||||||
|
logger.info("Sushi Tables Created");
|
||||||
|
} catch (Exception e) {
|
||||||
|
logger.error("Failed to create tables: " + e);
|
||||||
|
throw new Exception("Failed to create tables: " + e.toString(), e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void processIrusStats() throws Exception {
|
||||||
|
Statement stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
ConnectDB.getHiveConnection().setAutoCommit(false);
|
||||||
|
|
||||||
|
logger.info("Adding JSON Serde jar");
|
||||||
|
stmt.executeUpdate("add jar /usr/share/cmf/common_jars/hive-hcatalog-core-1.1.0-cdh5.14.0.jar");
|
||||||
|
logger.info("Added JSON Serde jar");
|
||||||
|
|
||||||
|
logger.info("Dropping sushilogtmp_json table");
|
||||||
|
String dropSushilogtmpJson = "DROP TABLE IF EXISTS "
|
||||||
|
+ ConnectDB.getUsageStatsDBSchema()
|
||||||
|
+ ".sushilogtmp_json";
|
||||||
|
stmt.executeUpdate(dropSushilogtmpJson);
|
||||||
|
logger.info("Dropped sushilogtmp_json table");
|
||||||
|
|
||||||
|
logger.info("Creating irus_sushilogtmp_json table");
|
||||||
|
String createSushilogtmpJson = "CREATE EXTERNAL TABLE IF NOT EXISTS "
|
||||||
|
+ ConnectDB.getUsageStatsDBSchema() + ".irus_sushilogtmp_json(\n"
|
||||||
|
+ " `ItemIdentifier` ARRAY<\n"
|
||||||
|
+ " struct<\n"
|
||||||
|
+ " Type: STRING,\n"
|
||||||
|
+ " Value: STRING\n"
|
||||||
|
+ " >\n"
|
||||||
|
+ " >,\n"
|
||||||
|
+ " `ItemPerformance` ARRAY<\n"
|
||||||
|
+ " struct<\n"
|
||||||
|
+ " `Period`: struct<\n"
|
||||||
|
+ " `Begin`: STRING,\n"
|
||||||
|
+ " `End`: STRING\n"
|
||||||
|
+ " >,\n"
|
||||||
|
+ " `Instance`: struct<\n"
|
||||||
|
+ " `Count`: STRING,\n"
|
||||||
|
+ " `MetricType`: STRING\n"
|
||||||
|
+ " >\n"
|
||||||
|
+ " >\n"
|
||||||
|
+ " >\n"
|
||||||
|
+ ")\n"
|
||||||
|
+ "ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe'\n"
|
||||||
|
+ "LOCATION '" + ExecuteWorkflow.irusUKReportPath + "'\n"
|
||||||
|
+ "TBLPROPERTIES (\"transactional\"=\"false\")";
|
||||||
|
stmt.executeUpdate(createSushilogtmpJson);
|
||||||
|
logger.info("Created irus_sushilogtmp_json table");
|
||||||
|
|
||||||
|
logger.info("Dropping irus_sushilogtmp table");
|
||||||
|
String dropSushilogtmp = "DROP TABLE IF EXISTS "
|
||||||
|
+ ConnectDB.getUsageStatsDBSchema()
|
||||||
|
+ ".irus_sushilogtmp";
|
||||||
|
stmt.executeUpdate(dropSushilogtmp);
|
||||||
|
logger.info("Dropped irus_sushilogtmp table");
|
||||||
|
|
||||||
|
logger.info("Creating irus_sushilogtmp table");
|
||||||
|
String createSushilogtmp = "CREATE TABLE " + ConnectDB.getUsageStatsDBSchema()
|
||||||
|
+ ".irus_sushilogtmp(source STRING, repository STRING, "
|
||||||
|
+ "rid STRING, date STRING, metric_type STRING, count INT) clustered by (source) into 100 buckets stored as orc "
|
||||||
|
+ "tblproperties('transactional'='true')";
|
||||||
|
stmt.executeUpdate(createSushilogtmp);
|
||||||
|
logger.info("Created irus_sushilogtmp table");
|
||||||
|
|
||||||
|
logger.info("Inserting to irus_sushilogtmp table");
|
||||||
|
String insertSushilogtmp = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".irus_sushilogtmp "
|
||||||
|
+ "SELECT 'IRUS-UK', CONCAT('opendoar____::', split(split(INPUT__FILE__NAME,'IrusIRReport_')[1],'_')[0]), "
|
||||||
|
+ "`ItemIdent`.`Value`, `ItemPerf`.`Period`.`Begin`, "
|
||||||
|
+ "`ItemPerf`.`Instance`.`MetricType`, `ItemPerf`.`Instance`.`Count` "
|
||||||
|
+ "FROM " + ConnectDB.getUsageStatsDBSchema() + ".irus_sushilogtmp_json "
|
||||||
|
+ "LATERAL VIEW posexplode(ItemIdentifier) ItemIdentifierTable AS seqi, ItemIdent "
|
||||||
|
+ "LATERAL VIEW posexplode(ItemPerformance) ItemPerformanceTable AS seqp, ItemPerf "
|
||||||
|
+ "WHERE `ItemIdent`.`Type`= 'OAI'";
|
||||||
|
stmt.executeUpdate(insertSushilogtmp);
|
||||||
|
logger.info("Inserted to irus_sushilogtmp table");
|
||||||
|
|
||||||
|
logger.info("Inserting to sushilog table");
|
||||||
|
String insertToShushilog = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".sushilog SELECT * FROM "
|
||||||
|
+ ConnectDB.getUsageStatsDBSchema()
|
||||||
|
+ ".irus_sushilogtmp";
|
||||||
|
stmt.executeUpdate(insertToShushilog);
|
||||||
|
logger.info("Inserted to sushilog table");
|
||||||
|
|
||||||
|
ConnectDB.getHiveConnection().close();
|
||||||
|
}
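// Illustrative note (not part of the original patch): the INSERT above reconstructs the repository
// id from the report file name via split(split(INPUT__FILE__NAME,'IrusIRReport_')[1],'_')[0].
// A minimal Java sketch of the same parsing, assuming file names follow the
// "IrusIRReport_<opendoar>_<yyyy-MM>.json" pattern produced by getIrusIRReport below:
private static String opendoarFromReportFileName(String fileName) {
	// e.g. "IrusIRReport_123_2020-03.json" -> "123"
	String tail = fileName.substring(fileName.indexOf("IrusIRReport_") + "IrusIRReport_".length());
	return tail.split("_")[0];
}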
|
||||||
|
|
||||||
|
public void getIrusRRReport(String irusUKReportPath) throws Exception {
|
||||||
|
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM");
|
||||||
|
// Setting the starting period
|
||||||
|
Calendar start = (Calendar) ExecuteWorkflow.startingLogPeriod.clone();
|
||||||
|
logger.info("(getIrusRRReport) Starting period for log download: " + sdf.format(start.getTime()));
|
||||||
|
|
||||||
|
// Setting the ending period (last day of the month)
|
||||||
|
// Calendar end = (Calendar) ExecuteWorkflow.endingLogPeriod.clone();
|
||||||
|
// end.add(Calendar.MONTH, +1);
|
||||||
|
// end.add(Calendar.DAY_OF_MONTH, -1);
|
||||||
|
Calendar end = Calendar.getInstance();
|
||||||
|
end.add(Calendar.DAY_OF_MONTH, -1);
|
||||||
|
|
||||||
|
logger.info("(getIrusRRReport) Ending period for log download: " + sdf.format(end.getTime()));
|
||||||
|
|
||||||
|
String reportUrl = irusUKURL + "GetReport/?Report=RR1&Release=4&RequestorID=OpenAIRE&BeginDate="
|
||||||
|
+ sdf.format(start.getTime()) + "&EndDate=" + sdf.format(end.getTime())
|
||||||
|
+ "&RepositoryIdentifier=&ItemDataType=&NewJiscBand=&Granularity=Monthly&Callback=";
|
||||||
|
|
||||||
|
logger.info("(getIrusRRReport) Getting report: " + reportUrl);
|
||||||
|
|
||||||
|
String text = getJson(reportUrl, "", "");
|
||||||
|
|
||||||
|
List<String> opendoarsToVisit = new ArrayList<String>();
|
||||||
|
JSONParser parser = new JSONParser();
|
||||||
|
JSONObject jsonObject = (JSONObject) parser.parse(text);
|
||||||
|
jsonObject = (JSONObject) jsonObject.get("ReportResponse");
|
||||||
|
jsonObject = (JSONObject) jsonObject.get("Report");
|
||||||
|
jsonObject = (JSONObject) jsonObject.get("Report");
|
||||||
|
jsonObject = (JSONObject) jsonObject.get("Customer");
|
||||||
|
JSONArray jsonArray = (JSONArray) jsonObject.get("ReportItems");
|
||||||
|
if (jsonArray != null) {
|
||||||
|
int i = 0;
|
||||||
|
for (Object aJsonArray : jsonArray) {
|
||||||
|
JSONObject jsonObjectRow = (JSONObject) aJsonArray;
|
||||||
|
JSONArray itemIdentifier = (JSONArray) jsonObjectRow.get("ItemIdentifier");
|
||||||
|
for (Object identifier : itemIdentifier) {
|
||||||
|
JSONObject opendoar = (JSONObject) identifier;
|
||||||
|
if (opendoar.get("Type").toString().equals("OpenDOAR")) {
|
||||||
|
i++;
|
||||||
|
opendoarsToVisit.add(opendoar.get("Value").toString());
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// break;
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info("(getIrusRRReport) Found the following opendoars for download: " + opendoarsToVisit);
|
||||||
|
|
||||||
|
if (ExecuteWorkflow.irusNumberOfOpendoarsToDownload > 0
|
||||||
|
&& ExecuteWorkflow.irusNumberOfOpendoarsToDownload <= opendoarsToVisit.size()) {
|
||||||
|
logger.info("Trimming siteIds list to the size of: " + ExecuteWorkflow.irusNumberOfOpendoarsToDownload);
|
||||||
|
opendoarsToVisit = opendoarsToVisit.subList(0, ExecuteWorkflow.irusNumberOfOpendoarsToDownload);
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info("(getIrusRRReport) Downloading the followins opendoars: " + opendoarsToVisit);
|
||||||
|
|
||||||
|
for (String opendoar : opendoarsToVisit) {
|
||||||
|
logger.info("Now working on openDoar: " + opendoar);
|
||||||
|
this.getIrusIRReport(opendoar, irusUKReportPath);
|
||||||
|
}
|
||||||
|
logger.info("(getIrusRRReport) Finished with report: " + reportUrl);
|
||||||
|
} else {
|
||||||
|
logger.info("IRUS Reports not found for day");
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
private void getIrusIRReport(String opendoar, String irusUKReportPath) throws Exception {
|
||||||
|
|
||||||
|
logger.info("(getIrusIRReport) Getting report(s) with opendoar: " + opendoar);
|
||||||
|
|
||||||
|
ConnectDB.getHiveConnection().setAutoCommit(false);
|
||||||
|
|
||||||
|
SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM");
|
||||||
|
|
||||||
|
// Setting the starting period
|
||||||
|
Calendar start = (Calendar) ExecuteWorkflow.startingLogPeriod.clone();
|
||||||
|
logger.info("(getIrusIRReport) Starting period for log download: " + simpleDateFormat.format(start.getTime()));
|
||||||
|
|
||||||
|
// Setting the ending period (last day of the month)
|
||||||
|
Calendar end = Calendar.getInstance();
|
||||||
|
end.add(Calendar.DAY_OF_MONTH, -1);
|
||||||
|
|
||||||
|
// Calendar end = (Calendar) ExecuteWorkflow.endingLogPeriod.clone();
|
||||||
|
// end.add(Calendar.MONTH, +1);
|
||||||
|
// end.add(Calendar.DAY_OF_MONTH, -1);
|
||||||
|
logger.info("(getIrusIRReport) Ending period for log download: " + simpleDateFormat.format(end.getTime()));
|
||||||
|
|
||||||
|
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
|
||||||
|
PreparedStatement st = ConnectDB
|
||||||
|
.getHiveConnection()
|
||||||
|
.prepareStatement(
|
||||||
|
"SELECT max(date) FROM " + ConnectDB.getUsageStatsDBSchema() + ".sushilog WHERE repository=?");
|
||||||
|
st.setString(1, "opendoar____::" + opendoar);
|
||||||
|
ResultSet rs_date = st.executeQuery();
|
||||||
|
Date dateMax = null;
|
||||||
|
while (rs_date.next()) {
|
||||||
|
if (rs_date.getString(1) != null && !rs_date.getString(1).equals("null")
|
||||||
|
&& !rs_date.getString(1).equals("")) {
|
||||||
|
start.setTime(sdf.parse(rs_date.getString(1)));
|
||||||
|
dateMax = sdf.parse(rs_date.getString(1));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
rs_date.close();
|
||||||
|
int batch_size = 0;
|
||||||
|
|
||||||
|
if (dateMax != null && end.getTime().compareTo(dateMax) <= 0) {
|
||||||
|
logger.info("Date found in logs " + dateMax + " and not downloanding logs for " + opendoar);
|
||||||
|
} else {
|
||||||
|
start.add(Calendar.MONTH, 1);
|
||||||
|
while (start.before(end)) {
|
||||||
|
logger.info("Downloading for date: " + simpleDateFormat.format(start.getTime()));
|
||||||
|
String reportUrl = this.irusUKURL + "GetReport/?Report=IR1&Release=4&RequestorID=OpenAIRE&BeginDate="
|
||||||
|
+ simpleDateFormat.format(start.getTime()) + "&EndDate=" + simpleDateFormat.format(start.getTime())
|
||||||
|
+ "&RepositoryIdentifier=opendoar%3A" + opendoar
|
||||||
|
+ "&ItemIdentifier=&ItemDataType=&hasDOI=&Granularity=Monthly&Callback=";
|
||||||
|
start.add(Calendar.MONTH, 1);
|
||||||
|
|
||||||
|
logger.info("Downloading file: " + reportUrl);
|
||||||
|
String text = getJson(reportUrl, "", "");
|
||||||
|
if (text == null) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
FileSystem fs = FileSystem.get(new Configuration());
|
||||||
|
String filePath = irusUKReportPath + "/" + "IrusIRReport_"
|
||||||
|
+ opendoar + "_" + simpleDateFormat.format(start.getTime()) + ".json";
|
||||||
|
logger.info("Storing to file: " + filePath);
|
||||||
|
FSDataOutputStream fin = fs.create(new Path(filePath), true);
|
||||||
|
|
||||||
|
JSONParser parser = new JSONParser();
|
||||||
|
JSONObject jsonObject = (JSONObject) parser.parse(text);
|
||||||
|
jsonObject = (JSONObject) jsonObject.get("ReportResponse");
|
||||||
|
jsonObject = (JSONObject) jsonObject.get("Report");
|
||||||
|
jsonObject = (JSONObject) jsonObject.get("Report");
|
||||||
|
jsonObject = (JSONObject) jsonObject.get("Customer");
|
||||||
|
JSONArray jsonArray = (JSONArray) jsonObject.get("ReportItems");
|
||||||
|
if (jsonArray == null) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
String oai = "";
|
||||||
|
for (Object aJsonArray : jsonArray) {
|
||||||
|
JSONObject jsonObjectRow = (JSONObject) aJsonArray;
|
||||||
|
fin.write(jsonObjectRow.toJSONString().getBytes());
|
||||||
|
fin.writeChar('\n');
|
||||||
|
}
|
||||||
|
|
||||||
|
fin.close();
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
// ConnectDB.getHiveConnection().close();
|
||||||
|
|
||||||
|
logger.info("(getIrusIRReport) Finished downloading report(s) with opendoar: " + opendoar);
|
||||||
|
}
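// Note: the max(date) lookup above makes the download incremental: "start" is advanced to the
// latest month already present in sushilog for this opendoar, and start.add(Calendar.MONTH, 1)
// then skips that month, so only newer monthly reports are fetched.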
|
||||||
|
|
||||||
|
private String getJson(String url) throws Exception {
|
||||||
|
try {
|
||||||
|
System.out.println("===> Connecting to: " + url);
|
||||||
|
URL website = new URL(url);
|
||||||
|
System.out.println("Connection url -----> " + url);
|
||||||
|
URLConnection connection = website.openConnection();
|
||||||
|
|
||||||
|
// connection.setRequestProperty ("Authorization", "Basic "+encoded);
|
||||||
|
StringBuilder response;
|
||||||
|
try (BufferedReader in = new BufferedReader(new InputStreamReader(connection.getInputStream()))) {
|
||||||
|
response = new StringBuilder();
|
||||||
|
String inputLine;
|
||||||
|
while ((inputLine = in.readLine()) != null) {
|
||||||
|
response.append(inputLine);
|
||||||
|
// response.append("\n");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
System.out.println("response ====> " + response.toString());
|
||||||
|
|
||||||
|
return response.toString();
|
||||||
|
} catch (Exception e) {
|
||||||
|
logger.error("Failed to get URL: " + e);
|
||||||
|
System.out.println("Failed to get URL: " + e);
|
||||||
|
throw new Exception("Failed to get URL: " + e.toString(), e);
|
||||||
|
}
|
||||||
|
}
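// Illustrative sketch (not part of the original patch): the helper above relies on the
// connection's default timeouts. A variant with explicit connect/read timeouts could look like
// the following; the 30-second values are an assumption, not taken from the original code.
// private String getJsonWithTimeout(String url) throws Exception {
// 	URLConnection connection = new URL(url).openConnection();
// 	connection.setConnectTimeout(30 * 1000);
// 	connection.setReadTimeout(30 * 1000);
// 	StringBuilder response = new StringBuilder();
// 	try (BufferedReader in = new BufferedReader(new InputStreamReader(connection.getInputStream()))) {
// 		String inputLine;
// 		while ((inputLine = in.readLine()) != null) {
// 			response.append(inputLine);
// 		}
// 	}
// 	return response.toString();
// }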
|
||||||
|
|
||||||
|
private String getJson(String url, String username, String password) throws Exception {
|
||||||
|
// String cred=username+":"+password;
|
||||||
|
// String encoded = new sun.misc.BASE64Encoder().encode (cred.getBytes());
|
||||||
|
try {
|
||||||
|
URL website = new URL(url);
|
||||||
|
URLConnection connection = website.openConnection();
|
||||||
|
// connection.setRequestProperty ("Authorization", "Basic "+encoded);
|
||||||
|
StringBuilder response;
|
||||||
|
try (BufferedReader in = new BufferedReader(new InputStreamReader(connection.getInputStream()))) {
|
||||||
|
response = new StringBuilder();
|
||||||
|
String inputLine;
|
||||||
|
while ((inputLine = in.readLine()) != null) {
|
||||||
|
response.append(inputLine);
|
||||||
|
response.append("\n");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return response.toString();
|
||||||
|
} catch (Exception e) {
|
||||||
|
logger.error("Failed to get URL", e);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
@ -0,0 +1,273 @@
package eu.dnetlib.oa.graph.usagerawdata.export;
|
||||||
|
|
||||||
|
import java.io.*;
|
||||||
|
import java.net.URL;
|
||||||
|
import java.net.URLConnection;
|
||||||
|
import java.sql.PreparedStatement;
|
||||||
|
import java.sql.ResultSet;
|
||||||
|
import java.sql.Statement;
|
||||||
|
import java.text.SimpleDateFormat;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.Calendar;
|
||||||
|
import java.util.Date;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
import org.apache.hadoop.conf.Configuration;
|
||||||
|
import org.apache.hadoop.fs.FSDataOutputStream;
|
||||||
|
import org.apache.hadoop.fs.FileSystem;
|
||||||
|
import org.apache.hadoop.fs.Path;
|
||||||
|
import org.json.simple.JSONArray;
|
||||||
|
import org.json.simple.JSONObject;
|
||||||
|
import org.json.simple.parser.JSONParser;
|
||||||
|
import org.slf4j.Logger;
|
||||||
|
import org.slf4j.LoggerFactory;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @author D. Pierrakos, S. Zoupanos
|
||||||
|
*/
|
||||||
|
public class LaReferenciaDownloadLogs {
|
||||||
|
|
||||||
|
private final String piwikUrl;
|
||||||
|
private Date startDate;
|
||||||
|
private final String tokenAuth;
|
||||||
|
|
||||||
|
/*
|
||||||
|
* The Piwik's API method
|
||||||
|
*/
|
||||||
|
private final String APImethod = "?module=API&method=Live.getLastVisitsDetails";
|
||||||
|
private final String format = "&format=json";
|
||||||
|
private final String ApimethodGetAllSites = "?module=API&method=SitesManager.getSitesWithViewAccess";
|
||||||
|
|
||||||
|
private static final Logger logger = LoggerFactory.getLogger(LaReferenciaDownloadLogs.class);
|
||||||
|
|
||||||
|
public LaReferenciaDownloadLogs(String piwikUrl, String tokenAuth) throws Exception {
|
||||||
|
this.piwikUrl = piwikUrl;
|
||||||
|
this.tokenAuth = tokenAuth;
|
||||||
|
this.createTables();
|
||||||
|
// this.createTmpTables();
|
||||||
|
}
|
||||||
|
|
||||||
|
public void reCreateLogDirs() throws IllegalArgumentException, IOException {
|
||||||
|
FileSystem dfs = FileSystem.get(new Configuration());
|
||||||
|
|
||||||
|
logger.info("Deleting lareferenciaLog directory: " + ExecuteWorkflow.lareferenciaLogPath);
|
||||||
|
dfs.delete(new Path(ExecuteWorkflow.lareferenciaLogPath), true);
|
||||||
|
|
||||||
|
logger.info("Creating lareferenciaLog directory: " + ExecuteWorkflow.lareferenciaLogPath);
|
||||||
|
dfs.mkdirs(new Path(ExecuteWorkflow.lareferenciaLogPath));
|
||||||
|
}
|
||||||
|
|
||||||
|
private void createTables() throws Exception {
|
||||||
|
try {
|
||||||
|
Statement stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
|
||||||
|
logger.info("Creating LaReferencia tables");
|
||||||
|
String sqlCreateTableLareferenciaLog = "CREATE TABLE IF NOT EXISTS "
|
||||||
|
+ ConnectDB.getUsageStatsDBSchema() + ".lareferencialog(matomoid INT, "
|
||||||
|
+ "source STRING, id_visit STRING, country STRING, action STRING, url STRING, entity_id STRING, "
|
||||||
|
+ "source_item_type STRING, timestamp STRING, referrer_name STRING, agent STRING) "
|
||||||
|
+ "clustered by (source, id_visit, action, timestamp, entity_id) into 100 buckets "
|
||||||
|
+ "stored as orc tblproperties('transactional'='true')";
|
||||||
|
stmt.executeUpdate(sqlCreateTableLareferenciaLog);
|
||||||
|
logger.info("Created LaReferencia tables");
|
||||||
|
// String sqlcreateRuleLaReferenciaLog = "CREATE OR REPLACE RULE ignore_duplicate_inserts AS "
|
||||||
|
// + " ON INSERT TO lareferencialog "
|
||||||
|
// + " WHERE (EXISTS ( SELECT lareferencialog.matomoid, lareferencialog.source, lareferencialog.id_visit,"
|
||||||
|
// + "lareferencialog.action, lareferencialog.\"timestamp\", lareferencialog.entity_id "
|
||||||
|
// + "FROM lareferencialog "
|
||||||
|
// + "WHERE lareferencialog.matomoid=new.matomoid AND lareferencialog.source = new.source AND lareferencialog.id_visit = new.id_visit AND lareferencialog.action = new.action AND lareferencialog.entity_id = new.entity_id AND lareferencialog.\"timestamp\" = new.\"timestamp\")) DO INSTEAD NOTHING;";
|
||||||
|
// String sqlCreateRuleIndexLaReferenciaLog = "create index if not exists lareferencialog_rule on lareferencialog(matomoid, source, id_visit, action, entity_id, \"timestamp\");";
|
||||||
|
// stmt.executeUpdate(sqlcreateRuleLaReferenciaLog);
|
||||||
|
// stmt.executeUpdate(sqlCreateRuleIndexLaReferenciaLog);
|
||||||
|
|
||||||
|
stmt.close();
|
||||||
|
ConnectDB.getHiveConnection().close();
|
||||||
|
logger.info("Lareferencia Tables Created");
|
||||||
|
|
||||||
|
} catch (Exception e) {
|
||||||
|
logger.error("Failed to create tables: " + e);
|
||||||
|
throw new Exception("Failed to create tables: " + e.toString(), e);
|
||||||
|
// System.exit(0);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// private void createTmpTables() throws Exception {
|
||||||
|
//
|
||||||
|
// try {
|
||||||
|
// Statement stmt = ConnectDB.getConnection().createStatement();
|
||||||
|
// String sqlCreateTmpTableLaReferenciaLog = "CREATE TABLE IF NOT EXISTS lareferencialogtmp(matomoid INTEGER, source TEXT, id_visit TEXT, country TEXT, action TEXT, url TEXT, entity_id TEXT, source_item_type TEXT, timestamp TEXT, referrer_name TEXT, agent TEXT, PRIMARY KEY(source, id_visit, action, timestamp, entity_id));";
|
||||||
|
// String sqlcreateTmpRuleLaReferenciaLog = "CREATE OR REPLACE RULE ignore_duplicate_inserts AS "
|
||||||
|
// + " ON INSERT TO lareferencialogtmp "
|
||||||
|
// + " WHERE (EXISTS ( SELECT lareferencialogtmp.matomoid, lareferencialogtmp.source, lareferencialogtmp.id_visit,"
|
||||||
|
// + "lareferencialogtmp.action, lareferencialogtmp.\"timestamp\", lareferencialogtmp.entity_id "
|
||||||
|
// + "FROM lareferencialogtmp "
|
||||||
|
// + "WHERE lareferencialogtmp.matomoid=new.matomoid AND lareferencialogtmp.source = new.source AND lareferencialogtmp.id_visit = new.id_visit AND lareferencialogtmp.action = new.action AND lareferencialogtmp.entity_id = new.entity_id AND lareferencialogtmp.\"timestamp\" = new.\"timestamp\")) DO INSTEAD NOTHING;";
|
||||||
|
// stmt.executeUpdate(sqlCreateTmpTableLaReferenciaLog);
|
||||||
|
// stmt.executeUpdate(sqlcreateTmpRuleLaReferenciaLog);
|
||||||
|
//
|
||||||
|
// stmt.close();
|
||||||
|
// log.info("Lareferencia Tmp Tables Created");
|
||||||
|
//
|
||||||
|
// } catch (Exception e) {
|
||||||
|
// log.error("Failed to create tmptables: " + e);
|
||||||
|
// throw new Exception("Failed to create tmp tables: " + e.toString(), e);
|
||||||
|
// // System.exit(0);
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
private String getPiwikLogUrl() {
|
||||||
|
return piwikUrl + "/";
|
||||||
|
}
|
||||||
|
|
||||||
|
private String getJson(String url) throws Exception {
|
||||||
|
try {
|
||||||
|
URL website = new URL(url);
|
||||||
|
URLConnection connection = website.openConnection();
|
||||||
|
|
||||||
|
StringBuilder response;
|
||||||
|
try (BufferedReader in = new BufferedReader(new InputStreamReader(connection.getInputStream()))) {
|
||||||
|
response = new StringBuilder();
|
||||||
|
String inputLine;
|
||||||
|
while ((inputLine = in.readLine()) != null) {
|
||||||
|
response.append(inputLine);
|
||||||
|
// response.append("\n");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return response.toString();
|
||||||
|
} catch (Exception e) {
|
||||||
|
logger.error("Failed to get URL: " + e);
|
||||||
|
throw new Exception("Failed to get URL: " + e.toString(), e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void GetLaReferenciaRepos(String repoLogsPath) throws Exception {
|
||||||
|
|
||||||
|
String baseApiUrl = getPiwikLogUrl() + ApimethodGetAllSites + format + "&token_auth=" + this.tokenAuth;
|
||||||
|
String content = "";
|
||||||
|
|
||||||
|
List<Integer> siteIdsToVisit = new ArrayList<Integer>();
|
||||||
|
|
||||||
|
// Getting all the siteIds in a list for logging reasons & limiting the list
|
||||||
|
// to the max number of siteIds
|
||||||
|
content = getJson(baseApiUrl);
|
||||||
|
JSONParser parser = new JSONParser();
|
||||||
|
JSONArray jsonArray = (JSONArray) parser.parse(content);
|
||||||
|
for (Object aJsonArray : jsonArray) {
|
||||||
|
JSONObject jsonObjectRow = (JSONObject) aJsonArray;
|
||||||
|
siteIdsToVisit.add(Integer.parseInt(jsonObjectRow.get("idsite").toString()));
|
||||||
|
}
|
||||||
|
logger.info("Found the following siteIds for download: " + siteIdsToVisit);
|
||||||
|
|
||||||
|
if (ExecuteWorkflow.numberOfPiwikIdsToDownload > 0
|
||||||
|
&& ExecuteWorkflow.numberOfPiwikIdsToDownload <= siteIdsToVisit.size()) {
|
||||||
|
logger.info("Trimming siteIds list to the size of: " + ExecuteWorkflow.numberOfPiwikIdsToDownload);
|
||||||
|
siteIdsToVisit = siteIdsToVisit.subList(0, ExecuteWorkflow.numberOfPiwikIdsToDownload);
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info("Downloading from repos with the followins siteIds: " + siteIdsToVisit);
|
||||||
|
|
||||||
|
for (int siteId : siteIdsToVisit) {
|
||||||
|
logger.info("Now working on LaReferencia MatomoId: " + siteId);
|
||||||
|
this.GetLaReFerenciaLogs(repoLogsPath, siteId);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void GetLaReFerenciaLogs(String repoLogsPath,
|
||||||
|
int laReferencialMatomoID) throws Exception {
|
||||||
|
|
||||||
|
logger.info("Downloading logs for LaReferencia repoid " + laReferencialMatomoID);
|
||||||
|
|
||||||
|
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
|
||||||
|
// Setting the starting period
|
||||||
|
Calendar start = (Calendar) ExecuteWorkflow.startingLogPeriod.clone();
|
||||||
|
logger.info("Starting period for log download: " + sdf.format(start.getTime()));
|
||||||
|
|
||||||
|
// Setting the ending period (last day of the month)
|
||||||
|
// Calendar end = (Calendar) ExecuteWorkflow.endingLogPeriod.clone();
|
||||||
|
// end.add(Calendar.MONTH, +1);
|
||||||
|
// end.add(Calendar.DAY_OF_MONTH, -1);
|
||||||
|
Calendar end = Calendar.getInstance();
|
||||||
|
end.add(Calendar.DAY_OF_MONTH, -1);
|
||||||
|
|
||||||
|
logger.info("Ending period for log download: " + sdf.format(end.getTime()));
|
||||||
|
|
||||||
|
PreparedStatement st = ConnectDB
|
||||||
|
.getHiveConnection()
|
||||||
|
.prepareStatement(
|
||||||
|
"SELECT max(timestamp) FROM " + ConnectDB.getUsageStatsDBSchema()
|
||||||
|
+ ".lareferencialog WHERE matomoid=?");
|
||||||
|
st.setInt(1, laReferencialMatomoID);
|
||||||
|
Date dateMax = null;
|
||||||
|
|
||||||
|
ResultSet rs_date = st.executeQuery();
|
||||||
|
while (rs_date.next()) {
|
||||||
|
if (rs_date.getString(1) != null && !rs_date.getString(1).equals("null")
|
||||||
|
&& !rs_date.getString(1).equals("")) {
|
||||||
|
start.setTime(sdf.parse(rs_date.getString(1)));
|
||||||
|
dateMax = sdf.parse(rs_date.getString(1));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
rs_date.close();
|
||||||
|
|
||||||
|
for (Calendar currDay = (Calendar) start.clone(); currDay.before(end); currDay.add(Calendar.DATE, 1)) {
|
||||||
|
Date date = currDay.getTime();
|
||||||
|
if (dateMax != null && currDay.getTime().compareTo(dateMax) <= 0) {
|
||||||
|
logger
|
||||||
|
.info(
|
||||||
|
"Date found in logs " + dateMax + " and not downloanding Matomo logs for "
|
||||||
|
+ laReferencialMatomoID);
|
||||||
|
} else {
|
||||||
|
logger
|
||||||
|
.info(
|
||||||
|
"Downloading logs for LaReferencia repoid " + laReferencialMatomoID + " and for "
|
||||||
|
+ sdf.format(date));
|
||||||
|
|
||||||
|
String period = "&period=day&date=" + sdf.format(date);
|
||||||
|
String outFolder = "";
|
||||||
|
outFolder = repoLogsPath;
|
||||||
|
|
||||||
|
FileSystem fs = FileSystem.get(new Configuration());
|
||||||
|
FSDataOutputStream fin = fs
|
||||||
|
.create(
|
||||||
|
new Path(
|
||||||
|
outFolder + "/" + laReferencialMatomoID + "_LaRefPiwiklog" + sdf.format((date)) + ".json"),
|
||||||
|
true);
|
||||||
|
|
||||||
|
String baseApiUrl = getPiwikLogUrl() + APImethod + "&idSite=" + laReferencialMatomoID + period + format
|
||||||
|
+ "&expanded=5&filter_limit=1000&token_auth=" + tokenAuth;
|
||||||
|
String content = "";
|
||||||
|
int i = 0;
|
||||||
|
|
||||||
|
JSONParser parser = new JSONParser();
|
||||||
|
do {
|
||||||
|
String apiUrl = baseApiUrl;
|
||||||
|
|
||||||
|
if (i > 0) {
|
||||||
|
apiUrl += "&filter_offset=" + (i * 1000);
|
||||||
|
}
|
||||||
|
|
||||||
|
content = getJson(apiUrl);
|
||||||
|
if (content.length() == 0 || content.equals("[]")) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
JSONArray jsonArray = (JSONArray) parser.parse(content);
|
||||||
|
for (Object aJsonArray : jsonArray) {
|
||||||
|
JSONObject jsonObjectRaw = (JSONObject) aJsonArray;
|
||||||
|
fin.write(jsonObjectRaw.toJSONString().getBytes());
|
||||||
|
fin.writeChar('\n');
|
||||||
|
}
|
||||||
|
|
||||||
|
logger
|
||||||
|
.info(
|
||||||
|
"Downloaded part " + i + " of logs for LaReferencia repoid " + laReferencialMatomoID
|
||||||
|
+ " and for "
|
||||||
|
+ sdf.format(date));
|
||||||
|
i++;
|
||||||
|
} while (true);
|
||||||
|
fin.close();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
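// Note on the loop above: Live.getLastVisitsDetails is paged through filter_limit=1000 and
// filter_offset, so each iteration fetches the next 1000 visits until Matomo returns "[]".
// A minimal sketch of the paged URL construction, shown only for illustration:
// private static String pagedUrl(String baseApiUrl, int page) {
// 	return page > 0 ? baseApiUrl + "&filter_offset=" + (page * 1000) : baseApiUrl;
// }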
|
||||||
|
}
@ -0,0 +1,291 @@
package eu.dnetlib.oa.graph.usagerawdata.export;
|
||||||
|
|
||||||
|
import java.io.*;
|
||||||
|
import java.net.URLDecoder;
|
||||||
|
import java.sql.Connection;
|
||||||
|
import java.sql.PreparedStatement;
|
||||||
|
import java.sql.SQLException;
|
||||||
|
import java.sql.Statement;
|
||||||
|
import java.sql.Timestamp;
|
||||||
|
import java.text.SimpleDateFormat;
|
||||||
|
import java.util.*;
|
||||||
|
import java.util.regex.Matcher;
|
||||||
|
import java.util.regex.Pattern;
|
||||||
|
|
||||||
|
import org.apache.hadoop.conf.Configuration;
|
||||||
|
import org.apache.hadoop.fs.FileSystem;
|
||||||
|
import org.apache.hadoop.fs.LocatedFileStatus;
|
||||||
|
import org.apache.hadoop.fs.Path;
|
||||||
|
import org.apache.hadoop.fs.RemoteIterator;
|
||||||
|
import org.json.simple.JSONArray;
|
||||||
|
import org.json.simple.JSONObject;
|
||||||
|
import org.json.simple.parser.JSONParser;
|
||||||
|
import org.slf4j.Logger;
|
||||||
|
import org.slf4j.LoggerFactory;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @author D. Pierrakos, S. Zoupanos
|
||||||
|
*/
|
||||||
|
public class LaReferenciaStats {
|
||||||
|
|
||||||
|
private static final Logger logger = LoggerFactory.getLogger(LaReferenciaStats.class);
|
||||||
|
|
||||||
|
private String logRepoPath;
|
||||||
|
|
||||||
|
private Statement stmt = null;
|
||||||
|
|
||||||
|
private String CounterRobotsURL;
|
||||||
|
private ArrayList robotsList;
|
||||||
|
|
||||||
|
public LaReferenciaStats(String logRepoPath) throws Exception {
|
||||||
|
this.logRepoPath = logRepoPath;
|
||||||
|
this.createTables();
|
||||||
|
// this.createTmpTables();
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* private void connectDB() throws Exception { try { ConnectDB connectDB = new ConnectDB(); } catch (Exception e) {
|
||||||
|
* log.error("Connect to db failed: " + e); throw new Exception("Failed to connect to db: " + e.toString(), e); } }
|
||||||
|
*/
|
||||||
|
private void createTables() throws Exception {
|
||||||
|
try {
|
||||||
|
Statement stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
|
||||||
|
logger.info("Creating LaReferencia tables");
|
||||||
|
String sqlCreateTableLareferenciaLog = "CREATE TABLE IF NOT EXISTS " +
|
||||||
|
ConnectDB.getUsageStatsDBSchema() + ".lareferencialog(matomoid INT, " +
|
||||||
|
"source STRING, id_visit STRING, country STRING, action STRING, url STRING, entity_id STRING, " +
|
||||||
|
"source_item_type STRING, timestamp STRING, referrer_name STRING, agent STRING) " +
|
||||||
|
"clustered by (source, id_visit, action, timestamp, entity_id) into 100 buckets " +
|
||||||
|
"stored as orc tblproperties('transactional'='true')";
|
||||||
|
stmt.executeUpdate(sqlCreateTableLareferenciaLog);
|
||||||
|
logger.info("Created LaReferencia tables");
|
||||||
|
|
||||||
|
stmt.close();
|
||||||
|
ConnectDB.getHiveConnection().close();
|
||||||
|
logger.info("Lareferencia Tables Created");
|
||||||
|
|
||||||
|
} catch (Exception e) {
|
||||||
|
logger.error("Failed to create tables: " + e);
|
||||||
|
throw new Exception("Failed to create tables: " + e.toString(), e);
|
||||||
|
// System.exit(0);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void processLogs() throws Exception {
|
||||||
|
try {
|
||||||
|
logger.info("Processing LaReferencia repository logs");
|
||||||
|
processlaReferenciaLog();
|
||||||
|
logger.info("LaReferencia repository logs process done");
|
||||||
|
|
||||||
|
logger.info("LaReferencia removing double clicks");
|
||||||
|
removeDoubleClicks();
|
||||||
|
logger.info("LaReferencia removed double clicks");
|
||||||
|
|
||||||
|
logger.info("LaReferencia updating Production Tables");
|
||||||
|
updateProdTables();
|
||||||
|
logger.info("LaReferencia updated Production Tables");
|
||||||
|
|
||||||
|
} catch (Exception e) {
|
||||||
|
logger.error("Failed to process logs: " + e);
|
||||||
|
throw new Exception("Failed to process logs: " + e.toString(), e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void processlaReferenciaLog() throws Exception {
|
||||||
|
Statement stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
ConnectDB.getHiveConnection().setAutoCommit(false);
|
||||||
|
|
||||||
|
logger.info("Adding JSON Serde jar");
|
||||||
|
stmt.executeUpdate("add jar /usr/share/cmf/common_jars/hive-hcatalog-core-1.1.0-cdh5.14.0.jar");
|
||||||
|
logger.info("Added JSON Serde jar");
|
||||||
|
|
||||||
|
logger.info("Dropping lareferencialogtmp_json table");
|
||||||
|
String drop_lareferencialogtmp_json = "DROP TABLE IF EXISTS " +
|
||||||
|
ConnectDB.getUsageStatsDBSchema() +
|
||||||
|
".lareferencialogtmp_json";
|
||||||
|
stmt.executeUpdate(drop_lareferencialogtmp_json);
|
||||||
|
logger.info("Dropped lareferencialogtmp_json table");
|
||||||
|
|
||||||
|
logger.info("Creating lareferencialogtmp_json");
|
||||||
|
String create_lareferencialogtmp_json = "CREATE EXTERNAL TABLE IF NOT EXISTS " +
|
||||||
|
ConnectDB.getUsageStatsDBSchema() +
|
||||||
|
".lareferencialogtmp_json(\n" +
|
||||||
|
" `idSite` STRING,\n" +
|
||||||
|
" `idVisit` STRING,\n" +
|
||||||
|
" `country` STRING,\n" +
|
||||||
|
" `referrerName` STRING,\n" +
|
||||||
|
" `browser` STRING,\n" +
|
||||||
|
" `repItem` STRING,\n" +
|
||||||
|
" `actionDetails` ARRAY<\n" +
|
||||||
|
" struct<\n" +
|
||||||
|
" timestamp: STRING,\n" +
|
||||||
|
" type: STRING,\n" +
|
||||||
|
" url: STRING,\n" +
|
||||||
|
" `customVariables`: struct<\n" +
|
||||||
|
" `1`: struct<\n" +
|
||||||
|
" `customVariablePageValue1`: STRING\n" +
|
||||||
|
" >,\n" +
|
||||||
|
" `2`: struct<\n" +
|
||||||
|
" `customVariablePageValue2`: STRING\n" +
|
||||||
|
" >\n" +
|
||||||
|
" >\n" +
|
||||||
|
" >\n" +
|
||||||
|
" >" +
|
||||||
|
")\n" +
|
||||||
|
"ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe'\n" +
|
||||||
|
"LOCATION '" + ExecuteWorkflow.lareferenciaLogPath + "'\n" +
|
||||||
|
"TBLPROPERTIES (\"transactional\"=\"false\")";
|
||||||
|
stmt.executeUpdate(create_lareferencialogtmp_json);
|
||||||
|
logger.info("Created lareferencialogtmp_json");
|
||||||
|
|
||||||
|
logger.info("Dropping lareferencialogtmp table");
|
||||||
|
String drop_lareferencialogtmp = "DROP TABLE IF EXISTS " +
|
||||||
|
ConnectDB.getUsageStatsDBSchema() +
|
||||||
|
".lareferencialogtmp";
|
||||||
|
stmt.executeUpdate(drop_lareferencialogtmp);
|
||||||
|
logger.info("Dropped lareferencialogtmp table");
|
||||||
|
|
||||||
|
logger.info("Creating lareferencialogtmp");
|
||||||
|
String create_lareferencialogtmp = "CREATE TABLE " +
|
||||||
|
ConnectDB.getUsageStatsDBSchema() + ".lareferencialogtmp(matomoid INT, " +
|
||||||
|
"source STRING, id_visit STRING, country STRING, action STRING, url STRING, entity_id STRING, " +
|
||||||
|
"source_item_type STRING, timestamp STRING, referrer_name STRING, agent STRING) " +
|
||||||
|
"clustered by (source, id_visit, action, timestamp, entity_id) into 100 buckets " +
|
||||||
|
"stored as orc tblproperties('transactional'='true')";
|
||||||
|
stmt.executeUpdate(create_lareferencialogtmp);
|
||||||
|
logger.info("Created lareferencialogtmp");
|
||||||
|
|
||||||
|
logger.info("Inserting into lareferencialogtmp");
|
||||||
|
String insert_lareferencialogtmp = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".lareferencialogtmp " +
|
||||||
|
"SELECT DISTINCT cast(idSite as INT) as matomoid, CONCAT('opendoar____::', " +
|
||||||
|
"actiondetail.customVariables.`2`.customVariablePageValue2) as source, idVisit as id_Visit, country, " +
|
||||||
|
"actiondetail.type as action, actiondetail.url as url, " +
|
||||||
|
"actiondetail.customVariables.`1`.`customVariablePageValue1` as entity_id, " +
|
||||||
|
"'repItem' as source_item_type, from_unixtime(cast(actiondetail.timestamp as BIGINT)) as timestamp, " +
|
||||||
|
"referrerName as referrer_name, browser as agent " +
|
||||||
|
"FROM " + ConnectDB.getUsageStatsDBSchema() + ".lareferencialogtmp_json " +
|
||||||
|
"LATERAL VIEW explode(actiondetails) actiondetailsTable AS actiondetail";
|
||||||
|
stmt.executeUpdate(insert_lareferencialogtmp);
|
||||||
|
logger.info("Inserted into lareferencialogtmp");
|
||||||
|
|
||||||
|
stmt.close();
|
||||||
|
}
|
||||||
|
|
||||||
|
public void removeDoubleClicks() throws Exception {
|
||||||
|
|
||||||
|
Statement stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
ConnectDB.getHiveConnection().setAutoCommit(false);
|
||||||
|
|
||||||
|
logger.info("Cleaning download double clicks");
|
||||||
|
// clean download double clicks
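// Two 'download' rows of the same visit on the same entity count as a double click when they
// fall within 30 minutes of each other (the unix_timestamp difference below is divided by 60,
// i.e. measured in minutes); earlier duplicates are deleted and the latest row is kept.
// The same pattern is applied to 'action' rows further down with a 10-minute window.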
|
||||||
|
String sql = "DELETE from " + ConnectDB.getUsageStatsDBSchema() + ".lareferencialogtmp WHERE EXISTS (" +
|
||||||
|
"SELECT DISTINCT p1.source, p1.id_visit, p1.action, p1.entity_id, p1.timestamp " +
|
||||||
|
"FROM " + ConnectDB.getUsageStatsDBSchema() + ".lareferencialogtmp p1, " +
|
||||||
|
ConnectDB.getUsageStatsDBSchema() + ".lareferencialogtmp p2 " +
|
||||||
|
"WHERE p1.source=p2.source AND p1.id_visit=p2.id_visit AND p1.entity_id=p2.entity_id " +
|
||||||
|
"AND p1.action=p2.action AND p1.action='download' AND p1.timestamp!=p2.timestamp " +
|
||||||
|
"AND p1.timestamp<p2.timestamp AND ((unix_timestamp(p2.timestamp)-unix_timestamp(p1.timestamp))/60)<30 " +
|
||||||
|
"AND lareferencialogtmp.source=p1.source AND lareferencialogtmp.id_visit=p1.id_visit " +
|
||||||
|
"AND lareferencialogtmp.action=p1.action AND lareferencialogtmp.entity_id=p1.entity_id " +
|
||||||
|
"AND lareferencialogtmp.timestamp=p1.timestamp)";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
stmt.close();
|
||||||
|
logger.info("Cleaned download double clicks");
|
||||||
|
|
||||||
|
stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
logger.info("Cleaning action double clicks");
|
||||||
|
// clean view double clicks
|
||||||
|
sql = "DELETE from " + ConnectDB.getUsageStatsDBSchema() + ".lareferencialogtmp WHERE EXISTS (" +
|
||||||
|
"SELECT DISTINCT p1.source, p1.id_visit, p1.action, p1.entity_id, p1.timestamp " +
|
||||||
|
"FROM " + ConnectDB.getUsageStatsDBSchema() + ".lareferencialogtmp p1, " +
|
||||||
|
ConnectDB.getUsageStatsDBSchema() + ".lareferencialogtmp p2 " +
|
||||||
|
"WHERE p1.source=p2.source AND p1.id_visit=p2.id_visit AND p1.entity_id=p2.entity_id " +
|
||||||
|
"AND p1.action=p2.action AND p1.action='action' AND p1.timestamp!=p2.timestamp " +
|
||||||
|
"AND p1.timestamp<p2.timestamp AND ((unix_timestamp(p2.timestamp)-unix_timestamp(p1.timestamp))/60)<10 " +
|
||||||
|
"AND lareferencialogtmp.source=p1.source AND lareferencialogtmp.id_visit=p1.id_visit " +
|
||||||
|
"AND lareferencialogtmp.action=p1.action AND lareferencialogtmp.entity_id=p1.entity_id " +
|
||||||
|
"AND lareferencialogtmp.timestamp=p1.timestamp)";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
stmt.close();
|
||||||
|
logger.info("Cleaned action double clicks");
|
||||||
|
// conn.close();
|
||||||
|
}
|
||||||
|
|
||||||
|
private void updateProdTables() throws SQLException, Exception {
|
||||||
|
|
||||||
|
Statement stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
ConnectDB.getHiveConnection().setAutoCommit(false);
|
||||||
|
|
||||||
|
logger.info("Updating lareferencialog");
|
||||||
|
String sql = "insert into " + ConnectDB.getUsageStatsDBSchema() + ".lareferencialog " +
|
||||||
|
"select * from " + ConnectDB.getUsageStatsDBSchema() + ".lareferencialogtmp";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
|
||||||
|
logger.info("Dropping lareferencialogtmp");
|
||||||
|
sql = "DROP TABLE " + ConnectDB.getUsageStatsDBSchema() + ".lareferencialogtmp";
|
||||||
|
logger.info("Dropped lareferencialogtmp");
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
|
||||||
|
stmt.close();
|
||||||
|
ConnectDB.getHiveConnection().close();
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
private ArrayList<String> listHdfsDir(String dir) throws Exception {
|
||||||
|
FileSystem hdfs = FileSystem.get(new Configuration());
|
||||||
|
RemoteIterator<LocatedFileStatus> Files;
|
||||||
|
ArrayList<String> fileNames = new ArrayList<>();
|
||||||
|
|
||||||
|
try {
|
||||||
|
Path exportPath = new Path(hdfs.getUri() + dir);
|
||||||
|
Files = hdfs.listFiles(exportPath, false);
|
||||||
|
while (Files.hasNext()) {
|
||||||
|
String fileName = Files.next().getPath().toString();
|
||||||
|
// log.info("Found hdfs file " + fileName);
|
||||||
|
fileNames.add(fileName);
|
||||||
|
}
|
||||||
|
// hdfs.close();
|
||||||
|
} catch (Exception e) {
|
||||||
|
logger.error("HDFS file path with exported data does not exist : " + new Path(hdfs.getUri() + logRepoPath));
|
||||||
|
throw new Exception("HDFS file path with exported data does not exist : " + logRepoPath, e);
|
||||||
|
}
|
||||||
|
|
||||||
|
return fileNames;
|
||||||
|
}
|
||||||
|
|
||||||
|
private String readHDFSFile(String filename) throws Exception {
|
||||||
|
String result;
|
||||||
|
try {
|
||||||
|
|
||||||
|
FileSystem fs = FileSystem.get(new Configuration());
|
||||||
|
// log.info("reading file : " + filename);
|
||||||
|
|
||||||
|
BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(new Path(filename))));
|
||||||
|
|
||||||
|
StringBuilder sb = new StringBuilder();
|
||||||
|
String line = br.readLine();
|
||||||
|
|
||||||
|
while (line != null) {
|
||||||
|
if (!line.equals("[]")) {
|
||||||
|
sb.append(line);
|
||||||
|
}
|
||||||
|
// sb.append(line);
|
||||||
|
line = br.readLine();
|
||||||
|
}
|
||||||
|
result = sb.toString().replace("][{\"idSite\"", ",{\"idSite\"");
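// Replacing the "][{"idSite"" boundary with ",{"idSite"" merges back-to-back JSON arrays into a
// single well-formed array (a no-op when the file already holds one JSON object per line).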
|
||||||
|
if (result.equals("")) {
|
||||||
|
result = "[]";
|
||||||
|
}
|
||||||
|
|
||||||
|
// fs.close();
|
||||||
|
} catch (Exception e) {
|
||||||
|
logger.error(e.getMessage());
|
||||||
|
throw new Exception(e);
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
@ -0,0 +1,331 @@
package eu.dnetlib.oa.graph.usagerawdata.export;
|
||||||
|
|
||||||
|
import java.io.*;
|
||||||
|
import java.net.Authenticator;
|
||||||
|
import java.net.URL;
|
||||||
|
import java.net.URLConnection;
|
||||||
|
import java.nio.file.Files;
|
||||||
|
import java.nio.file.Paths;
|
||||||
|
import java.sql.PreparedStatement;
|
||||||
|
import java.sql.ResultSet;
|
||||||
|
import java.sql.Statement;
|
||||||
|
import java.text.SimpleDateFormat;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.Calendar;
|
||||||
|
import java.util.Date;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.concurrent.ExecutorService;
|
||||||
|
import java.util.concurrent.Executors;
|
||||||
|
|
||||||
|
import org.apache.hadoop.conf.Configuration;
|
||||||
|
import org.apache.hadoop.fs.FSDataOutputStream;
|
||||||
|
import org.apache.hadoop.fs.FileSystem;
|
||||||
|
import org.apache.hadoop.fs.Path;
|
||||||
|
import org.json.simple.JSONArray;
|
||||||
|
import org.json.simple.JSONObject;
|
||||||
|
import org.json.simple.parser.JSONParser;
|
||||||
|
import org.slf4j.Logger;
|
||||||
|
import org.slf4j.LoggerFactory;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @author D. Pierrakos, S. Zoupanos
|
||||||
|
*/
|
||||||
|
public class PiwikDownloadLogs {
|
||||||
|
|
||||||
|
private final String piwikUrl;
|
||||||
|
private Date startDate;
|
||||||
|
private final String tokenAuth;
|
||||||
|
|
||||||
|
/*
|
||||||
|
* The Piwik's API method
|
||||||
|
*/
|
||||||
|
private final String APImethod = "?module=API&method=Live.getLastVisitsDetails";
|
||||||
|
private final String format = "&format=json";
|
||||||
|
|
||||||
|
private static final Logger logger = LoggerFactory.getLogger(PiwikDownloadLogs.class);
|
||||||
|
|
||||||
|
public PiwikDownloadLogs(String piwikUrl, String tokenAuth) {
|
||||||
|
this.piwikUrl = piwikUrl;
|
||||||
|
this.tokenAuth = tokenAuth;
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
private String getPiwikLogUrl() {
|
||||||
|
return "https://" + piwikUrl + "/";
|
||||||
|
}
|
||||||
|
|
||||||
|
private String getJson(String url) throws Exception {
|
||||||
|
try {
|
||||||
|
logger.debug("Connecting to download the JSON: " + url);
|
||||||
|
URL website = new URL(url);
|
||||||
|
URLConnection connection = website.openConnection();
|
||||||
|
|
||||||
|
StringBuilder response;
|
||||||
|
try (BufferedReader in = new BufferedReader(new InputStreamReader(connection.getInputStream()))) {
|
||||||
|
response = new StringBuilder();
|
||||||
|
String inputLine;
|
||||||
|
while ((inputLine = in.readLine()) != null) {
|
||||||
|
response.append(inputLine);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return response.toString();
|
||||||
|
} catch (Exception e) {
|
||||||
|
logger.error("Failed to get URL: " + url + " Exception: " + e);
|
||||||
|
throw new Exception("Failed to get URL: " + url + " Exception: " + e.toString(), e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class WorkerThread implements Runnable {
|
||||||
|
|
||||||
|
private Calendar currDay;
|
||||||
|
private int siteId;
|
||||||
|
private String repoLogsPath;
|
||||||
|
private String portalLogPath;
|
||||||
|
private String portalMatomoID;
|
||||||
|
|
||||||
|
public WorkerThread(Calendar currDay, int siteId, String repoLogsPath, String portalLogPath,
|
||||||
|
String portalMatomoID) throws IOException {
|
||||||
|
this.currDay = (Calendar) currDay.clone();
|
||||||
|
this.siteId = siteId;
this.repoLogsPath = repoLogsPath;
this.portalLogPath = portalLogPath;
this.portalMatomoID = portalMatomoID;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void run() {
|
||||||
|
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
|
||||||
|
System.out
|
||||||
|
.println(
|
||||||
|
Thread.currentThread().getName() + " (Start) Thread for "
|
||||||
|
+ "parameters: currDay=" + sdf.format(currDay.getTime()) + ", siteId=" + siteId
|
||||||
|
+ ", repoLogsPath=" + repoLogsPath + ", portalLogPath=" + portalLogPath
|
||||||
|
+ ", portalLogPath=" + portalLogPath + ", portalMatomoID=" + portalMatomoID);
|
||||||
|
try {
|
||||||
|
GetOpenAIRELogsForDate(currDay, siteId, repoLogsPath, portalLogPath, portalMatomoID);
|
||||||
|
|
||||||
|
} catch (Exception e) {
|
||||||
|
// TODO Auto-generated catch block
|
||||||
|
e.printStackTrace();
|
||||||
|
}
|
||||||
|
System.out
|
||||||
|
.println(
|
||||||
|
Thread.currentThread().getName() + " (End) Thread for "
|
||||||
|
+ "parameters: currDay=" + sdf.format(currDay.getTime()) + ", siteId=" + siteId
|
||||||
|
+ ", repoLogsPath=" + repoLogsPath + ", portalLogPath=" + portalLogPath
|
||||||
|
+ ", portalLogPath=" + portalLogPath + ", portalMatomoID=" + portalMatomoID);
|
||||||
|
}
|
||||||
|
|
||||||
|
public void GetOpenAIRELogsForDate(Calendar currDay, int siteId, String repoLogsPath, String portalLogPath,
|
||||||
|
String portalMatomoID) throws Exception {
|
||||||
|
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
|
||||||
|
|
||||||
|
Date date = currDay.getTime();
|
||||||
|
logger.info("Downloading logs for repoid " + siteId + " and for " + sdf.format(date));
|
||||||
|
|
||||||
|
String period = "&period=day&date=" + sdf.format(date);
|
||||||
|
String outFolder = "";
|
||||||
|
if (siteId == Integer.parseInt(portalMatomoID)) {
|
||||||
|
outFolder = portalLogPath;
|
||||||
|
} else {
|
||||||
|
outFolder = repoLogsPath;
|
||||||
|
}
|
||||||
|
|
||||||
|
String baseApiUrl = getPiwikLogUrl() + APImethod + "&idSite=" + siteId + period + format
|
||||||
|
+ "&expanded=5&filter_limit=1000&token_auth=" + tokenAuth;
|
||||||
|
String content = "";
|
||||||
|
|
||||||
|
int i = 0;
|
||||||
|
|
||||||
|
JSONParser parser = new JSONParser();
|
||||||
|
StringBuffer totalContent = new StringBuffer();
|
||||||
|
FileSystem fs = FileSystem.get(new Configuration());
|
||||||
|
|
||||||
|
do {
|
||||||
|
int writtenBytes = 0;
|
||||||
|
String apiUrl = baseApiUrl;
|
||||||
|
|
||||||
|
if (i > 0) {
|
||||||
|
apiUrl += "&filter_offset=" + (i * 1000);
|
||||||
|
}
|
||||||
|
|
||||||
|
content = getJson(apiUrl);
|
||||||
|
if (content.length() == 0 || content.equals("[]")) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
FSDataOutputStream fin = fs
|
||||||
|
.create(
|
||||||
|
new Path(outFolder + "/" + siteId + "_Piwiklog" + sdf.format((date)) + "_offset_" + i
|
||||||
|
+ ".json"),
|
||||||
|
true);
|
||||||
|
JSONArray jsonArray = (JSONArray) parser.parse(content);
|
||||||
|
for (Object aJsonArray : jsonArray) {
|
||||||
|
JSONObject jsonObjectRaw = (JSONObject) aJsonArray;
|
||||||
|
byte[] jsonObjectRawBytes = jsonObjectRaw.toJSONString().getBytes();
|
||||||
|
fin.write(jsonObjectRawBytes);
|
||||||
|
fin.writeChar('\n');
|
||||||
|
|
||||||
|
writtenBytes += jsonObjectRawBytes.length + 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
fin.close();
|
||||||
|
System.out
|
||||||
|
.println(
|
||||||
|
Thread.currentThread().getName() + " (Finished writing) Wrote " + writtenBytes
|
||||||
|
+ " bytes. Filename: " + siteId + "_Piwiklog" + sdf.format((date)) + "_offset_" + i
|
||||||
|
+ ".json");
|
||||||
|
|
||||||
|
i++;
|
||||||
|
} while (true);
|
||||||
|
|
||||||
|
fs.close();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void GetOpenAIRELogs(String repoLogsPath, String portalLogPath, String portalMatomoID) throws Exception {
|
||||||
|
|
||||||
|
Statement statement = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
|
||||||
|
|
||||||
|
ResultSet rs = statement
|
||||||
|
.executeQuery(
|
||||||
|
"SELECT distinct piwik_id from " + ConnectDB.getStatsDBSchema()
|
||||||
|
+ ".datasource where piwik_id is not null and piwik_id <> 0 order by piwik_id");
|
||||||
|
|
||||||
|
// Getting all the piwikIds in a list for logging reasons & limiting the list
// to the max number of piwikIds
|
||||||
|
List<Integer> piwikIdToVisit = new ArrayList<Integer>();
|
||||||
|
while (rs.next()) {
|
||||||
|
piwikIdToVisit.add(rs.getInt(1));
|
||||||
|
}
|
||||||
|
logger.info("Found the following piwikIds for download: " + piwikIdToVisit);
|
||||||
|
|
||||||
|
if (ExecuteWorkflow.numberOfPiwikIdsToDownload > 0
|
||||||
|
&& ExecuteWorkflow.numberOfPiwikIdsToDownload <= piwikIdToVisit.size()) {
|
||||||
|
logger.info("Trimming piwikIds list to the size of: " + ExecuteWorkflow.numberOfPiwikIdsToDownload);
|
||||||
|
piwikIdToVisit = piwikIdToVisit.subList(0, ExecuteWorkflow.numberOfPiwikIdsToDownload);
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info("Downloading from repos with the followins piwikIds: " + piwikIdToVisit);
|
||||||
|
|
||||||
|
// ExecutorService executor = Executors.newFixedThreadPool(ExecuteWorkflow.numberOfDownloadThreads);
|
||||||
|
for (int siteId : piwikIdToVisit) {
|
||||||
|
// Setting the starting period
|
||||||
|
Calendar start = (Calendar) ExecuteWorkflow.startingLogPeriod.clone();
|
||||||
|
logger.info("Starting period for log download: " + sdf.format(start.getTime()));
|
||||||
|
|
||||||
|
// Setting the ending period (last day of the month)
|
||||||
|
// Calendar end = (Calendar) ExecuteWorkflow.endingLogPeriod.clone();
|
||||||
|
Calendar end = Calendar.getInstance();
|
||||||
|
end.add(Calendar.DAY_OF_MONTH, -1);
|
||||||
|
// end.add(Calendar.MONTH, +1);
|
||||||
|
// end.add(Calendar.DAY_OF_MONTH, -1);
|
||||||
|
logger.info("Ending period for log download: " + sdf.format(end.getTime()));
|
||||||
|
|
||||||
|
logger.info("Now working on piwikId: " + siteId);
|
||||||
|
|
||||||
|
PreparedStatement st = ConnectDB.DB_HIVE_CONNECTION
|
||||||
|
.prepareStatement(
|
||||||
|
"SELECT max(timestamp) FROM " + ConnectDB.getUsageStatsDBSchema()
|
||||||
|
+ ".piwiklog WHERE source=?");
|
||||||
|
st.setInt(1, siteId);
|
||||||
|
Date dateMax = null;
|
||||||
|
ResultSet rs_date = st.executeQuery();
|
||||||
|
while (rs_date.next()) {
|
||||||
|
logger.info("Found max date: " + rs_date.getString(1) + " for repository " + siteId);
|
||||||
|
|
||||||
|
if (rs_date.getString(1) != null && !rs_date.getString(1).equals("null")
|
||||||
|
&& !rs_date.getString(1).equals("")) {
|
||||||
|
start.setTime(sdf.parse(rs_date.getString(1)));
|
||||||
|
dateMax = sdf.parse(rs_date.getString(1));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
rs_date.close();
|
||||||
|
|
||||||
|
for (Calendar currDay = (Calendar) start.clone(); currDay.before(end); currDay.add(Calendar.DATE, 1)) {
|
||||||
|
// logger.info("Date used " + currDay.toString());
|
||||||
|
// Runnable worker = new WorkerThread(currDay, siteId, repoLogsPath, portalLogPath, portalMatomoID);
|
||||||
|
// executor.execute(worker);// calling execute method of ExecutorService
|
||||||
|
logger.info("Date used " + currDay.getTime().toString());
|
||||||
|
|
||||||
|
if (dateMax != null && currDay.getTime().compareTo(dateMax) <= 0) {
|
||||||
|
logger.info("Date found in logs " + dateMax + " and not downloanding Matomo logs for " + siteId);
|
||||||
|
} else {
|
||||||
|
GetOpenAIRELogsForDate(currDay, siteId, repoLogsPath, portalLogPath, portalMatomoID);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// executor.shutdown();
|
||||||
|
// while (!executor.isTerminated()) {
|
||||||
|
// }
|
||||||
|
// System.out.println("Finished all threads");
|
||||||
|
}
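// Illustrative sketch (not part of the original patch): the commented-out lines above outline a
// multi-threaded download path. Wired together inside the per-siteId loop it would look roughly
// like this, assuming ExecuteWorkflow.numberOfDownloadThreads holds the pool size:
// ExecutorService executor = Executors.newFixedThreadPool(ExecuteWorkflow.numberOfDownloadThreads);
// for (Calendar currDay = (Calendar) start.clone(); currDay.before(end); currDay.add(Calendar.DATE, 1)) {
// 	Runnable worker = new WorkerThread(currDay, siteId, repoLogsPath, portalLogPath, portalMatomoID);
// 	executor.execute(worker);
// }
// executor.shutdown();
// while (!executor.isTerminated()) {
// }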
|
||||||
|
|
||||||
|
public void GetOpenAIRELogsForDate(Calendar currDay, int siteId, String repoLogsPath, String portalLogPath,
|
||||||
|
String portalMatomoID) throws Exception {
|
||||||
|
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
|
||||||
|
|
||||||
|
Date date = currDay.getTime();
|
||||||
|
logger.info("Downloading logs for repoid " + siteId + " and for " + sdf.format(date));
|
||||||
|
|
||||||
|
String period = "&period=day&date=" + sdf.format(date);
|
||||||
|
String outFolder = "";
|
||||||
|
if (siteId == Integer.parseInt(portalMatomoID)) {
|
||||||
|
outFolder = portalLogPath;
|
||||||
|
} else {
|
||||||
|
outFolder = repoLogsPath;
|
||||||
|
}
|
||||||
|
|
||||||
|
String baseApiUrl = getPiwikLogUrl() + APImethod + "&idSite=" + siteId + period + format
|
||||||
|
+ "&expanded=5&filter_limit=1000&token_auth=" + tokenAuth;
|
||||||
|
String content = "";
|
||||||
|
|
||||||
|
int i = 0;
|
||||||
|
|
||||||
|
JSONParser parser = new JSONParser();
|
||||||
|
StringBuffer totalContent = new StringBuffer();
|
||||||
|
FileSystem fs = FileSystem.get(new Configuration());
|
||||||
|
|
||||||
|
do {
|
||||||
|
int writtenBytes = 0;
|
||||||
|
String apiUrl = baseApiUrl;
|
||||||
|
|
||||||
|
if (i > 0) {
|
||||||
|
apiUrl += "&filter_offset=" + (i * 1000);
|
||||||
|
}
|
||||||
|
|
||||||
|
content = getJson(apiUrl);
|
||||||
|
if (content.length() == 0 || content.equals("[]")) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
FSDataOutputStream fin = fs
|
||||||
|
.create(
|
||||||
|
new Path(outFolder + "/" + siteId + "_Piwiklog" + sdf.format((date)) + "_offset_" + i
|
||||||
|
+ ".json"),
|
||||||
|
true);
|
||||||
|
JSONArray jsonArray = (JSONArray) parser.parse(content);
|
||||||
|
for (Object aJsonArray : jsonArray) {
|
||||||
|
JSONObject jsonObjectRaw = (JSONObject) aJsonArray;
|
||||||
|
byte[] jsonObjectRawBytes = jsonObjectRaw.toJSONString().getBytes();
|
||||||
|
fin.write(jsonObjectRawBytes);
|
||||||
|
fin.writeChar('\n');
|
||||||
|
|
||||||
|
writtenBytes += jsonObjectRawBytes.length + 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
fin.close();
|
||||||
|
System.out
|
||||||
|
.println(
|
||||||
|
Thread.currentThread().getName() + " (Finished writing) Wrote " + writtenBytes
|
||||||
|
+ " bytes. Filename: " + siteId + "_Piwiklog" + sdf.format((date)) + "_offset_" + i
|
||||||
|
+ ".json");
|
||||||
|
|
||||||
|
i++;
|
||||||
|
} while (true);
|
||||||
|
|
||||||
|
fs.close();
|
||||||
|
}
|
||||||
|
}
@ -0,0 +1,835 @@
package eu.dnetlib.oa.graph.usagerawdata.export;
|
||||||
|
|
||||||
|
import java.io.*;
|
||||||
|
import java.net.URLDecoder;
|
||||||
|
import java.sql.Connection;
|
||||||
|
import java.sql.SQLException;
|
||||||
|
import java.sql.Statement;
|
||||||
|
import java.text.SimpleDateFormat;
|
||||||
|
import java.util.*;
|
||||||
|
|
||||||
|
import org.apache.hadoop.conf.Configuration;
|
||||||
|
import org.apache.hadoop.fs.FileSystem;
|
||||||
|
import org.apache.hadoop.fs.LocatedFileStatus;
|
||||||
|
import org.apache.hadoop.fs.Path;
|
||||||
|
import org.apache.hadoop.fs.RemoteIterator;
|
||||||
|
import org.slf4j.Logger;
|
||||||
|
import org.slf4j.LoggerFactory;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @author D. Pierrakos, S. Zoupanos
|
||||||
|
*/
|
||||||
|
public class PiwikStatsDB {
|
||||||
|
|
||||||
|
private String logPath;
|
||||||
|
private String logRepoPath;
|
||||||
|
private String logPortalPath;
|
||||||
|
|
||||||
|
private Statement stmt = null;
|
||||||
|
|
||||||
|
private static final Logger logger = LoggerFactory.getLogger(PiwikStatsDB.class);
|
||||||
|
|
||||||
|
private String CounterRobotsURL;
|
||||||
|
private ArrayList robotsList;
|
||||||
|
|
||||||
|
public PiwikStatsDB(String logRepoPath, String logPortalPath) throws Exception {
|
||||||
|
this.logRepoPath = logRepoPath;
|
||||||
|
this.logPortalPath = logPortalPath;
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
public void reCreateLogDirs() throws IllegalArgumentException, IOException {
|
||||||
|
FileSystem dfs = FileSystem.get(new Configuration());
|
||||||
|
|
||||||
|
logger.info("Deleting repoLog directory: " + ExecuteWorkflow.repoLogPath);
|
||||||
|
dfs.delete(new Path(ExecuteWorkflow.repoLogPath), true);
|
||||||
|
|
||||||
|
logger.info("Deleting portalLog directory: " + ExecuteWorkflow.portalLogPath);
|
||||||
|
dfs.delete(new Path(ExecuteWorkflow.portalLogPath), true);
|
||||||
|
|
||||||
|
logger.info("Creating repoLog directory: " + ExecuteWorkflow.repoLogPath);
|
||||||
|
dfs.mkdirs(new Path(ExecuteWorkflow.repoLogPath));
|
||||||
|
|
||||||
|
logger.info("Creating portalLog directory: " + ExecuteWorkflow.portalLogPath);
|
||||||
|
dfs.mkdirs(new Path(ExecuteWorkflow.portalLogPath));
|
||||||
|
}
|
||||||
|
|
||||||
|
public void recreateDBAndTables() throws Exception {
|
||||||
|
this.createDatabase();
|
||||||
|
this.createTables();
|
||||||
|
// The piwiklog table is not needed since it is built
|
||||||
|
// on top of JSON files
|
||||||
|
//////////// this.createTmpTables();
|
||||||
|
}
|
||||||
|
|
||||||
|
public ArrayList getRobotsList() {
|
||||||
|
return robotsList;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setRobotsList(ArrayList robotsList) {
|
||||||
|
this.robotsList = robotsList;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getCounterRobotsURL() {
|
||||||
|
return CounterRobotsURL;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setCounterRobotsURL(String CounterRobotsURL) {
|
||||||
|
this.CounterRobotsURL = CounterRobotsURL;
|
||||||
|
}
|
||||||
|
|
||||||
|
private void createDatabase() throws Exception {
|
||||||
|
try {
|
||||||
|
stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
|
||||||
|
logger.info("Dropping usagestats DB: " + ConnectDB.getUsageStatsDBSchema());
|
||||||
|
String dropDatabase = "DROP DATABASE IF EXISTS " + ConnectDB.getUsageStatsDBSchema() + " CASCADE";
|
||||||
|
stmt.executeUpdate(dropDatabase);
|
||||||
|
|
||||||
|
} catch (Exception e) {
|
||||||
|
logger.error("Failed to drop database: " + e);
|
||||||
|
throw new Exception("Failed to drop database: " + e.toString(), e);
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
|
||||||
|
logger.info("Creating usagestats DB: " + ConnectDB.getUsageStatsDBSchema());
|
||||||
|
String createDatabase = "CREATE DATABASE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema();
|
||||||
|
stmt.executeUpdate(createDatabase);
|
||||||
|
|
||||||
|
} catch (Exception e) {
|
||||||
|
logger.error("Failed to create database: " + e);
|
||||||
|
throw new Exception("Failed to create database: " + e.toString(), e);
|
||||||
|
}
|
||||||
|
}
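// Note: DROP DATABASE ... CASCADE removes every table in the usagestats schema, so
// recreateDBAndTables() performs a full rebuild rather than an incremental update.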
|
||||||
|
|
||||||
|
private void createTables() throws Exception {
|
||||||
|
try {
|
||||||
|
stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
|
||||||
|
// Create Piwiklog table - This table should exist
|
||||||
|
String sqlCreateTablePiwikLog = "CREATE TABLE IF NOT EXISTS "
|
||||||
|
+ ConnectDB.getUsageStatsDBSchema()
|
||||||
|
+ ".piwiklog(source INT, id_visit STRING, country STRING, action STRING, url STRING, "
|
||||||
|
+ "entity_id STRING, source_item_type STRING, timestamp STRING, referrer_name STRING, agent STRING) "
|
||||||
|
+ "clustered by (source, id_visit, action, timestamp, entity_id) "
|
||||||
|
+ "into 100 buckets stored as orc tblproperties('transactional'='true')";
|
||||||
|
stmt.executeUpdate(sqlCreateTablePiwikLog);

			// String dropT = "TRUNCATE TABLE "
			// + ConnectDB.getUsageStatsDBSchema()
			// + ".piwiklog ";
			// stmt.executeUpdate(dropT);
			// logger.info("truncated piwiklog");

			/////////////////////////////////////////
			// Rule for duplicate inserts @ piwiklog
			/////////////////////////////////////////
			String sqlCreateTablePortalLog = "CREATE TABLE IF NOT EXISTS "
				+ ConnectDB.getUsageStatsDBSchema()
				+ ".process_portal_log(source INT, id_visit STRING, country STRING, action STRING, url STRING, "
				+ "entity_id STRING, source_item_type STRING, timestamp STRING, referrer_name STRING, agent STRING) "
				+ "clustered by (source, id_visit, timestamp) into 100 buckets stored as orc tblproperties('transactional'='true')";
			stmt.executeUpdate(sqlCreateTablePortalLog);

			//////////////////////////////////////////////////
			// Rule for duplicate inserts @ process_portal_log
			//////////////////////////////////////////////////
			stmt.close();
			ConnectDB.getHiveConnection().close();

		} catch (Exception e) {
			logger.error("Failed to create tables: " + e);
			throw new Exception("Failed to create tables: " + e.toString(), e);
		}
	}

	public void processLogs() throws Exception {
		try {
			ReadCounterRobotsList counterRobots = new ReadCounterRobotsList(this.getCounterRobotsURL());
			this.robotsList = counterRobots.getRobotsPatterns();

			logger.info("Processing repository logs");
			processRepositoryLog();
			logger.info("Repository logs process done");

			logger.info("Removing double clicks");
			removeDoubleClicks();
			logger.info("Removing double clicks done");

			logger.info("Cleaning oai");
			cleanOAI();
			logger.info("Cleaning oai done");

			logger.info("Processing portal logs");
			processPortalLog();
			logger.info("Portal logs process done");

			logger.info("Processing portal usagestats");
			portalLogs();
			logger.info("Portal usagestats process done");

			logger.info("Updating Production Tables");
			updateProdTables();
			logger.info("Updated Production Tables");

			logger.info("Create Pedocs Tables");
			createPedocsOldUsageData();
			logger.info("Pedocs Tables Created");

		} catch (Exception e) {
			logger.error("Failed to process logs: " + e);
			throw new Exception("Failed to process logs: " + e.toString(), e);
		}
	}

	public void processRepositoryLog() throws Exception {

		Statement stmt = ConnectDB.getHiveConnection().createStatement();
		ConnectDB.getHiveConnection().setAutoCommit(false);

		logger.info("Adding JSON Serde jar");
		stmt.executeUpdate("add jar /usr/share/cmf/common_jars/hive-hcatalog-core-1.1.0-cdh5.14.0.jar");
		logger.info("Added JSON Serde jar");

		logger.info("Dropping piwiklogtmp_json table");
		String drop_piwiklogtmp_json = "DROP TABLE IF EXISTS "
			+ ConnectDB.getUsageStatsDBSchema()
			+ ".piwiklogtmp_json";
		stmt.executeUpdate(drop_piwiklogtmp_json);
		logger.info("Dropped piwiklogtmp_json table");

		logger.info("Creating piwiklogtmp_json");
		String create_piwiklogtmp_json = "CREATE EXTERNAL TABLE IF NOT EXISTS "
			+ ConnectDB.getUsageStatsDBSchema()
			+ ".piwiklogtmp_json(\n"
			+ " `idSite` STRING,\n"
			+ " `idVisit` STRING,\n"
			+ " `country` STRING,\n"
			+ " `referrerName` STRING,\n"
			+ " `browser` STRING,\n"
			+ " `actionDetails` ARRAY<\n"
			+ " struct<\n"
			+ " type: STRING,\n"
			+ " url: STRING,\n"
			+ " `customVariables`: struct<\n"
			+ " `1`: struct<\n"
			+ " `customVariablePageValue1`: STRING\n"
			+ " >\n"
			+ " >,\n"
			+ " timestamp: String\n"
			+ " >\n"
			+ " >\n"
			+ ")\n"
			+ "ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe'\n"
			+ "LOCATION '" + ExecuteWorkflow.repoLogPath + "'\n"
			+ "TBLPROPERTIES (\"transactional\"=\"false\")";
		stmt.executeUpdate(create_piwiklogtmp_json);
		logger.info("Created piwiklogtmp_json");

		logger.info("Dropping piwiklogtmp table");
		String drop_piwiklogtmp = "DROP TABLE IF EXISTS "
			+ ConnectDB.getUsageStatsDBSchema()
			+ ".piwiklogtmp";
		stmt.executeUpdate(drop_piwiklogtmp);
		logger.info("Dropped piwiklogtmp");

		logger.info("Creating piwiklogtmp");
		String create_piwiklogtmp = "CREATE TABLE "
			+ ConnectDB.getUsageStatsDBSchema()
			+ ".piwiklogtmp (source BIGINT, id_Visit STRING, country STRING, action STRING, url STRING, "
			+ "entity_id STRING, source_item_type STRING, timestamp STRING, referrer_name STRING, agent STRING) "
			+ "clustered by (source) into 100 buckets stored as orc tblproperties('transactional'='true')";
		stmt.executeUpdate(create_piwiklogtmp);
		logger.info("Created piwiklogtmp");

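		// Each piwiklogtmp_json row holds a whole visit with an array of actionDetails;
		// LATERAL VIEW explode() below flattens that array so piwiklogtmp gets one row per
		// action of the visit, keyed by the repository item id stored in customVariablePageValue1.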
logger.info("Inserting into piwiklogtmp");
|
||||||
|
String insert_piwiklogtmp = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp "
|
||||||
|
+ "SELECT DISTINCT cast(idSite as BIGINT) as source, idVisit as id_Visit, country, "
|
||||||
|
+ "actiondetail.type as action, actiondetail.url as url, "
|
||||||
|
+ "actiondetail.customVariables.`1`.`customVariablePageValue1` as entity_id, "
|
||||||
|
+ "'repItem' as source_item_type, from_unixtime(cast(actiondetail.timestamp as BIGINT)) as timestamp, "
|
||||||
|
+ "referrerName as referrer_name, browser as agent\n"
|
||||||
|
+ "FROM " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp_json\n"
|
||||||
|
+ "LATERAL VIEW explode(actiondetails) actiondetailsTable AS actiondetail";
|
||||||
|
stmt.executeUpdate(insert_piwiklogtmp);
|
||||||
|
logger.info("Inserted into piwiklogtmp");
|
||||||
|
|
||||||
|
stmt.close();
|
||||||
|
}
|
||||||
|
|
||||||
|
	public void removeDoubleClicks() throws Exception {
		Statement stmt = ConnectDB.getHiveConnection().createStatement();
		ConnectDB.getHiveConnection().setAutoCommit(false);

		logger.info("Cleaning download double clicks");
		// clean download double clicks
		String sql = "DELETE from " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp "
			+ "WHERE EXISTS (\n"
			+ "SELECT DISTINCT p1.source, p1.id_visit, p1.action, p1.entity_id, p1.timestamp \n"
			+ "FROM " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp p1, "
			+ ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp p2\n"
			+ "WHERE p1.source=p2.source AND p1.id_visit=p2.id_visit AND p1.entity_id=p2.entity_id \n"
			+ "AND p1.action=p2.action AND p1.action='download' AND p1.timestamp!=p2.timestamp \n"
			+ "AND p1.timestamp<p2.timestamp AND ((unix_timestamp(p2.timestamp)-unix_timestamp(p1.timestamp))/60)<30 \n"
			+ "AND piwiklogtmp.source=p1.source AND piwiklogtmp.id_visit=p1.id_visit \n"
			+ "AND piwiklogtmp.action=p1.action AND piwiklogtmp.entity_id=p1.entity_id AND piwiklogtmp.timestamp=p1.timestamp)";
		stmt.executeUpdate(sql);
		logger.info("Cleaned download double clicks");

		// clean view double clicks
		logger.info("Cleaning action double clicks");
		sql = "DELETE from " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp "
			+ "WHERE EXISTS (\n"
			+ "SELECT DISTINCT p1.source, p1.id_visit, p1.action, p1.entity_id, p1.timestamp \n"
			+ "FROM " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp p1, "
			+ ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp p2\n"
			+ "WHERE p1.source=p2.source AND p1.id_visit=p2.id_visit AND p1.entity_id=p2.entity_id \n"
			+ "AND p1.action=p2.action AND p1.action='action' AND p1.timestamp!=p2.timestamp \n"
			+ "AND p1.timestamp<p2.timestamp AND (unix_timestamp(p2.timestamp)-unix_timestamp(p1.timestamp))<10 \n"
			+ "AND piwiklogtmp.source=p1.source AND piwiklogtmp.id_visit=p1.id_visit \n"
			+ "AND piwiklogtmp.action=p1.action AND piwiklogtmp.entity_id=p1.entity_id AND piwiklogtmp.timestamp=p1.timestamp)";
		stmt.executeUpdate(sql);
		logger.info("Cleaned action double clicks");
		stmt.close();
	}

	public void processPortalLog() throws Exception {
		Statement stmt = ConnectDB.getHiveConnection().createStatement();
		ConnectDB.getHiveConnection().setAutoCommit(false);

		logger.info("Adding JSON Serde jar");
		stmt.executeUpdate("add jar /usr/share/cmf/common_jars/hive-hcatalog-core-1.1.0-cdh5.14.0.jar");
		logger.info("Added JSON Serde jar");

		logger.info("Dropping process_portal_log_tmp_json table");
		String drop_process_portal_log_tmp_json = "DROP TABLE IF EXISTS "
			+ ConnectDB.getUsageStatsDBSchema()
			+ ".process_portal_log_tmp_json";
		stmt.executeUpdate(drop_process_portal_log_tmp_json);
		logger.info("Dropped process_portal_log_tmp_json table");

		logger.info("Creating process_portal_log_tmp_json");
		String create_process_portal_log_tmp_json = "CREATE EXTERNAL TABLE IF NOT EXISTS "
			+ ConnectDB.getUsageStatsDBSchema() + ".process_portal_log_tmp_json("
			+ " `idSite` STRING,\n"
			+ " `idVisit` STRING,\n"
			+ " `country` STRING,\n"
			+ " `referrerName` STRING,\n"
			+ " `browser` STRING,\n"
			+ " `actionDetails` ARRAY<\n"
			+ " struct<\n"
			+ " type: STRING,\n"
			+ " url: STRING,\n"
			+ " timestamp: String\n"
			+ " >\n"
			+ " >\n"
			+ ")\n"
			+ "ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe'\n"
			+ "LOCATION '" + ExecuteWorkflow.portalLogPath + "'\n"
			+ "TBLPROPERTIES (\"transactional\"=\"false\")";
		stmt.executeUpdate(create_process_portal_log_tmp_json);
		logger.info("Created process_portal_log_tmp_json");

		logger.info("Dropping process_portal_log_tmp table");
		String drop_process_portal_log_tmp = "DROP TABLE IF EXISTS "
			+ ConnectDB.getUsageStatsDBSchema()
			+ ".process_portal_log_tmp";
		stmt.executeUpdate(drop_process_portal_log_tmp);
		logger.info("Dropped process_portal_log_tmp");

		logger.info("Creating process_portal_log_tmp");
		String create_process_portal_log_tmp = "CREATE TABLE "
			+ ConnectDB.getUsageStatsDBSchema()
			+ ".process_portal_log_tmp (source BIGINT, id_visit STRING, country STRING, action STRING, url STRING, "
			+ "entity_id STRING, source_item_type STRING, timestamp STRING, referrer_name STRING, agent STRING) "
			+ "clustered by (source, id_visit, timestamp) into 100 buckets stored as orc tblproperties('transactional'='true')";
		stmt.executeUpdate(create_process_portal_log_tmp);
		logger.info("Created process_portal_log_tmp");

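		// The CASE expressions below extract the entity identifier from the portal URL
		// (datasourceId/datasource/datasourceFilter, articleId, datasetId, projectId,
		// organizationId) and classify the row as datasource, result, project or organization
		// accordingly; URLs without a recognised parameter get an empty entity_id.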
logger.info("Inserting into process_portal_log_tmp");
|
||||||
|
String insert_process_portal_log_tmp = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema()
|
||||||
|
+ ".process_portal_log_tmp "
|
||||||
|
+ "SELECT DISTINCT cast(idSite as BIGINT) as source, idVisit as id_Visit, country, actiondetail.type as action, "
|
||||||
|
+ "actiondetail.url as url, "
|
||||||
|
+ "CASE\n"
|
||||||
|
+ " WHEN (actiondetail.url like '%datasourceId=%') THEN split(actiondetail.url,'datasourceId=')[1] "
|
||||||
|
+ " WHEN (actiondetail.url like '%datasource=%') THEN split(actiondetail.url,'datasource=')[1] "
|
||||||
|
+ " WHEN (actiondetail.url like '%datasourceFilter=%') THEN split(actiondetail.url,'datasourceFilter=')[1] "
|
||||||
|
+ " WHEN (actiondetail.url like '%articleId=%') THEN split(actiondetail.url,'articleId=')[1] "
|
||||||
|
+ " WHEN (actiondetail.url like '%datasetId=%') THEN split(actiondetail.url,'datasetId=')[1] "
|
||||||
|
+ " WHEN (actiondetail.url like '%projectId=%') THEN split(actiondetail.url,'projectId=')[1] "
|
||||||
|
+ " WHEN (actiondetail.url like '%organizationId=%') THEN split(actiondetail.url,'organizationId=')[1] "
|
||||||
|
+ " ELSE '' "
|
||||||
|
+ "END AS entity_id, "
|
||||||
|
+ "CASE "
|
||||||
|
+ " WHEN (actiondetail.url like '%datasourceId=%') THEN 'datasource' "
|
||||||
|
+ " WHEN (actiondetail.url like '%datasource=%') THEN 'datasource' "
|
||||||
|
+ " WHEN (actiondetail.url like '%datasourceFilter=%') THEN 'datasource' "
|
||||||
|
+ " WHEN (actiondetail.url like '%articleId=%') THEN 'result' "
|
||||||
|
+ " WHEN (actiondetail.url like '%datasetId=%') THEN 'result' "
|
||||||
|
+ " WHEN (actiondetail.url like '%projectId=%') THEN 'project' "
|
||||||
|
+ " WHEN (actiondetail.url like '%organizationId=%') THEN 'organization' "
|
||||||
|
+ " ELSE '' "
|
||||||
|
+ "END AS source_item_type, "
|
||||||
|
+ "from_unixtime(cast(actiondetail.timestamp as BIGINT)) as timestamp, referrerName as referrer_name, "
|
||||||
|
+ "browser as agent "
|
||||||
|
+ "FROM " + ConnectDB.getUsageStatsDBSchema() + ".process_portal_log_tmp_json "
|
||||||
|
+ "LATERAL VIEW explode(actiondetails) actiondetailsTable AS actiondetail";
|
||||||
|
stmt.executeUpdate(insert_process_portal_log_tmp);
|
||||||
|
logger.info("Inserted into process_portal_log_tmp");
|
||||||
|
|
||||||
|
stmt.close();
|
||||||
|
}
|
||||||
|
|
||||||
|
	public void portalLogs() throws SQLException {
		Connection con = ConnectDB.getHiveConnection();
		Statement stmt = con.createStatement();
		con.setAutoCommit(false);

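		// Each step below copies portal rows into piwiklogtmp only when the extracted entity_id
		// matches a known identifier in the stats schema (result_oids, datasource_oids,
		// project_oids), relabelling source_item_type accordingly.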
logger.info("PortalStats - Step 1");
|
||||||
|
String sql = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp "
|
||||||
|
+ "SELECT DISTINCT source, id_visit, country, action, url, entity_id, 'oaItem', `timestamp`, referrer_name, agent "
|
||||||
|
+ "FROM " + ConnectDB.getUsageStatsDBSchema() + ".process_portal_log_tmp "
|
||||||
|
+ "WHERE process_portal_log_tmp.entity_id IS NOT NULL AND process_portal_log_tmp.entity_id "
|
||||||
|
+ "IN (SELECT roid.id FROM " + ConnectDB.getStatsDBSchema()
|
||||||
|
+ ".result_oids roid WHERE roid.id IS NOT NULL)";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
stmt.close();
|
||||||
|
|
||||||
|
logger.info("PortalStats - Step 2");
|
||||||
|
stmt = con.createStatement();
|
||||||
|
sql = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp "
|
||||||
|
+ "SELECT DISTINCT source, id_visit, country, action, url, entity_id, 'datasource', `timestamp`, referrer_name, agent "
|
||||||
|
+ "FROM " + ConnectDB.getUsageStatsDBSchema() + ".process_portal_log_tmp "
|
||||||
|
+ "WHERE process_portal_log_tmp.entity_id IS NOT NULL AND process_portal_log_tmp.entity_id "
|
||||||
|
+ "IN (SELECT roid.id FROM " + ConnectDB.getStatsDBSchema()
|
||||||
|
+ ".datasource_oids roid WHERE roid.id IS NOT NULL)";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
stmt.close();
|
||||||
|
|
||||||
|
/*
|
||||||
|
* logger.info("PortalStats - Step 3"); stmt = con.createStatement(); sql = "INSERT INTO " +
|
||||||
|
* ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp " +
|
||||||
|
* "SELECT DISTINCT source, id_visit, country, action, url, entity_id, 'organization', `timestamp`, referrer_name, agent "
|
||||||
|
* + "FROM " + ConnectDB.getUsageStatsDBSchema() + ".process_portal_log_tmp " +
|
||||||
|
* "WHERE process_portal_log_tmp.entity_id IS NOT NULL AND process_portal_log_tmp.entity_id " +
|
||||||
|
* "IN (SELECT roid.id FROM " + ConnectDB.getStatsDBSchema() +
|
||||||
|
* ".organization_oids roid WHERE roid.id IS NOT NULL)"; // stmt.executeUpdate(sql); stmt.close();
|
||||||
|
*/
|
||||||
|
logger.info("PortalStats - Step 3");
|
||||||
|
stmt = con.createStatement();
|
||||||
|
sql = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp "
|
||||||
|
+ "SELECT DISTINCT source, id_visit, country, action, url, entity_id, 'project', `timestamp`, referrer_name, agent "
|
||||||
|
+ "FROM " + ConnectDB.getUsageStatsDBSchema() + ".process_portal_log_tmp "
|
||||||
|
+ "WHERE process_portal_log_tmp.entity_id IS NOT NULL AND process_portal_log_tmp.entity_id "
|
||||||
|
+ "IN (SELECT roid.id FROM " + ConnectDB.getStatsDBSchema()
|
||||||
|
+ ".project_oids roid WHERE roid.id IS NOT NULL)";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
stmt.close();
|
||||||
|
|
||||||
|
con.close();
|
||||||
|
}
|
||||||
|
|
||||||
|
private void cleanOAI() throws Exception {
|
||||||
|
ConnectDB.getHiveConnection().setAutoCommit(false);
|
||||||
|
|
||||||
|
logger.info("Cleaning oai - Step 1");
|
||||||
|
stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
String sql = "UPDATE " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp "
|
||||||
|
+ "SET entity_id = regexp_replace(entity_id, '^oai:repositorio.chlc.min-saude.pt/',"
|
||||||
|
+ "'oai:repositorio.chlc.min-saude.pt:') WHERE entity_id LIKE 'oai:repositorio.chlc.min-saude.pt/%'";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
stmt.close();
|
||||||
|
|
||||||
|
logger.info("Cleaning oai - Step 2");
|
||||||
|
stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
sql = "UPDATE " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp "
|
||||||
|
+ "SET entity_id = regexp_replace(entity_id, '^oai:repositorio.hospitaldebraga.pt/',"
|
||||||
|
+ "'oai:repositorio.hospitaldebraga.pt:') WHERE entity_id LIKE 'oai:repositorio.hospitaldebraga.pt/%'";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
stmt.close();
|
||||||
|
|
||||||
|
logger.info("Cleaning oai - Step 3");
|
||||||
|
stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
sql = "UPDATE " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp "
|
||||||
|
+ "SET entity_id = regexp_replace(entity_id, '^oai:repositorio.ipl.pt/',"
|
||||||
|
+ "'oai:repositorio.ipl.pt:') WHERE entity_id LIKE 'oai:repositorio.ipl.pt/%'";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
stmt.close();
|
||||||
|
|
||||||
|
logger.info("Cleaning oai - Step 4");
|
||||||
|
stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
sql = "UPDATE " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp "
|
||||||
|
+ "SET entity_id = regexp_replace(entity_id, '^oai:bibliotecadigital.ipb.pt/',"
|
||||||
|
+ "'oai:bibliotecadigital.ipb.pt:') WHERE entity_id LIKE 'oai:bibliotecadigital.ipb.pt/%'";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
stmt.close();
|
||||||
|
|
||||||
|
logger.info("Cleaning oai - Step 5");
|
||||||
|
stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
sql = "UPDATE " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp "
|
||||||
|
+ "SET entity_id = regexp_replace(entity_id, '^oai:repositorio.ismai.pt/',"
|
||||||
|
+ "'oai:repositorio.ismai.pt:') WHERE entity_id LIKE 'oai:repositorio.ismai.pt/%'";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
stmt.close();
|
||||||
|
|
||||||
|
logger.info("Cleaning oai - Step 6");
|
||||||
|
stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
sql = "UPDATE " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp "
|
||||||
|
+ "SET entity_id = regexp_replace(entity_id, '^oai:repositorioaberto.uab.pt/',"
|
||||||
|
+ "'oai:repositorioaberto.uab.pt:') WHERE entity_id LIKE 'oai:repositorioaberto.uab.pt/%'";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
stmt.close();
|
||||||
|
|
||||||
|
logger.info("Cleaning oai - Step 7");
|
||||||
|
stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
sql = "UPDATE " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp "
|
||||||
|
+ "SET entity_id = regexp_replace(entity_id, '^oai:repositorio.uac.pt/',"
|
||||||
|
+ "'oai:repositorio.uac.pt:') WHERE entity_id LIKE 'oai:repositorio.uac.pt/%'";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
stmt.close();
|
||||||
|
|
||||||
|
logger.info("Cleaning oai - Step 8");
|
||||||
|
stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
sql = "UPDATE " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp "
|
||||||
|
+ "SET entity_id = regexp_replace(entity_id, '^oai:repositorio.insa.pt/',"
|
||||||
|
+ "'oai:repositorio.insa.pt:') WHERE entity_id LIKE 'oai:repositorio.insa.pt/%'";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
stmt.close();
|
||||||
|
|
||||||
|
logger.info("Cleaning oai - Step 9");
|
||||||
|
stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
sql = "UPDATE " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp "
|
||||||
|
+ "SET entity_id = regexp_replace(entity_id, '^oai:repositorio.ipcb.pt/',"
|
||||||
|
+ "'oai:repositorio.ipcb.pt:') WHERE entity_id LIKE 'oai:repositorio.ipcb.pt/%'";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
stmt.close();
|
||||||
|
|
||||||
|
logger.info("Cleaning oai - Step 10");
|
||||||
|
stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
sql = "UPDATE " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp "
|
||||||
|
+ "SET entity_id = regexp_replace(entity_id, '^oai:repositorio.ispa.pt/',"
|
||||||
|
+ "'oai:repositorio.ispa.pt:') WHERE entity_id LIKE 'oai:repositorio.ispa.pt/%'";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
stmt.close();
|
||||||
|
|
||||||
|
logger.info("Cleaning oai - Step 11");
|
||||||
|
stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
sql = "UPDATE " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp "
|
||||||
|
+ "SET entity_id = regexp_replace(entity_id, '^oai:repositorio.chporto.pt/',"
|
||||||
|
+ "'oai:repositorio.chporto.pt:') WHERE entity_id LIKE 'oai:repositorio.chporto.pt/%'";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
stmt.close();
|
||||||
|
|
||||||
|
logger.info("Cleaning oai - Step 12");
|
||||||
|
stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
sql = "UPDATE " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp "
|
||||||
|
+ "SET entity_id = regexp_replace(entity_id, '^oai:repositorio.ucp.pt/',"
|
||||||
|
+ "'oai:repositorio.ucp.pt:') WHERE entity_id LIKE 'oai:repositorio.ucp.pt/%'";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
stmt.close();
|
||||||
|
|
||||||
|
logger.info("Cleaning oai - Step 13");
|
||||||
|
stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
sql = "UPDATE " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp "
|
||||||
|
+ "SET entity_id = regexp_replace(entity_id, '^oai:rihuc.huc.min-saude.pt/',"
|
||||||
|
+ "'oai:rihuc.huc.min-saude.pt:') WHERE entity_id LIKE 'oai:rihuc.huc.min-saude.pt/%'";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
stmt.close();
|
||||||
|
|
||||||
|
logger.info("Cleaning oai - Step 14");
|
||||||
|
stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
sql = "UPDATE " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp "
|
||||||
|
+ "SET entity_id = regexp_replace(entity_id, '^oai:repositorio.ipv.pt/',"
|
||||||
|
+ "'oai:repositorio.ipv.pt:') WHERE entity_id LIKE 'oai:repositorio.ipv.pt/%'";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
stmt.close();
|
||||||
|
|
||||||
|
logger.info("Cleaning oai - Step 15");
|
||||||
|
stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
sql = "UPDATE " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp "
|
||||||
|
+ "SET entity_id = regexp_replace(entity_id, '^oai:www.repository.utl.pt/',"
|
||||||
|
+ "'oai:www.repository.utl.pt:') WHERE entity_id LIKE 'oai:www.repository.utl.pt/%'";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
stmt.close();
|
||||||
|
|
||||||
|
logger.info("Cleaning oai - Step 16");
|
||||||
|
stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
sql = "UPDATE " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp "
|
||||||
|
+ "SET entity_id = regexp_replace(entity_id, '^oai:run.unl.pt/',"
|
||||||
|
+ "'oai:run.unl.pt:') WHERE entity_id LIKE 'oai:run.unl.pt/%'";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
stmt.close();
|
||||||
|
|
||||||
|
logger.info("Cleaning oai - Step 17");
|
||||||
|
stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
sql = "UPDATE " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp "
|
||||||
|
+ "SET entity_id = regexp_replace(entity_id, '^oai:sapientia.ualg.pt/',"
|
||||||
|
+ "'oai:sapientia.ualg.pt:') WHERE entity_id LIKE 'oai:sapientia.ualg.pt/%'";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
stmt.close();
|
||||||
|
|
||||||
|
logger.info("Cleaning oai - Step 18");
|
||||||
|
stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
sql = "UPDATE " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp "
|
||||||
|
+ "SET entity_id = regexp_replace(entity_id, '^oai:repositorio.ipsantarem.pt/',"
|
||||||
|
+ "'oai:repositorio.ipsantarem.pt:') WHERE entity_id LIKE 'oai:repositorio.ipsantarem.pt/%'";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
stmt.close();
|
||||||
|
|
||||||
|
logger.info("Cleaning oai - Step 19");
|
||||||
|
stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
sql = "UPDATE " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp "
|
||||||
|
+ "SET entity_id = regexp_replace(entity_id, '^oai:arca.igc.gulbenkian.pt/',"
|
||||||
|
+ "'oai:arca.igc.gulbenkian.pt:') WHERE entity_id LIKE 'oai:arca.igc.gulbenkian.pt/%'";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
stmt.close();
|
||||||
|
|
||||||
|
logger.info("Cleaning oai - Step 20");
|
||||||
|
stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
sql = "UPDATE " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp "
|
||||||
|
+ "SET entity_id = regexp_replace(entity_id, '^oai:ubibliorum.ubi.pt/',"
|
||||||
|
+ "'oai:ubibliorum.ubi.pt:') WHERE entity_id LIKE 'oai:ubibliorum.ubi.pt/%'";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
stmt.close();
|
||||||
|
|
||||||
|
logger.info("Cleaning oai - Step 21");
|
||||||
|
stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
sql = "UPDATE " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp "
|
||||||
|
+ "SET entity_id = regexp_replace(entity_id, '^oai:digituma.uma.pt/',"
|
||||||
|
+ "'oai:digituma.uma.pt:') WHERE entity_id LIKE 'oai:digituma.uma.pt/%'";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
stmt.close();
|
||||||
|
|
||||||
|
logger.info("Cleaning oai - Step 22");
|
||||||
|
stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
sql = "UPDATE " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp "
|
||||||
|
+ "SET entity_id = regexp_replace(entity_id, '^oai:repositorio.ul.pt/',"
|
||||||
|
+ "'oai:repositorio.ul.pt:') WHERE entity_id LIKE 'oai:repositorio.ul.pt/%'";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
stmt.close();
|
||||||
|
|
||||||
|
logger.info("Cleaning oai - Step 23");
|
||||||
|
stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
sql = "UPDATE " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp "
|
||||||
|
+ "SET entity_id = regexp_replace(entity_id, '^oai:repositorio.hff.min-saude.pt/',"
|
||||||
|
+ "'oai:repositorio.hff.min-saude.pt:') WHERE entity_id LIKE 'oai:repositorio.hff.min-saude.pt/%'";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
stmt.close();
|
||||||
|
|
||||||
|
logger.info("Cleaning oai - Step 24");
|
||||||
|
stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
sql = "UPDATE " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp "
|
||||||
|
+ "SET entity_id = regexp_replace(entity_id, '^oai:repositorium.sdum.uminho.pt/',"
|
||||||
|
+ "'oai:repositorium.sdum.uminho.pt:') WHERE entity_id LIKE 'oai:repositorium.sdum.uminho.pt/%'";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
stmt.close();
|
||||||
|
|
||||||
|
logger.info("Cleaning oai - Step 25");
|
||||||
|
stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
sql = "UPDATE " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp "
|
||||||
|
+ "SET entity_id = regexp_replace(entity_id, '^oai:recipp.ipp.pt/',"
|
||||||
|
+ "'oai:recipp.ipp.pt:') WHERE entity_id LIKE 'oai:recipp.ipp.pt/%'";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
stmt.close();
|
||||||
|
|
||||||
|
logger.info("Cleaning oai - Step 26");
|
||||||
|
stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
sql = "UPDATE " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp "
|
||||||
|
+ "SET entity_id = regexp_replace(entity_id, '^oai:bdigital.ufp.pt/',"
|
||||||
|
+ "'oai:bdigital.ufp.pt:') WHERE entity_id LIKE 'oai:bdigital.ufp.pt/%'";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
stmt.close();
|
||||||
|
|
||||||
|
logger.info("Cleaning oai - Step 27");
|
||||||
|
stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
sql = "UPDATE " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp "
|
||||||
|
+ "SET entity_id = regexp_replace(entity_id, '^oai:repositorio.lneg.pt/',"
|
||||||
|
+ "'oai:repositorio.lneg.pt:') WHERE entity_id LIKE 'oai:repositorio.lneg.pt/%'";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
stmt.close();
|
||||||
|
|
||||||
|
logger.info("Cleaning oai - Step 28");
|
||||||
|
stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
sql = "UPDATE " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp "
|
||||||
|
+ "SET entity_id = regexp_replace(entity_id, '^oai:iconline.ipleiria.pt/',"
|
||||||
|
+ "'oai:iconline.ipleiria.pt:') WHERE entity_id LIKE 'oai:iconline.ipleiria.pt/%'";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
stmt.close();
|
||||||
|
|
||||||
|
logger.info("Cleaning oai - Step 29");
|
||||||
|
stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
sql = "UPDATE " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp "
|
||||||
|
+ "SET entity_id = regexp_replace(entity_id, '^oai:comum.rcaap.pt/',"
|
||||||
|
+ "'oai:comum.rcaap.pt:') WHERE entity_id LIKE 'oai:comum.rcaap.pt/%'";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
stmt.close();
|
||||||
|
|
||||||
|
logger.info("Cleaning oai - Done, closing connection");
|
||||||
|
ConnectDB.getHiveConnection().close();
|
||||||
|
}
|
||||||
|
|
||||||
|
	private void updateProdTables() throws SQLException {
		Statement stmt = ConnectDB.getHiveConnection().createStatement();
		ConnectDB.getHiveConnection().setAutoCommit(false);

		logger.info("Inserting data to piwiklog");
		String sql = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".piwiklog "
			+ "SELECT * FROM " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp";
		stmt.executeUpdate(sql);

		logger.info("Dropping piwiklogtmp");
		sql = "DROP TABLE " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp";
		stmt.executeUpdate(sql);
		logger.info("Dropped piwiklogtmp");

		logger.info("Dropping process_portal_log_tmp");
		sql = "DROP TABLE " + ConnectDB.getUsageStatsDBSchema() + ".process_portal_log_tmp";
		stmt.executeUpdate(sql);
		logger.info("Dropped process_portal_log_tmp");

		stmt.close();
		ConnectDB.getHiveConnection().close();

	}

	public void finalizeStats() throws SQLException {
		Statement stmt = ConnectDB.getHiveConnection().createStatement();
		ConnectDB.getHiveConnection().setAutoCommit(false);

		logger.info("Dropping piwiklogtmp");
		String sql = "DROP TABLE " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp";
		stmt.executeUpdate(sql);
		logger.info("Dropped piwiklogtmp");

		logger.info("Dropping process_portal_log_tmp");
		sql = "DROP TABLE " + ConnectDB.getUsageStatsDBSchema() + ".process_portal_log_tmp";
		stmt.executeUpdate(sql);
		logger.info("Dropped process_portal_log_tmp");

		logger.info("Dropping irus_sushilogtmp");
		sql = "DROP TABLE " + ConnectDB.getUsageStatsDBSchema() + ".irus_sushilogtmp";
		stmt.executeUpdate(sql);
		logger.info("Dropped irus_sushilogtmp");

		logger.info("Dropping irus_sushilogtmp_json");
		sql = "DROP TABLE " + ConnectDB.getUsageStatsDBSchema() + ".irus_sushilogtmp_json";
		stmt.executeUpdate(sql);
		logger.info("Dropped irus_sushilogtmp_json");

		logger.info("Dropping lareferencialogtmp_json");
		sql = "DROP TABLE " + ConnectDB.getUsageStatsDBSchema() + ".lareferencialogtmp_json";
		stmt.executeUpdate(sql);
		logger.info("Dropped lareferencialogtmp_json");

		logger.info("Dropping piwiklogtmp_json");
		sql = "DROP TABLE " + ConnectDB.getUsageStatsDBSchema() + ".piwiklogtmp_json";
		stmt.executeUpdate(sql);
		logger.info("Dropped piwiklogtmp_json");

		logger.info("Dropping process_portal_log_tmp_json");
		sql = "DROP TABLE " + ConnectDB.getUsageStatsDBSchema() + ".process_portal_log_tmp_json";
		stmt.executeUpdate(sql);
		logger.info("Dropped process_portal_log_tmp_json");

		logger.info("Dropping sarc_sushilogtmp");
		sql = "DROP TABLE " + ConnectDB.getUsageStatsDBSchema() + ".sarc_sushilogtmp";
		stmt.executeUpdate(sql);
		logger.info("Dropped sarc_sushilogtmp");

		logger.info("Dropping sarc_sushilogtmp_json_array");
		sql = "DROP TABLE " + ConnectDB.getUsageStatsDBSchema() + ".sarc_sushilogtmp_json_array";
		stmt.executeUpdate(sql);
		logger.info("Dropped sarc_sushilogtmp_json_array");

		logger.info("Dropping sarc_sushilogtmp_json_non_array");
		sql = "DROP TABLE " + ConnectDB.getUsageStatsDBSchema() + ".sarc_sushilogtmp_json_non_array";
		stmt.executeUpdate(sql);
		logger.info("Dropped sarc_sushilogtmp_json_non_array");

		stmt.close();
		ConnectDB.getHiveConnection().close();

	}

	private ArrayList<String> listHdfsDir(String dir) throws Exception {

		FileSystem hdfs = FileSystem.get(new Configuration());
		RemoteIterator<LocatedFileStatus> Files;
		ArrayList<String> fileNames = new ArrayList<>();

		try {
			Path exportPath = new Path(hdfs.getUri() + dir);
			Files = hdfs.listFiles(exportPath, false);
			while (Files.hasNext()) {
				String fileName = Files.next().getPath().toString();
				fileNames.add(fileName);
			}

			hdfs.close();
		} catch (Exception e) {
			logger.error("HDFS file path with exported data does not exist : " + new Path(hdfs.getUri() + logPath));
			throw new Exception("HDFS file path with exported data does not exist : " + logPath, e);
		}

		return fileNames;
	}

	private String readHDFSFile(String filename) throws Exception {
		String result;
		try {

			FileSystem fs = FileSystem.get(new Configuration());
			// log.info("reading file : " + filename);

			BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(new Path(filename))));

			StringBuilder sb = new StringBuilder();
			String line = br.readLine();

			while (line != null) {
				if (!line.equals("[]")) {
					sb.append(line);
				}
				// sb.append(line);
				line = br.readLine();
			}
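			// The Piwik export can write several JSON arrays back to back into one file; the
			// replace below splices the "][" boundaries so the whole file parses as a single JSON array.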
			result = sb.toString().replace("][{\"idSite\"", ",{\"idSite\"");
			if (result.equals("")) {
				result = "[]";
			}

			// fs.close();
		} catch (Exception e) {
			logger.error(e.getMessage());
			throw new Exception(e);
		}

		return result;
	}

	private Connection getConnection() throws SQLException {
		return ConnectDB.getHiveConnection();
	}

	public void createPedocsOldUsageData() throws SQLException {
		Statement stmt = ConnectDB.getHiveConnection().createStatement();
		ConnectDB.getHiveConnection().setAutoCommit(false);

		logger.info("Creating PeDocs Old Views Table");
		String sql = "Create TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema()
			+ ".pedocsoldviews as select * from default.pedocsviews";
		stmt.executeUpdate(sql);
		logger.info("PeDocs Old Views Table created");

		logger.info("Creating PeDocs Old Downloads Table");
		sql = "Create TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema()
			+ ".pedocsolddownloads as select * from default.pedocsdownloads";
		stmt.executeUpdate(sql);
		logger.info("PeDocs Old Downloads Table created");

	}
}

@ -0,0 +1,54 @@
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */

package eu.dnetlib.oa.graph.usagerawdata.export;

/**
 * @author D. Pierrakos, S. Zoupanos
 */
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.charset.Charset;
import java.util.ArrayList;

import org.json.JSONException;
import org.json.simple.JSONArray;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;

public class ReadCounterRobotsList {

	private ArrayList robotsPatterns = new ArrayList();
	private String COUNTER_ROBOTS_URL;

	public ReadCounterRobotsList(String url) throws IOException, JSONException, ParseException {
		COUNTER_ROBOTS_URL = url;
		robotsPatterns = readRobotsPartners(COUNTER_ROBOTS_URL);
	}

	private ArrayList readRobotsPartners(String url) throws MalformedURLException, IOException, ParseException {
		InputStream is = new URL(url).openStream();
		JSONParser parser = new JSONParser();
		BufferedReader reader = new BufferedReader(new InputStreamReader(is, Charset.forName("ISO-8859-1")));
		JSONArray jsonArray = (JSONArray) parser.parse(reader);
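		// Each COUNTER robot entry is a regular expression; backslashes are doubled here,
		// presumably so the patterns survive being embedded later in SQL string literals.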
		for (Object aJsonArray : jsonArray) {
			org.json.simple.JSONObject jsonObjectRow = (org.json.simple.JSONObject) aJsonArray;
			robotsPatterns.add(jsonObjectRow.get("pattern").toString().replace("\\", "\\\\"));
		}
		return robotsPatterns;
	}

	public ArrayList getRobotsPatterns() {
		return robotsPatterns;
	}
}

@ -0,0 +1,500 @@

package eu.dnetlib.oa.graph.usagerawdata.export;

import java.io.*;
// import java.io.BufferedReader;
// import java.io.InputStreamReader;
import java.net.URL;
import java.net.URLConnection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * @author D. Pierrakos, S. Zoupanos
 */
public class SarcStats {

	private Statement stmtHive = null;
	private Statement stmtImpala = null;

	private static final Logger logger = LoggerFactory.getLogger(SarcStats.class);

	public SarcStats() throws Exception {
		// createTables();
	}

	private void createTables() throws Exception {
		try {

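			// Note: this DDL (TEXT columns, PRIMARY KEY, CREATE RULE, CREATE INDEX) is PostgreSQL
			// syntax rather than Hive, so this method looks like a leftover from an earlier
			// PostgreSQL-based implementation; the call to it in the constructor is commented out.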
			stmtHive = ConnectDB.getHiveConnection().createStatement();
			String sqlCreateTableSushiLog = "CREATE TABLE IF NOT EXISTS sushilog(source TEXT, repository TEXT, rid TEXT, date TEXT, metric_type TEXT, count INT, PRIMARY KEY(source, repository, rid, date, metric_type));";
			stmtHive.executeUpdate(sqlCreateTableSushiLog);

			// String sqlCopyPublicSushiLog="INSERT INTO sushilog SELECT * FROM public.sushilog;";
			// stmt.executeUpdate(sqlCopyPublicSushiLog);
			String sqlcreateRuleSushiLog = "CREATE OR REPLACE RULE ignore_duplicate_inserts AS "
				+ " ON INSERT TO sushilog "
				+ " WHERE (EXISTS ( SELECT sushilog.source, sushilog.repository,"
				+ "sushilog.rid, sushilog.date "
				+ "FROM sushilog "
				+ "WHERE sushilog.source = new.source AND sushilog.repository = new.repository AND sushilog.rid = new.rid AND sushilog.date = new.date AND sushilog.metric_type = new.metric_type)) DO INSTEAD NOTHING;";
			stmtHive.executeUpdate(sqlcreateRuleSushiLog);
			String createSushiIndex = "create index if not exists sushilog_duplicates on sushilog(source, repository, rid, date, metric_type);";
			stmtHive.executeUpdate(createSushiIndex);

			stmtHive.close();
			ConnectDB.getHiveConnection().close();
			logger.info("Sushi Tables Created");
		} catch (Exception e) {
			logger.error("Failed to create tables: " + e);
			throw new Exception("Failed to create tables: " + e.toString(), e);
		}
	}

	public void reCreateLogDirs() throws IOException {
		FileSystem dfs = FileSystem.get(new Configuration());

		logger.info("Deleting sarcsReport (Array) directory: " + ExecuteWorkflow.sarcsReportPathArray);
		dfs.delete(new Path(ExecuteWorkflow.sarcsReportPathArray), true);

		logger.info("Deleting sarcsReport (NonArray) directory: " + ExecuteWorkflow.sarcsReportPathNonArray);
		dfs.delete(new Path(ExecuteWorkflow.sarcsReportPathNonArray), true);

		logger.info("Creating sarcsReport (Array) directory: " + ExecuteWorkflow.sarcsReportPathArray);
		dfs.mkdirs(new Path(ExecuteWorkflow.sarcsReportPathArray));

		logger.info("Creating sarcsReport (NonArray) directory: " + ExecuteWorkflow.sarcsReportPathNonArray);
		dfs.mkdirs(new Path(ExecuteWorkflow.sarcsReportPathNonArray));
	}

	public void processSarc(String sarcsReportPathArray, String sarcsReportPathNonArray) throws Exception {
		Statement stmt = ConnectDB.getHiveConnection().createStatement();
		ConnectDB.getHiveConnection().setAutoCommit(false);

		logger.info("Adding JSON Serde jar");
		stmt.executeUpdate("add jar /usr/share/cmf/common_jars/hive-hcatalog-core-1.1.0-cdh5.14.0.jar");
		logger.info("Added JSON Serde jar");

		logger.info("Dropping sarc_sushilogtmp_json_array table");
		String drop_sarc_sushilogtmp_json_array = "DROP TABLE IF EXISTS "
			+ ConnectDB.getUsageStatsDBSchema() + ".sarc_sushilogtmp_json_array";
		stmt.executeUpdate(drop_sarc_sushilogtmp_json_array);
		logger.info("Dropped sarc_sushilogtmp_json_array table");

		logger.info("Creating sarc_sushilogtmp_json_array table");
		String create_sarc_sushilogtmp_json_array = "CREATE EXTERNAL TABLE IF NOT EXISTS "
			+ ConnectDB.getUsageStatsDBSchema() + ".sarc_sushilogtmp_json_array(\n"
			+ " `ItemIdentifier` ARRAY<\n"
			+ " struct<\n"
			+ " `Type`: STRING,\n"
			+ " `Value`: STRING\n"
			+ " >\n"
			+ " >,\n"
			+ " `ItemPerformance` struct<\n"
			+ " `Period`: struct<\n"
			+ " `Begin`: STRING,\n"
			+ " `End`: STRING\n"
			+ " >,\n"
			+ " `Instance`: struct<\n"
			+ " `Count`: STRING,\n"
			+ " `MetricType`: STRING\n"
			+ " >\n"
			+ " >\n"
			+ ")"
			+ "ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe'\n"
			+ "LOCATION '" + sarcsReportPathArray + "/'\n"
			+ "TBLPROPERTIES (\"transactional\"=\"false\")";
		stmt.executeUpdate(create_sarc_sushilogtmp_json_array);
		logger.info("Created sarc_sushilogtmp_json_array table");

		logger.info("Dropping sarc_sushilogtmp_json_non_array table");
		String drop_sarc_sushilogtmp_json_non_array = "DROP TABLE IF EXISTS "
			+ ConnectDB.getUsageStatsDBSchema()
			+ ".sarc_sushilogtmp_json_non_array";
		stmt.executeUpdate(drop_sarc_sushilogtmp_json_non_array);
		logger.info("Dropped sarc_sushilogtmp_json_non_array table");

		logger.info("Creating sarc_sushilogtmp_json_non_array table");
		String create_sarc_sushilogtmp_json_non_array = "CREATE EXTERNAL TABLE IF NOT EXISTS "
			+ ConnectDB.getUsageStatsDBSchema() + ".sarc_sushilogtmp_json_non_array (\n"
			+ " `ItemIdentifier` struct<\n"
			+ " `Type`: STRING,\n"
			+ " `Value`: STRING\n"
			+ " >,\n"
			+ " `ItemPerformance` struct<\n"
			+ " `Period`: struct<\n"
			+ " `Begin`: STRING,\n"
			+ " `End`: STRING\n"
			+ " >,\n"
			+ " `Instance`: struct<\n"
			+ " `Count`: STRING,\n"
			+ " `MetricType`: STRING\n"
			+ " >\n"
			+ " >"
			+ ")"
			+ "ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe'\n"
			+ "LOCATION '" + sarcsReportPathNonArray + "/'\n"
			+ "TBLPROPERTIES (\"transactional\"=\"false\")";
		stmt.executeUpdate(create_sarc_sushilogtmp_json_non_array);
		logger.info("Created sarc_sushilogtmp_json_non_array table");

		logger.info("Creating sarc_sushilogtmp table");
		String create_sarc_sushilogtmp = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema()
			+ ".sarc_sushilogtmp(source STRING, repository STRING, "
			+ "rid STRING, date STRING, metric_type STRING, count INT) clustered by (source) into 100 buckets stored as orc "
			+ "tblproperties('transactional'='true')";
		stmt.executeUpdate(create_sarc_sushilogtmp);
		logger.info("Created sarc_sushilogtmp table");

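		// posexplode() emits one row per ItemIdentifier entry together with its position;
		// only identifiers of Type 'DOI' are kept, so report items without a DOI are skipped here.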
logger.info("Inserting to sarc_sushilogtmp table (sarc_sushilogtmp_json_array)");
|
||||||
|
String insert_sarc_sushilogtmp = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".sarc_sushilogtmp "
|
||||||
|
+ "SELECT 'SARC-OJS', split(split(INPUT__FILE__NAME,'SarcsARReport_')[1],'_')[0], "
|
||||||
|
+ " `ItemIdent`.`Value`, `ItemPerformance`.`Period`.`Begin`, "
|
||||||
|
+ "`ItemPerformance`.`Instance`.`MetricType`, `ItemPerformance`.`Instance`.`Count` "
|
||||||
|
+ "FROM " + ConnectDB.getUsageStatsDBSchema() + ".sarc_sushilogtmp_json_array "
|
||||||
|
+ "LATERAL VIEW posexplode(ItemIdentifier) ItemIdentifierTable AS seqi, ItemIdent "
|
||||||
|
+ "WHERE `ItemIdent`.`Type`='DOI'";
|
||||||
|
stmt.executeUpdate(insert_sarc_sushilogtmp);
|
||||||
|
logger.info("Inserted to sarc_sushilogtmp table (sarc_sushilogtmp_json_array)");
|
||||||
|
|
||||||
|
logger.info("Inserting to sarc_sushilogtmp table (sarc_sushilogtmp_json_non_array)");
|
||||||
|
insert_sarc_sushilogtmp = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".sarc_sushilogtmp "
|
||||||
|
+ "SELECT 'SARC-OJS', split(split(INPUT__FILE__NAME,'SarcsARReport_')[1],'_')[0], "
|
||||||
|
+ "`ItemIdentifier`.`Value`, `ItemPerformance`.`Period`.`Begin`, "
|
||||||
|
+ "`ItemPerformance`.`Instance`.`MetricType`, `ItemPerformance`.`Instance`.`Count` "
|
||||||
|
+ "FROM " + ConnectDB.getUsageStatsDBSchema() + ".sarc_sushilogtmp_json_non_array";
|
||||||
|
stmt.executeUpdate(insert_sarc_sushilogtmp);
|
||||||
|
logger.info("Inserted to sarc_sushilogtmp table (sarc_sushilogtmp_json_non_array)");
|
||||||
|
|
||||||
|
ConnectDB.getHiveConnection().close();
|
||||||
|
}
|
||||||
|
|
||||||
|
	public void getAndProcessSarc(String sarcsReportPathArray, String sarcsReportPathNonArray) throws Exception {

		Statement stmt = ConnectDB.getHiveConnection().createStatement();
		ConnectDB.getHiveConnection().setAutoCommit(false);

		logger.info("Creating sushilog table");
		String createSushilog = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema()
			+ ".sushilog "
			+ "(`source` string, "
			+ "`repository` string, "
			+ "`rid` string, "
			+ "`date` string, "
			+ "`metric_type` string, "
			+ "`count` int)";
		stmt.executeUpdate(createSushilog);
		logger.info("Created sushilog table");

		logger.info("Dropping sarc_sushilogtmp table");
		String drop_sarc_sushilogtmp = "DROP TABLE IF EXISTS "
			+ ConnectDB.getUsageStatsDBSchema()
			+ ".sarc_sushilogtmp";
		stmt.executeUpdate(drop_sarc_sushilogtmp);
		logger.info("Dropped sarc_sushilogtmp table");
		ConnectDB.getHiveConnection().close();

		List<String[]> issnAndUrls = new ArrayList<String[]>();
		issnAndUrls.add(new String[] {
			"https://revistas.rcaap.pt/motricidade/sushiLite/v1_7/", "1646-107X"
		});
		issnAndUrls.add(new String[] {
			"https://revistas.rcaap.pt/antropologicas/sushiLite/v1_7/", "0873-819X"
		});
		issnAndUrls.add(new String[] {
			"https://revistas.rcaap.pt/interaccoes/sushiLite/v1_7/", "1646-2335"
		});
		issnAndUrls.add(new String[] {
			"https://revistas.rcaap.pt/cct/sushiLite/v1_7/", "2182-3030"
		});
		issnAndUrls.add(new String[] {
			"https://actapediatrica.spp.pt/sushiLite/v1_7/", "0873-9781"
		});
		issnAndUrls.add(new String[] {
			"https://revistas.rcaap.pt/sociologiapp/sushiLite/v1_7/", "0873-6529"
		});
		issnAndUrls.add(new String[] {
			"https://revistas.rcaap.pt/finisterra/sushiLite/v1_7/", "0430-5027"
		});
		issnAndUrls.add(new String[] {
			"https://revistas.rcaap.pt/sisyphus/sushiLite/v1_7/", "2182-8474"
		});
		issnAndUrls.add(new String[] {
			"https://revistas.rcaap.pt/anestesiologia/sushiLite/v1_7/", "0871-6099"
		});
		issnAndUrls.add(new String[] {
			"https://revistas.rcaap.pt/rpe/sushiLite/v1_7/", "0871-9187"
		});
		issnAndUrls.add(new String[] {
			"https://revistas.rcaap.pt/psilogos/sushiLite/v1_7/", "1646-091X"
		});
		issnAndUrls.add(new String[] {
			"https://revistas.rcaap.pt/juridica/sushiLite/v1_7/", "2183-5799"
		});
		issnAndUrls.add(new String[] {
			"https://revistas.rcaap.pt/ecr/sushiLite/v1_7/", "1647-2098"
		});
		issnAndUrls.add(new String[] {
			"https://revistas.rcaap.pt/nascercrescer/sushiLite/v1_7/", "0872-0754"
		});
		issnAndUrls.add(new String[] {
			"https://revistas.rcaap.pt/cea/sushiLite/v1_7/", "1645-3794"
		});
		issnAndUrls.add(new String[] {
			"https://revistas.rcaap.pt/proelium/sushiLite/v1_7/", "1645-8826"
		});
		issnAndUrls.add(new String[] {
			"https://revistas.rcaap.pt/millenium/sushiLite/v1_7/", "0873-3015"
		});

		if (ExecuteWorkflow.sarcNumberOfIssnToDownload > 0
			&& ExecuteWorkflow.sarcNumberOfIssnToDownload <= issnAndUrls.size()) {
			logger.info("Trimming issnAndUrls list to the size of: " + ExecuteWorkflow.sarcNumberOfIssnToDownload);
			issnAndUrls = issnAndUrls.subList(0, ExecuteWorkflow.sarcNumberOfIssnToDownload);
		}

		logger.info("(getAndProcessSarc) Downloading the following ISSNs: " + issnAndUrls);

		for (String[] issnAndUrl : issnAndUrls) {
			logger.info("Now working on ISSN: " + issnAndUrl[1]);
			getARReport(sarcsReportPathArray, sarcsReportPathNonArray, issnAndUrl[0], issnAndUrl[1]);
		}

	}

	public void updateSarcLogs() throws Exception {
		stmtHive = ConnectDB.getHiveConnection().createStatement();
		ConnectDB.getHiveConnection().setAutoCommit(false);
		stmtImpala = ConnectDB.getImpalaConnection().createStatement();

		// Insert into sushilog
		logger.info("Inserting into sushilog");
		String insertSushiLog = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema()
			+ ".sushilog SELECT * " + "FROM " + ConnectDB.getUsageStatsDBSchema() + ".sarc_sushilogtmp";
		stmtHive.executeUpdate(insertSushiLog);
		logger.info("Inserted into sushilog");

		stmtHive.close();
		ConnectDB.getHiveConnection().close();
	}

	public void getARReport(String sarcsReportPathArray, String sarcsReportPathNonArray,
		String url, String issn) throws Exception {
		logger.info("Processing SARC! issn: " + issn + " with url: " + url);
		ConnectDB.getHiveConnection().setAutoCommit(false);

		SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM");
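		// 'yyyy' (calendar year) is used rather than 'YYYY' (week-based year), which would
		// produce a wrong year for dates falling in the first or last ISO week of a year.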
		// Setting the starting period
		Calendar start = (Calendar) ExecuteWorkflow.startingLogPeriod.clone();
		logger.info("(getARReport) Starting period for log download: " + simpleDateFormat.format(start.getTime()));

		// Setting the ending period (last day of the month)
		// Calendar end = (Calendar) ExecuteWorkflow.endingLogPeriod.clone();
		// end.add(Calendar.MONTH, +1);
		// end.add(Calendar.DAY_OF_MONTH, -1);
		Calendar end = Calendar.getInstance();
		end.add(Calendar.DAY_OF_MONTH, -1);

		logger.info("(getARReport) Ending period for log download: " + simpleDateFormat.format(end.getTime()));

		SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
		PreparedStatement st = ConnectDB
			.getHiveConnection()
			.prepareStatement(
				"SELECT max(date) FROM " + ConnectDB.getUsageStatsDBSchema() + ".sushilog WHERE repository=?");
		st.setString(1, issn);
		ResultSet rs_date = st.executeQuery();
		Date dateMax = null;
		while (rs_date.next()) {
			if (rs_date.getString(1) != null && !rs_date.getString(1).equals("null")
				&& !rs_date.getString(1).equals("")) {
				start.setTime(sdf.parse(rs_date.getString(1)));
				dateMax = sdf.parse(rs_date.getString(1));
			}
		}
		rs_date.close();
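
		// dateMax holds the most recent month already stored in sushilog for this repository;
		// the download loop below resumes from the month after it and skips the ISSN entirely
		// when the stored data is already up to date.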

		// Creating the needed configuration for the correct storing of data
		Configuration config = new Configuration();
		config.addResource(new Path("/etc/hadoop/conf/core-site.xml"));
		config.addResource(new Path("/etc/hadoop/conf/hdfs-site.xml"));
		config
			.set(
				"fs.hdfs.impl",
				org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
		config
			.set(
				"fs.file.impl",
				org.apache.hadoop.fs.LocalFileSystem.class.getName());
		FileSystem dfs = FileSystem.get(config);

		if (dateMax != null && end.getTime().compareTo(dateMax) <= 0) {
			logger.info("Date found in logs " + dateMax + " and not downloading logs for " + issn);
		} else {
			start.add(Calendar.MONTH, 1);
while (start.before(end)) {
|
||||||
|
String reportUrl = url + "GetReport/?Report=AR1&Format=json&BeginDate="
|
||||||
|
+ simpleDateFormat.format(start.getTime()) + "&EndDate=" + simpleDateFormat.format(start.getTime());
|
||||||
|
start.add(Calendar.MONTH, 1);
|
||||||
|
|
||||||
|
logger.info("(getARReport) Getting report: " + reportUrl);
|
||||||
|
String text = getJson(reportUrl);
|
||||||
|
if (text == null) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
JSONParser parser = new JSONParser();
|
||||||
|
JSONObject jsonObject = null;
|
||||||
|
try {
|
||||||
|
jsonObject = (JSONObject) parser.parse(text);
|
||||||
|
} catch (ParseException pe) {
// if there is a parsing error, continue with the next url
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
jsonObject = (JSONObject) jsonObject.get("sc:ReportResponse");
|
||||||
|
jsonObject = (JSONObject) jsonObject.get("sc:Report");
|
||||||
|
if (jsonObject == null) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
jsonObject = (JSONObject) jsonObject.get("c:Report");
|
||||||
|
jsonObject = (JSONObject) jsonObject.get("c:Customer");
|
||||||
|
Object obj = jsonObject.get("c:ReportItems");
|
||||||
|
JSONArray jsonArray = new JSONArray();
|
||||||
|
if (obj instanceof JSONObject) {
|
||||||
|
jsonArray.add(obj);
|
||||||
|
} else {
|
||||||
|
jsonArray = (JSONArray) obj;
|
||||||
|
// jsonArray = (JSONArray) jsonObject.get("c:ReportItems");
|
||||||
|
}
|
||||||
|
if (jsonArray == null) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Creating the file in the filesystem for the ItemIdentifier as array object
|
||||||
|
String filePathArray = sarcsReportPathArray + "/SarcsARReport_" + issn + "_"
|
||||||
|
+ simpleDateFormat.format(start.getTime()) + ".json";
|
||||||
|
logger.info("Storing to file: " + filePathArray);
|
||||||
|
FSDataOutputStream finArray = dfs.create(new Path(filePathArray), true);
|
||||||
|
|
||||||
|
// Creating the file in the filesystem for the ItemIdentifier as a single (non-array) object
|
||||||
|
String filePathNonArray = sarcsReportPathNonArray + "/SarcsARReport_" + issn + "_"
|
||||||
|
+ simpleDateFormat.format(start.getTime()) + ".json";
|
||||||
|
logger.info("Storing to file: " + filePathNonArray);
|
||||||
|
FSDataOutputStream finNonArray = dfs.create(new Path(filePathNonArray), true);
|
||||||
|
|
||||||
|
for (Object aJsonArray : jsonArray) {
|
||||||
|
|
||||||
|
JSONObject jsonObjectRow = (JSONObject) aJsonArray;
|
||||||
|
renameKeysRecursively(":", jsonObjectRow);
|
||||||
|
|
||||||
|
if (jsonObjectRow.get("ItemIdentifier") instanceof JSONObject) {
|
||||||
|
finNonArray.write(jsonObjectRow.toJSONString().getBytes());
|
||||||
|
finNonArray.writeChar('\n');
|
||||||
|
} else {
|
||||||
|
finArray.write(jsonObjectRow.toJSONString().getBytes());
|
||||||
|
finArray.writeChar('\n');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
finArray.close();
|
||||||
|
finNonArray.close();
|
||||||
|
|
||||||
|
// Check the file size and, if it is empty, delete it
|
||||||
|
File fileArray = new File(filePathArray);
|
||||||
|
if (fileArray.length() == 0) {
|
||||||
|
fileArray.delete();
|
||||||
|
}
|
||||||
|
File fileNonArray = new File(filePathNonArray);
|
||||||
|
if (fileNonArray.length() == 0) {
|
||||||
|
fileNonArray.delete();
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
dfs.close();
|
||||||
|
}
|
||||||
|
// ConnectDB.getHiveConnection().close();
|
||||||
|
}
|
||||||
|
|
||||||
|
private void renameKeysRecursively(String delimiter, JSONArray givenJsonObj) throws Exception {
|
||||||
|
for (Object jjval : givenJsonObj) {
|
||||||
|
if (jjval instanceof JSONArray) {
|
||||||
|
renameKeysRecursively(delimiter, (JSONArray) jjval);
|
||||||
|
} else if (jjval instanceof JSONObject) {
|
||||||
|
renameKeysRecursively(delimiter, (JSONObject) jjval);
|
||||||
|
} // all other value types require no renaming
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private void renameKeysRecursively(String delimiter, JSONObject givenJsonObj) throws Exception {
|
||||||
|
Set<String> jkeys = new HashSet<String>(givenJsonObj.keySet());
|
||||||
|
for (String jkey : jkeys) {
|
||||||
|
|
||||||
|
String[] splitArray = jkey.split(delimiter);
|
||||||
|
String newJkey = splitArray[splitArray.length - 1];
|
||||||
|
|
||||||
|
Object jval = givenJsonObj.get(jkey);
|
||||||
|
givenJsonObj.remove(jkey);
|
||||||
|
givenJsonObj.put(newJkey, jval);
|
||||||
|
|
||||||
|
if (jval instanceof JSONObject) {
|
||||||
|
renameKeysRecursively(delimiter, (JSONObject) jval);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (jval instanceof JSONArray) {
|
||||||
|
renameKeysRecursively(delimiter, (JSONArray) jval);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
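/*
 * Minimal sketch (hypothetical input, not part of the original code) of what renameKeysRecursively
 * does: every key keeps only the part after the last delimiter, at every nesting level.
 *
 *   JSONObject sample = (JSONObject) new JSONParser().parse("{\"sc:ReportResponse\":{\"c:Customer\":1}}");
 *   renameKeysRecursively(":", sample);
 *   // sample.toJSONString() -> {"ReportResponse":{"Customer":1}}
 */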
|
||||||
|
|
||||||
|
private String getJson(String url) throws Exception {
|
||||||
|
// String cred=username+":"+password;
|
||||||
|
// String encoded = new sun.misc.BASE64Encoder().encode (cred.getBytes());
|
||||||
|
try {
|
||||||
|
URL website = new URL(url);
|
||||||
|
URLConnection connection = website.openConnection();
|
||||||
|
// connection.setRequestProperty ("Authorization", "Basic "+encoded);
|
||||||
|
StringBuilder response;
|
||||||
|
try (BufferedReader in = new BufferedReader(new InputStreamReader(connection.getInputStream()))) {
|
||||||
|
response = new StringBuilder();
|
||||||
|
String inputLine;
|
||||||
|
while ((inputLine = in.readLine()) != null) {
|
||||||
|
response.append(inputLine);
|
||||||
|
response.append("\n");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return response.toString();
|
||||||
|
} catch (Exception e) {
|
||||||
|
|
||||||
|
// Logging error and silently continuing
|
||||||
|
logger.error("Failed to get URL: " + e);
|
||||||
|
System.out.println("Failed to get URL: " + e);
|
||||||
|
// return null;
|
||||||
|
// throw new Exception("Failed to get URL: " + e.toString(), e);
|
||||||
|
}
|
||||||
|
return "";
|
||||||
|
}
|
||||||
|
}
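For reference, getARReport above issues one SUSHI-Lite AR1 request per month between the starting
and ending periods; a minimal sketch of the URL it assembles (the base URL and month below are
hypothetical placeholders):

    // hypothetical values, for illustration only
    String url = "https://sarc.example.org/sushilite/";   // SARC endpoint base URL passed to getARReport
    String month = "2020-06";                             // simpleDateFormat.format(start.getTime())
    String reportUrl = url + "GetReport/?Report=AR1&Format=json&BeginDate=" + month + "&EndDate=" + month;
    // -> https://sarc.example.org/sushilite/GetReport/?Report=AR1&Format=json&BeginDate=2020-06&EndDate=2020-06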
|
|
@@ -0,0 +1,206 @@
|
||||||
|
|
||||||
|
package eu.dnetlib.oa.graph.usagerawdata.export;
|
||||||
|
|
||||||
|
import java.io.IOException;
|
||||||
|
import java.sql.SQLException;
|
||||||
|
import java.sql.Statement;
|
||||||
|
|
||||||
|
import org.apache.hadoop.conf.Configuration;
|
||||||
|
import org.apache.hadoop.fs.FileSystem;
|
||||||
|
import org.apache.hadoop.fs.Path;
|
||||||
|
import org.slf4j.Logger;
|
||||||
|
import org.slf4j.LoggerFactory;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Main class for downloading and processing Usage statistics
|
||||||
|
*
|
||||||
|
* @author D. Pierrakos, S. Zoupanos
|
||||||
|
*/
|
||||||
|
public class UsageStatsExporter {
|
||||||
|
|
||||||
|
public UsageStatsExporter() {
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
private static final Logger logger = LoggerFactory.getLogger(UsageStatsExporter.class);
|
||||||
|
|
||||||
|
private void reCreateLogDirs() throws IllegalArgumentException, IOException {
|
||||||
|
FileSystem dfs = FileSystem.get(new Configuration());
|
||||||
|
|
||||||
|
logger.info("Deleting repoLog directory: " + ExecuteWorkflow.repoLogPath);
|
||||||
|
dfs.delete(new Path(ExecuteWorkflow.repoLogPath), true);
|
||||||
|
|
||||||
|
logger.info("Deleting portalLog directory: " + ExecuteWorkflow.portalLogPath);
|
||||||
|
dfs.delete(new Path(ExecuteWorkflow.portalLogPath), true);
|
||||||
|
|
||||||
|
logger.info("Deleting lareferenciaLog directory: " + ExecuteWorkflow.lareferenciaLogPath);
|
||||||
|
dfs.delete(new Path(ExecuteWorkflow.lareferenciaLogPath), true);
|
||||||
|
|
||||||
|
logger.info("Creating repoLog directory: " + ExecuteWorkflow.repoLogPath);
|
||||||
|
dfs.mkdirs(new Path(ExecuteWorkflow.repoLogPath));
|
||||||
|
|
||||||
|
logger.info("Creating portalLog directory: " + ExecuteWorkflow.portalLogPath);
|
||||||
|
dfs.mkdirs(new Path(ExecuteWorkflow.portalLogPath));
|
||||||
|
|
||||||
|
logger.info("Creating lareferenciaLog directory: " + ExecuteWorkflow.lareferenciaLogPath);
|
||||||
|
dfs.mkdirs(new Path(ExecuteWorkflow.lareferenciaLogPath));
|
||||||
|
}
|
||||||
|
|
||||||
|
public void export() throws Exception {
|
||||||
|
|
||||||
|
logger.info("Initialising DB properties");
|
||||||
|
ConnectDB.init();
|
||||||
|
|
||||||
|
PiwikStatsDB piwikstatsdb = new PiwikStatsDB(ExecuteWorkflow.repoLogPath, ExecuteWorkflow.portalLogPath);
|
||||||
|
|
||||||
|
logger.info("Re-creating database and tables");
|
||||||
|
if (ExecuteWorkflow.recreateDbAndTables) {
|
||||||
|
piwikstatsdb.recreateDBAndTables();
|
||||||
|
logger.info("DB-Tables-TmpTables are created ");
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info("Initializing the download logs module");
|
||||||
|
PiwikDownloadLogs piwd = new PiwikDownloadLogs(ExecuteWorkflow.matomoBaseURL, ExecuteWorkflow.matomoAuthToken);
|
||||||
|
|
||||||
|
if (ExecuteWorkflow.piwikEmptyDirs) {
|
||||||
|
logger.info("Recreating Piwik log directories");
|
||||||
|
piwikstatsdb.reCreateLogDirs();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Downloading piwik logs (also managing directory creation)
|
||||||
|
if (ExecuteWorkflow.downloadPiwikLogs) {
|
||||||
|
logger.info("Downloading piwik logs");
|
||||||
|
piwd
|
||||||
|
.GetOpenAIRELogs(
|
||||||
|
ExecuteWorkflow.repoLogPath,
|
||||||
|
ExecuteWorkflow.portalLogPath, ExecuteWorkflow.portalMatomoID);
|
||||||
|
}
|
||||||
|
logger.info("Downloaded piwik logs");
|
||||||
|
|
||||||
|
// Create DB tables, insert/update statistics
|
||||||
|
String cRobotsUrl = "https://raw.githubusercontent.com/atmire/COUNTER-Robots/master/COUNTER_Robots_list.json";
|
||||||
|
piwikstatsdb.setCounterRobotsURL(cRobotsUrl);
|
||||||
|
|
||||||
|
if (ExecuteWorkflow.processPiwikLogs) {
|
||||||
|
logger.info("Processing logs");
|
||||||
|
piwikstatsdb.processLogs();
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info("Creating LaReferencia tables");
|
||||||
|
LaReferenciaDownloadLogs lrf = new LaReferenciaDownloadLogs(ExecuteWorkflow.lareferenciaBaseURL,
|
||||||
|
ExecuteWorkflow.lareferenciaAuthToken);
|
||||||
|
|
||||||
|
if (ExecuteWorkflow.laReferenciaEmptyDirs) {
|
||||||
|
logger.info("Recreating LaReferencia log directories");
|
||||||
|
lrf.reCreateLogDirs();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (ExecuteWorkflow.downloadLaReferenciaLogs) {
|
||||||
|
logger.info("Downloading LaReferencia logs");
|
||||||
|
lrf.GetLaReferenciaRepos(ExecuteWorkflow.lareferenciaLogPath);
|
||||||
|
logger.info("Downloaded LaReferencia logs");
|
||||||
|
}
|
||||||
|
|
||||||
|
LaReferenciaStats lastats = new LaReferenciaStats(ExecuteWorkflow.lareferenciaLogPath);
|
||||||
|
|
||||||
|
if (ExecuteWorkflow.processLaReferenciaLogs) {
|
||||||
|
logger.info("Processing LaReferencia logs");
|
||||||
|
lastats.processLogs();
|
||||||
|
logger.info("LaReferencia logs done");
|
||||||
|
}
|
||||||
|
|
||||||
|
IrusStats irusstats = new IrusStats(ExecuteWorkflow.irusUKBaseURL);
|
||||||
|
if (ExecuteWorkflow.irusCreateTablesEmptyDirs) {
|
||||||
|
logger.info("Creating Irus Stats tables");
|
||||||
|
irusstats.createTables();
|
||||||
|
logger.info("Created Irus Stats tables");
|
||||||
|
|
||||||
|
logger.info("Re-create log dirs");
|
||||||
|
irusstats.reCreateLogDirs();
|
||||||
|
logger.info("Re-created log dirs");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (ExecuteWorkflow.irusDownloadReports) {
|
||||||
|
irusstats.getIrusRRReport(ExecuteWorkflow.irusUKReportPath);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (ExecuteWorkflow.irusProcessStats) {
|
||||||
|
irusstats.processIrusStats();
|
||||||
|
logger.info("Irus done");
|
||||||
|
}
|
||||||
|
|
||||||
|
SarcStats sarcStats = new SarcStats();
|
||||||
|
if (ExecuteWorkflow.sarcCreateTablesEmptyDirs) {
|
||||||
|
sarcStats.reCreateLogDirs();
|
||||||
|
}
|
||||||
|
if (ExecuteWorkflow.sarcDownloadReports) {
|
||||||
|
sarcStats.getAndProcessSarc(ExecuteWorkflow.sarcsReportPathArray, ExecuteWorkflow.sarcsReportPathNonArray);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (ExecuteWorkflow.sarcProcessStats) {
|
||||||
|
sarcStats.processSarc(ExecuteWorkflow.sarcsReportPathArray, ExecuteWorkflow.sarcsReportPathNonArray);
|
||||||
|
sarcStats.updateSarcLogs();
|
||||||
|
}
|
||||||
|
logger.info("Sarc done");
|
||||||
|
// finalize usagestats
|
||||||
|
|
||||||
|
logger.info("Dropping tmp tables");
|
||||||
|
if (ExecuteWorkflow.finalizeStats) {
|
||||||
|
piwikstatsdb.finalizeStats();
|
||||||
|
logger.info("Dropped tmp tables");
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info("Raw Data Download End");
|
||||||
|
}
|
||||||
|
|
||||||
|
public void createdDBWithTablesOnly() throws Exception {
|
||||||
|
logger.info("Initialising DB properties");
|
||||||
|
ConnectDB.init();
|
||||||
|
|
||||||
|
PiwikStatsDB piwikstatsdb = new PiwikStatsDB(ExecuteWorkflow.repoLogPath, ExecuteWorkflow.portalLogPath);
|
||||||
|
piwikstatsdb.recreateDBAndTables();
|
||||||
|
|
||||||
|
piwikstatsdb.createPedocsOldUsageData();
|
||||||
|
Statement stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
|
||||||
|
logger.info("Creating LaReferencia tables");
|
||||||
|
String sqlCreateTableLareferenciaLog = "CREATE TABLE IF NOT EXISTS "
|
||||||
|
+ ConnectDB.getUsageStatsDBSchema() + ".lareferencialog(matomoid INT, "
|
||||||
|
+ "source STRING, id_visit STRING, country STRING, action STRING, url STRING, entity_id STRING, "
|
||||||
|
+ "source_item_type STRING, timestamp STRING, referrer_name STRING, agent STRING) "
|
||||||
|
+ "clustered by (source, id_visit, action, timestamp, entity_id) into 100 buckets "
|
||||||
|
+ "stored as orc tblproperties('transactional'='true')";
|
||||||
|
stmt.executeUpdate(sqlCreateTableLareferenciaLog);
|
||||||
|
logger.info("Created LaReferencia tables");
|
||||||
|
|
||||||
|
logger.info("Creating sushilog");
|
||||||
|
|
||||||
|
String sqlCreateTableSushiLog = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema()
|
||||||
|
+ ".sushilog(source STRING, "
|
||||||
|
+ "repository STRING, rid STRING, date STRING, metric_type STRING, count INT) clustered by (source, "
|
||||||
|
+ "repository, rid, date, metric_type) into 100 buckets stored as orc tblproperties('transactional'='true')";
|
||||||
|
stmt.executeUpdate(sqlCreateTableSushiLog);
|
||||||
|
logger.info("Created sushilog");
|
||||||
|
|
||||||
|
logger.info("Updating piwiklog");
|
||||||
|
String sql = "insert into " + ConnectDB.getUsageStatsDBSchema()
|
||||||
|
+ ".piwiklog select * from openaire_prod_usage_raw.piwiklog";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
|
||||||
|
logger.info("Updating lareferencialog");
|
||||||
|
sql = "insert into " + ConnectDB.getUsageStatsDBSchema()
|
||||||
|
+ ".lareferencialog select * from openaire_prod_usage_raw.lareferencialog";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
|
||||||
|
logger.info("Updating sushilog");
|
||||||
|
sql = "insert into " + ConnectDB.getUsageStatsDBSchema()
|
||||||
|
+ ".sushilog select * from openaire_prod_usage_raw.sushilog";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
|
||||||
|
stmt.close();
|
||||||
|
ConnectDB.getHiveConnection().close();
|
||||||
|
logger.info("Sushi Tables Created");
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@@ -0,0 +1,219 @@
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"paramName": "mat",
|
||||||
|
"paramLongName": "matomoAuthToken",
|
||||||
|
"paramDescription": "the Matomo authentication token",
|
||||||
|
"paramRequired": false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"paramName": "mbu",
|
||||||
|
"paramLongName": "matomoBaseURL",
|
||||||
|
"paramDescription": "the Matomo base URL",
|
||||||
|
"paramRequired": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"paramName": "rlp",
|
||||||
|
"paramLongName": "repoLogPath",
|
||||||
|
"paramDescription": "path where the repository (Matomo) logs are stored",
|
||||||
|
"paramRequired": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"paramName": "plp",
|
||||||
|
"paramLongName": "portalLogPath",
|
||||||
|
"paramDescription": "path where the portal (Matomo) logs are stored",
|
||||||
|
"paramRequired": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"paramName": "pmi",
|
||||||
|
"paramLongName": "portalMatomoID",
|
||||||
|
"paramDescription": "the Matomo site ID of the portal",
|
||||||
|
"paramRequired": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"paramName": "iukbuw",
|
||||||
|
"paramLongName": "irusUKBaseURL",
|
||||||
|
"paramDescription": "the IRUS-UK base URL",
|
||||||
|
"paramRequired": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"paramName": "iukrp",
|
||||||
|
"paramLongName": "irusUKReportPath",
|
||||||
|
"paramDescription": "path where the IRUS-UK reports are stored",
|
||||||
|
"paramRequired": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"paramName": "srpa",
|
||||||
|
"paramLongName": "sarcsReportPathArray",
|
||||||
|
"paramDescription": "path where SARC reports with ItemIdentifier as an array are stored",
|
||||||
|
"paramRequired": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"paramName": "srpna",
|
||||||
|
"paramLongName": "sarcsReportPathNonArray",
|
||||||
|
"paramDescription": "path where SARC reports with ItemIdentifier as a single object are stored",
|
||||||
|
"paramRequired": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"paramName": "llp",
|
||||||
|
"paramLongName": "lareferenciaLogPath",
|
||||||
|
"paramDescription": "path where the LaReferencia logs are stored",
|
||||||
|
"paramRequired": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"paramName": "lbu",
|
||||||
|
"paramLongName": "lareferenciaBaseURL",
|
||||||
|
"paramDescription": "the LaReferencia base URL",
|
||||||
|
"paramRequired": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"paramName": "lat",
|
||||||
|
"paramLongName": "lareferenciaAuthToken",
|
||||||
|
"paramDescription": "the LaReferencia authentication token",
|
||||||
|
"paramRequired": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"paramName": "dbhu",
|
||||||
|
"paramLongName": "dbHiveUrl",
|
||||||
|
"paramDescription": "the Hive JDBC URL",
|
||||||
|
"paramRequired": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"paramName": "dbiu",
|
||||||
|
"paramLongName": "dbImpalaUrl",
|
||||||
|
"paramDescription": "the Impala JDBC URL",
|
||||||
|
"paramRequired": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"paramName": "usdbs",
|
||||||
|
"paramLongName": "usageStatsDBSchema",
|
||||||
|
"paramDescription": "the schema of the usage stats database",
|
||||||
|
"paramRequired": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"paramName": "sdbs",
|
||||||
|
"paramLongName": "statsDBSchema",
|
||||||
|
"paramDescription": "the schema of the stats database",
|
||||||
|
"paramRequired": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"paramName": "rdbt",
|
||||||
|
"paramLongName": "recreateDbAndTables",
|
||||||
|
"paramDescription": "Re-create database and initial tables?",
|
||||||
|
"paramRequired": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"paramName": "pwed",
|
||||||
|
"paramLongName": "piwikEmptyDirs",
|
||||||
|
"paramDescription": "Empty piwik directories?",
|
||||||
|
"paramRequired": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"paramName": "ppwl",
|
||||||
|
"paramLongName": "processPiwikLogs",
|
||||||
|
"paramDescription": "Process the piwiklogs (create & fill in the needed tables and process the data) based on the downloaded data",
|
||||||
|
"paramRequired": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"paramName": "dpwl",
|
||||||
|
"paramLongName": "downloadPiwikLogs",
|
||||||
|
"paramDescription": "download piwik logs?",
|
||||||
|
"paramRequired": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"paramName": "slp",
|
||||||
|
"paramLongName": "startingLogPeriod",
|
||||||
|
"paramDescription": "Starting log period",
|
||||||
|
"paramRequired": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"paramName": "npidd",
|
||||||
|
"paramLongName": "numberOfPiwikIdsToDownload",
|
||||||
|
"paramDescription": "Limit the number of the downloaded piwikids to the first numberOfPiwikIdsToDownload",
|
||||||
|
"paramRequired": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"paramName": "nsidd",
|
||||||
|
"paramLongName": "numberOfSiteIdsToDownload",
|
||||||
|
"paramDescription": "Limit the number of the downloaded siteids (La Referencia logs) to the first numberOfSiteIdsToDownload",
|
||||||
|
"paramRequired": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"paramName": "lerd",
|
||||||
|
"paramLongName": "laReferenciaEmptyDirs",
|
||||||
|
"paramDescription": "Empty LaReferencia directories?",
|
||||||
|
"paramRequired": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"paramName": "plrl",
|
||||||
|
"paramLongName": "processLaReferenciaLogs",
|
||||||
|
"paramDescription": "Process the La Referencia logs (create & fill in the needed tables and process the data) based on the downloaded data",
|
||||||
|
"paramRequired": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"paramName": "dlrl",
|
||||||
|
"paramLongName": "downloadLaReferenciaLogs",
|
||||||
|
"paramDescription": "download La Referencia logs?",
|
||||||
|
"paramRequired": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"paramName": "icted",
|
||||||
|
"paramLongName": "irusCreateTablesEmptyDirs",
|
||||||
|
"paramDescription": "Irus section: Create tables and empty JSON directories?",
|
||||||
|
"paramRequired": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"paramName": "idr",
|
||||||
|
"paramLongName": "irusDownloadReports",
|
||||||
|
"paramDescription": "Irus section: Download reports?",
|
||||||
|
"paramRequired": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"paramName": "ipr",
|
||||||
|
"paramLongName": "irusProcessStats",
|
||||||
|
"paramDescription": "Irus section: Process stats?",
|
||||||
|
"paramRequired": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"paramName": "inod",
|
||||||
|
"paramLongName": "irusNumberOfOpendoarsToDownload",
|
||||||
|
"paramDescription": "Limit the number of the downloaded Opendoars (Irus) to the first irusNumberOfOpendoarsToDownload",
|
||||||
|
"paramRequired": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"paramName": "icted",
|
||||||
|
"paramLongName": "sarcCreateTablesEmptyDirs",
|
||||||
|
"paramDescription": "Sarc section: Create tables and empty JSON directories?",
|
||||||
|
"paramRequired": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"paramName": "idr",
|
||||||
|
"paramLongName": "sarcDownloadReports",
|
||||||
|
"paramDescription": "Sarc section: Download reports?",
|
||||||
|
"paramRequired": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"paramName": "ipr",
|
||||||
|
"paramLongName": "sarcProcessStats",
|
||||||
|
"paramDescription": "Sarc section: Process stats?",
|
||||||
|
"paramRequired": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"paramName": "inod",
|
||||||
|
"paramLongName": "sarcNumberOfIssnToDownload",
|
||||||
|
"paramDescription": "Limit the number of the downloaded ISSN (Sarc) to the first sarcNumberOfIssnToDownload",
|
||||||
|
"paramRequired": true
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
"paramName": "fs",
|
||||||
|
"paramLongName": "finalizeStats",
|
||||||
|
"paramDescription": "Create the usage_stats table?",
|
||||||
|
"paramRequired": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"paramName": "nodt",
|
||||||
|
"paramLongName": "numberOfDownloadThreads",
|
||||||
|
"paramDescription": "Number of download threads",
|
||||||
|
"paramRequired": true
|
||||||
|
}
|
||||||
|
]
|
|
@@ -0,0 +1,38 @@
|
||||||
|
<configuration>
|
||||||
|
<property>
|
||||||
|
<name>jobTracker</name>
|
||||||
|
<value>${jobTracker}</value>
|
||||||
|
</property>
|
||||||
|
<property>
|
||||||
|
<name>nameNode</name>
|
||||||
|
<value>${nameNode}</value>
|
||||||
|
</property>
|
||||||
|
<property>
|
||||||
|
<name>oozie.use.system.libpath</name>
|
||||||
|
<value>true</value>
|
||||||
|
</property>
|
||||||
|
<property>
|
||||||
|
<name>oozie.action.sharelib.for.spark</name>
|
||||||
|
<value>spark2</value>
|
||||||
|
</property>
|
||||||
|
<property>
|
||||||
|
<name>hiveMetastoreUris</name>
|
||||||
|
<value>thrift://iis-cdh5-test-m3.ocean.icm.edu.pl:9083</value>
|
||||||
|
</property>
|
||||||
|
<property>
|
||||||
|
<name>hiveJdbcUrl</name>
|
||||||
|
<value>jdbc:hive2://iis-cdh5-test-m3.ocean.icm.edu.pl:10000/;UseNativeQuery=1</value>
|
||||||
|
</property>
|
||||||
|
<property>
|
||||||
|
<name>impalaJdbcUrl</name>
|
||||||
|
<value>jdbc:hive2://iis-cdh5-test-gw.ocean.icm.edu.pl:21050/;auth=noSasl;</value>
|
||||||
|
</property>
|
||||||
|
<property>
|
||||||
|
<name>oozie.wf.workflow.notification.url</name>
|
||||||
|
<value>{serviceUrl}/v1/oozieNotification/jobUpdate?jobId=$jobId%26status=$status</value>
|
||||||
|
</property>
|
||||||
|
</configuration>
|
|
@@ -0,0 +1,88 @@
|
||||||
|
<workflow-app name="Usage Graph Stats" xmlns="uri:oozie:workflow:0.5">
|
||||||
|
<parameters>
|
||||||
|
<property>
|
||||||
|
<name>hiveMetastoreUris</name>
|
||||||
|
<description>Hive server metastore URIs</description>
|
||||||
|
</property>
|
||||||
|
<property>
|
||||||
|
<name>hiveJdbcUrl</name>
|
||||||
|
<description>Hive server jdbc url</description>
|
||||||
|
</property>
|
||||||
|
<property>
|
||||||
|
<name>impalaJdbcUrl</name>
|
||||||
|
<description>Impala server jdbc url</description>
|
||||||
|
</property>
|
||||||
|
</parameters>
|
||||||
|
|
||||||
|
<global>
|
||||||
|
<job-tracker>${jobTracker}</job-tracker>
|
||||||
|
<name-node>${nameNode}</name-node>
|
||||||
|
<configuration>
|
||||||
|
<property>
|
||||||
|
<name>hive.metastore.uris</name>
|
||||||
|
<value>${hiveMetastoreUris}</value>
|
||||||
|
</property>
|
||||||
|
<property>
|
||||||
|
<name>mapreduce.job.queuename</name>
|
||||||
|
<value>${queueName}</value>
|
||||||
|
</property>
|
||||||
|
<property>
|
||||||
|
<name>oozie.launcher.mapred.job.queue.name</name>
|
||||||
|
<value>${oozieLauncherQueueName}</value>
|
||||||
|
</property>
|
||||||
|
</configuration>
|
||||||
|
</global>
|
||||||
|
|
||||||
|
<start to="Step1"/>
|
||||||
|
|
||||||
|
<kill name="Kill">
|
||||||
|
<message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
|
||||||
|
</kill>
|
||||||
|
|
||||||
|
<action name='Step1'>
|
||||||
|
<java>
|
||||||
|
<main-class>eu.dnetlib.oa.graph.usagerawdata.export.ExecuteWorkflow</main-class>
|
||||||
|
<arg>--matomoAuthToken</arg><arg>${matomoAuthToken}</arg>
|
||||||
|
<arg>--matomoBaseURL</arg><arg>${matomoBaseURL}</arg>
|
||||||
|
<arg>--repoLogPath</arg><arg>${repoLogPath}</arg>
|
||||||
|
<arg>--portalLogPath</arg><arg>${portalLogPath}</arg>
|
||||||
|
<arg>--portalMatomoID</arg><arg>${portalMatomoID}</arg>
|
||||||
|
<arg>--irusUKBaseURL</arg><arg>${irusUKBaseURL}</arg>
|
||||||
|
<arg>--irusUKReportPath</arg><arg>${irusUKReportPath}</arg>
|
||||||
|
<arg>--sarcsReportPathArray</arg><arg>${sarcsReportPathArray}</arg>
|
||||||
|
<arg>--sarcsReportPathNonArray</arg><arg>${sarcsReportPathNonArray}</arg>
|
||||||
|
<arg>--lareferenciaLogPath</arg><arg>${lareferenciaLogPath}</arg>
|
||||||
|
<arg>--lareferenciaBaseURL</arg><arg>${lareferenciaBaseURL}</arg>
|
||||||
|
<arg>--lareferenciaAuthToken</arg><arg>${lareferenciaAuthToken}</arg>
|
||||||
|
<arg>--dbHiveUrl</arg><arg>${hiveJdbcUrl}</arg>
|
||||||
|
<arg>--dbImpalaUrl</arg><arg>${impalaJdbcUrl}</arg>
|
||||||
|
<arg>--usageStatsDBSchema</arg><arg>${usageStatsDBSchema}</arg>
|
||||||
|
<arg>--statsDBSchema</arg><arg>${statsDBSchema}</arg>
|
||||||
|
<arg>--recreateDbAndTables</arg><arg>${recreateDbAndTables}</arg>
|
||||||
|
<arg>--piwikEmptyDirs</arg><arg>${piwikEmptyDirs}</arg>
|
||||||
|
<arg>--downloadPiwikLogs</arg><arg>${downloadPiwikLogs}</arg>
|
||||||
|
<arg>--processPiwikLogs</arg><arg>${processPiwikLogs}</arg>
|
||||||
|
<arg>--startingLogPeriod</arg><arg>${startingLogPeriod}</arg>
|
||||||
|
<arg>--numberOfPiwikIdsToDownload</arg><arg>${numberOfPiwikIdsToDownload}</arg>
|
||||||
|
<arg>--numberOfSiteIdsToDownload</arg><arg>${numberOfSiteIdsToDownload}</arg>
|
||||||
|
<arg>--laReferenciaEmptyDirs</arg><arg>${laReferenciaEmptyDirs}</arg>
|
||||||
|
<arg>--downloadLaReferenciaLogs</arg><arg>${downloadLaReferenciaLogs}</arg>
|
||||||
|
<arg>--processLaReferenciaLogs</arg><arg>${processLaReferenciaLogs}</arg>
|
||||||
|
<arg>--irusCreateTablesEmptyDirs</arg><arg>${irusCreateTablesEmptyDirs}</arg>
|
||||||
|
<arg>--irusDownloadReports</arg><arg>${irusDownloadReports}</arg>
|
||||||
|
<arg>--irusProcessStats</arg><arg>${irusProcessStats}</arg>
|
||||||
|
<arg>--irusNumberOfOpendoarsToDownload</arg><arg>${irusNumberOfOpendoarsToDownload}</arg>
|
||||||
|
<arg>--sarcCreateTablesEmptyDirs</arg><arg>${sarcCreateTablesEmptyDirs}</arg>
|
||||||
|
<arg>--sarcDownloadReports</arg><arg>${sarcDownloadReports}</arg>
|
||||||
|
<arg>--sarcProcessStats</arg><arg>${sarcProcessStats}</arg>
|
||||||
|
<arg>--sarcNumberOfIssnToDownload</arg><arg>${sarcNumberOfIssnToDownload}</arg>
|
||||||
|
<arg>--finalizeStats</arg><arg>${finalizeStats}</arg>
|
||||||
|
<arg>--numberOfDownloadThreads</arg><arg>${numberOfDownloadThreads}</arg>
|
||||||
|
<capture-output/>
|
||||||
|
</java>
|
||||||
|
<ok to="End" />
|
||||||
|
<error to="Kill" />
|
||||||
|
</action>
|
||||||
|
|
||||||
|
<end name="End"/>
|
||||||
|
</workflow-app>
|
|
@@ -0,0 +1,91 @@
|
||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||||
|
<parent>
|
||||||
|
<artifactId>dhp-workflows</artifactId>
|
||||||
|
<groupId>eu.dnetlib.dhp</groupId>
|
||||||
|
<version>1.2.4-SNAPSHOT</version>
|
||||||
|
</parent>
|
||||||
|
<modelVersion>4.0.0</modelVersion>
|
||||||
|
<artifactId>dhp-usage-stats-build</artifactId>
|
||||||
|
<build>
|
||||||
|
<plugins>
|
||||||
|
<plugin>
|
||||||
|
<groupId>pl.project13.maven</groupId>
|
||||||
|
<artifactId>git-commit-id-plugin</artifactId>
|
||||||
|
<version>2.1.15</version>
|
||||||
|
<executions>
|
||||||
|
<execution>
|
||||||
|
<goals>
|
||||||
|
<goal>revision</goal>
|
||||||
|
</goals>
|
||||||
|
</execution>
|
||||||
|
</executions>
|
||||||
|
<configuration>
|
||||||
|
<dotGitDirectory>${project.basedir}/../.git</dotGitDirectory>
|
||||||
|
<!-- more config here as you see fit -->
|
||||||
|
</configuration>
|
||||||
|
</plugin>
|
||||||
|
<plugin>
|
||||||
|
<groupId>org.apache.maven.plugins</groupId>
|
||||||
|
<artifactId>maven-compiler-plugin</artifactId>
|
||||||
|
<version>3.6.1</version>
|
||||||
|
<configuration>
|
||||||
|
<source>1.8</source>
|
||||||
|
<target>1.8</target>
|
||||||
|
</configuration>
|
||||||
|
</plugin>
|
||||||
|
</plugins>
|
||||||
|
</build>
|
||||||
|
<properties>
|
||||||
|
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
|
||||||
|
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
|
||||||
|
<cdh.hive.version>0.13.1-cdh5.2.1</cdh.hive.version>
|
||||||
|
<cdh.hadoop.version>2.5.0-cdh5.2.1</cdh.hadoop.version>
|
||||||
|
</properties>
|
||||||
|
|
||||||
|
<dependencies>
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.apache.spark</groupId>
|
||||||
|
<artifactId>spark-core_2.11</artifactId>
|
||||||
|
<version>2.2.0</version>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.apache.spark</groupId>
|
||||||
|
<artifactId>spark-sql_2.11</artifactId>
|
||||||
|
<version>2.4.5</version>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>com.googlecode.json-simple</groupId>
|
||||||
|
<artifactId>json-simple</artifactId>
|
||||||
|
<version>1.1.1</version>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.json</groupId>
|
||||||
|
<artifactId>json</artifactId>
|
||||||
|
<version>20180130</version>
|
||||||
|
<type>jar</type>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.apache.hive</groupId>
|
||||||
|
<artifactId>hive-jdbc</artifactId>
|
||||||
|
<version>${cdh.hive.version}</version>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.apache.hadoop</groupId>
|
||||||
|
<artifactId>hadoop-common</artifactId>
|
||||||
|
<version>${cdh.hadoop.version}</version>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>eu.dnetlib.dhp</groupId>
|
||||||
|
<artifactId>dhp-common</artifactId>
|
||||||
|
<version>${project.version}</version>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>c3p0</groupId>
|
||||||
|
<artifactId>c3p0</artifactId>
|
||||||
|
<version>0.9.1.2</version>
|
||||||
|
<type>jar</type>
|
||||||
|
</dependency>
|
||||||
|
</dependencies>
|
||||||
|
<name>dhp-usage-stats-build</name>
|
||||||
|
</project>
|
|
@@ -0,0 +1 @@
|
||||||
|
mvn clean package -Poozie-package,deploy,run -Dworkflow.source.dir=eu/dnetlib/dhp/oa/graph/usagestatsbuild
|
|
@@ -0,0 +1,147 @@
|
||||||
|
/*
|
||||||
|
* To change this license header, choose License Headers in Project Properties.
|
||||||
|
* To change this template file, choose Tools | Templates
|
||||||
|
* and open the template in the editor.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package eu.dnetlib.oa.graph.usagestatsbuild.export;
|
||||||
|
|
||||||
|
import java.sql.Connection;
|
||||||
|
import java.sql.DriverManager;
|
||||||
|
import java.sql.SQLException;
|
||||||
|
import java.sql.Statement;
|
||||||
|
import java.text.DateFormat;
|
||||||
|
import java.text.SimpleDateFormat;
|
||||||
|
import java.util.Calendar;
|
||||||
|
import java.util.Date;
|
||||||
|
import java.util.Properties;
|
||||||
|
|
||||||
|
import org.apache.log4j.Logger;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @author D. Pierrakos, S. Zoupanos
|
||||||
|
*/
|
||||||
|
import com.mchange.v2.c3p0.ComboPooledDataSource;
|
||||||
|
|
||||||
|
public abstract class ConnectDB {
|
||||||
|
|
||||||
|
public static Connection DB_HIVE_CONNECTION;
|
||||||
|
public static Connection DB_IMPALA_CONNECTION;
|
||||||
|
|
||||||
|
private static String dbHiveUrl;
|
||||||
|
private static String dbImpalaUrl;
|
||||||
|
private static String usageRawDataDBSchema;
|
||||||
|
private static String usageStatsDBSchema;
|
||||||
|
private static String usagestatsPermanentDBSchema;
|
||||||
|
private static String statsDBSchema;
|
||||||
|
private final static Logger log = Logger.getLogger(ConnectDB.class);
|
||||||
|
|
||||||
|
static void init() throws ClassNotFoundException {
|
||||||
|
|
||||||
|
dbHiveUrl = ExecuteWorkflow.dbHiveUrl;
|
||||||
|
dbImpalaUrl = ExecuteWorkflow.dbImpalaUrl;
|
||||||
|
usageStatsDBSchema = ExecuteWorkflow.usageStatsDBSchema;
|
||||||
|
statsDBSchema = ExecuteWorkflow.statsDBSchema;
|
||||||
|
usageRawDataDBSchema = ExecuteWorkflow.usageRawDataDBSchema;
|
||||||
|
usagestatsPermanentDBSchema = ExecuteWorkflow.usagestatsPermanentDBSchema;
|
||||||
|
|
||||||
|
Class.forName("org.apache.hive.jdbc.HiveDriver");
|
||||||
|
}
|
||||||
|
|
||||||
|
public static Connection getHiveConnection() throws SQLException {
|
||||||
|
if (DB_HIVE_CONNECTION != null && !DB_HIVE_CONNECTION.isClosed()) {
|
||||||
|
return DB_HIVE_CONNECTION;
|
||||||
|
} else {
|
||||||
|
DB_HIVE_CONNECTION = connectHive();
|
||||||
|
|
||||||
|
return DB_HIVE_CONNECTION;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public static Connection getImpalaConnection() throws SQLException {
|
||||||
|
if (DB_IMPALA_CONNECTION != null && !DB_IMPALA_CONNECTION.isClosed()) {
|
||||||
|
return DB_IMPALA_CONNECTION;
|
||||||
|
} else {
|
||||||
|
DB_IMPALA_CONNECTION = connectImpala();
|
||||||
|
|
||||||
|
return DB_IMPALA_CONNECTION;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public static String getUsageRawDataDBSchema() {
|
||||||
|
return ConnectDB.usageRawDataDBSchema;
|
||||||
|
}
|
||||||
|
|
||||||
|
public static String getUsageStatsDBSchema() {
|
||||||
|
String datePattern = "yyyyMMdd";
|
||||||
|
DateFormat df = new SimpleDateFormat(datePattern);
|
||||||
|
// Get the today date using Calendar object.
|
||||||
|
Date today = Calendar.getInstance().getTime();
|
||||||
|
String todayAsString = df.format(today);
|
||||||
|
|
||||||
|
return ConnectDB.usageStatsDBSchema + "_" + todayAsString;
|
||||||
|
}
|
||||||
|
|
||||||
|
public static String getStatsDBSchema() {
|
||||||
|
return ConnectDB.statsDBSchema;
|
||||||
|
}
|
||||||
|
|
||||||
|
public static String getUsagestatsPermanentDBSchema() {
|
||||||
|
return ConnectDB.usagestatsPermanentDBSchema;
|
||||||
|
}
|
||||||
|
|
||||||
|
private static Connection connectHive() throws SQLException {
|
||||||
|
/*
|
||||||
|
* Connection connection = DriverManager.getConnection(dbHiveUrl); Statement stmt =
|
||||||
|
* connection.createStatement(); log.debug("Opened database successfully"); return connection;
|
||||||
|
*/
|
||||||
|
ComboPooledDataSource cpds = new ComboPooledDataSource();
|
||||||
|
cpds.setJdbcUrl(dbHiveUrl);
|
||||||
|
cpds.setAcquireIncrement(1);
|
||||||
|
cpds.setMaxPoolSize(100);
|
||||||
|
cpds.setMinPoolSize(1);
|
||||||
|
cpds.setInitialPoolSize(1);
|
||||||
|
cpds.setMaxIdleTime(300);
|
||||||
|
cpds.setMaxConnectionAge(36000);
|
||||||
|
|
||||||
|
cpds.setAcquireRetryAttempts(30);
|
||||||
|
cpds.setAcquireRetryDelay(2000);
|
||||||
|
cpds.setBreakAfterAcquireFailure(false);
|
||||||
|
|
||||||
|
cpds.setCheckoutTimeout(0);
|
||||||
|
cpds.setPreferredTestQuery("SELECT 1");
|
||||||
|
cpds.setIdleConnectionTestPeriod(60);
|
||||||
|
return cpds.getConnection();
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
private static Connection connectImpala() throws SQLException {
|
||||||
|
/*
|
||||||
|
* Connection connection = DriverManager.getConnection(dbImpalaUrl); Statement stmt =
|
||||||
|
* connection.createStatement(); log.debug("Opened database successfully"); return connection;
|
||||||
|
*/
|
||||||
|
ComboPooledDataSource cpds = new ComboPooledDataSource();
|
||||||
|
cpds.setJdbcUrl(dbImpalaUrl);
|
||||||
|
cpds.setAcquireIncrement(1);
|
||||||
|
cpds.setMaxPoolSize(100);
|
||||||
|
cpds.setMinPoolSize(1);
|
||||||
|
cpds.setInitialPoolSize(1);
|
||||||
|
cpds.setMaxIdleTime(300);
|
||||||
|
cpds.setMaxConnectionAge(36000);
|
||||||
|
|
||||||
|
cpds.setAcquireRetryAttempts(30);
|
||||||
|
cpds.setAcquireRetryDelay(2000);
|
||||||
|
cpds.setBreakAfterAcquireFailure(false);
|
||||||
|
|
||||||
|
cpds.setCheckoutTimeout(0);
|
||||||
|
cpds.setPreferredTestQuery("SELECT 1");
|
||||||
|
cpds.setIdleConnectionTestPeriod(60);
|
||||||
|
|
||||||
|
return cpds.getConnection();
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
}
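For reference, getUsageStatsDBSchema() above appends the run date to the configured schema name, so
each build writes into a day-specific schema. A minimal sketch with hypothetical values:

    // hypothetical values, for illustration only
    String schema = "usagestats";                                         // ExecuteWorkflow.usageStatsDBSchema
    String suffix = new SimpleDateFormat("yyyyMMdd").format(new Date());  // e.g. "20201214"
    String effective = schema + "_" + suffix;                             // -> "usagestats_20201214"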
|
|
@@ -0,0 +1,153 @@
|
||||||
|
/*
|
||||||
|
* To change this license header, choose License Headers in Project Properties.
|
||||||
|
* To change this template file, choose Tools | Templates
|
||||||
|
* and open the template in the editor.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package eu.dnetlib.oa.graph.usagestatsbuild.export;
|
||||||
|
|
||||||
|
import java.text.SimpleDateFormat;
|
||||||
|
import java.util.Calendar;
|
||||||
|
import java.util.Date;
|
||||||
|
|
||||||
|
import org.apache.commons.io.IOUtils;
|
||||||
|
import org.apache.log4j.BasicConfigurator;
|
||||||
|
import org.slf4j.Logger;
|
||||||
|
import org.slf4j.LoggerFactory;
|
||||||
|
|
||||||
|
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @author D. Pierrakos, S. Zoupanos
|
||||||
|
*/
|
||||||
|
public class ExecuteWorkflow {
|
||||||
|
|
||||||
|
// static String matomoAuthToken;
|
||||||
|
static String matomoBaseURL;
|
||||||
|
static String repoLogPath;
|
||||||
|
static String portalLogPath;
|
||||||
|
static String portalMatomoID;
|
||||||
|
// static String irusUKBaseURL;
|
||||||
|
static String irusUKReportPath;
|
||||||
|
static String sarcsReportPathArray;
|
||||||
|
static String sarcsReportPathNonArray;
|
||||||
|
static String lareferenciaLogPath;
|
||||||
|
// static String lareferenciaBaseURL;
|
||||||
|
// static String lareferenciaAuthToken;
|
||||||
|
static String dbHiveUrl;
|
||||||
|
static String dbImpalaUrl;
|
||||||
|
static String usageRawDataDBSchema;
|
||||||
|
static String usageStatsDBSchema;
|
||||||
|
static String usagestatsPermanentDBSchema;
|
||||||
|
static String statsDBSchema;
|
||||||
|
static boolean recreateDbAndTables;
|
||||||
|
|
||||||
|
static boolean processPiwikLogs;
|
||||||
|
static boolean processLaReferenciaLogs;
|
||||||
|
|
||||||
|
static boolean irusProcessStats;
|
||||||
|
|
||||||
|
static boolean sarcProcessStats;
|
||||||
|
|
||||||
|
static boolean finalizeStats;
|
||||||
|
static boolean finalTablesVisibleToImpala;
|
||||||
|
|
||||||
|
static int numberOfDownloadThreads;
|
||||||
|
|
||||||
|
private static final Logger logger = LoggerFactory.getLogger(ExecuteWorkflow.class);
|
||||||
|
|
||||||
|
public static void main(String args[]) throws Exception {
|
||||||
|
|
||||||
|
// Sending the logs to the console
|
||||||
|
BasicConfigurator.configure();
|
||||||
|
|
||||||
|
final ArgumentApplicationParser parser = new ArgumentApplicationParser(
|
||||||
|
IOUtils
|
||||||
|
.toString(
|
||||||
|
UsageStatsExporter.class
|
||||||
|
.getResourceAsStream(
|
||||||
|
"/eu/dnetlib/dhp/oa/graph/usagestatsbuild/export/usagestatsbuild_parameters.json")));
|
||||||
|
parser.parseArgument(args);
|
||||||
|
|
||||||
|
// Setting up the initial parameters
|
||||||
|
// matomoAuthToken = parser.get("matomoAuthToken");
|
||||||
|
// matomoBaseURL = parser.get("matomoBaseURL");
|
||||||
|
repoLogPath = parser.get("repoLogPath");
|
||||||
|
portalLogPath = parser.get("portalLogPath");
|
||||||
|
portalMatomoID = parser.get("portalMatomoID");
|
||||||
|
// irusUKBaseURL = parser.get("irusUKBaseURL");
|
||||||
|
irusUKReportPath = parser.get("irusUKReportPath");
|
||||||
|
sarcsReportPathArray = parser.get("sarcsReportPathArray");
|
||||||
|
sarcsReportPathNonArray = parser.get("sarcsReportPathNonArray");
|
||||||
|
lareferenciaLogPath = parser.get("lareferenciaLogPath");
|
||||||
|
// lareferenciaBaseURL = parser.get("lareferenciaBaseURL");
|
||||||
|
// lareferenciaAuthToken = parser.get("lareferenciaAuthToken");
|
||||||
|
|
||||||
|
dbHiveUrl = parser.get("dbHiveUrl");
|
||||||
|
dbImpalaUrl = parser.get("dbImpalaUrl");
|
||||||
|
usageRawDataDBSchema = parser.get("usageRawDataDBSchema");
|
||||||
|
usageStatsDBSchema = parser.get("usageStatsDBSchema");
|
||||||
|
usagestatsPermanentDBSchema = parser.get("usagestatsPermanentDBSchema");
|
||||||
|
statsDBSchema = parser.get("statsDBSchema");
|
||||||
|
|
||||||
|
if (parser.get("processPiwikLogs").toLowerCase().equals("true")) {
|
||||||
|
processPiwikLogs = true;
|
||||||
|
} else {
|
||||||
|
processPiwikLogs = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// String startingLogPeriodStr = parser.get("startingLogPeriod");
|
||||||
|
// Date startingLogPeriodDate = new SimpleDateFormat("MM/yyyy").parse(startingLogPeriodStr);
|
||||||
|
// startingLogPeriod = startingLogPeriodStr(startingLogPeriodDate);
|
||||||
|
//
|
||||||
|
// String endingLogPeriodStr = parser.get("endingLogPeriod");
|
||||||
|
// Date endingLogPeriodDate = new SimpleDateFormat("MM/yyyy").parse(endingLogPeriodStr);
|
||||||
|
// endingLogPeriod = startingLogPeriodStr(endingLogPeriodDate);
|
||||||
|
|
||||||
|
if (parser.get("recreateDbAndTables").toLowerCase().equals("true")) {
|
||||||
|
recreateDbAndTables = true;
|
||||||
|
} else {
|
||||||
|
recreateDbAndTables = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (parser.get("processLaReferenciaLogs").toLowerCase().equals("true")) {
|
||||||
|
processLaReferenciaLogs = true;
|
||||||
|
} else {
|
||||||
|
processLaReferenciaLogs = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (parser.get("irusProcessStats").toLowerCase().equals("true")) {
|
||||||
|
irusProcessStats = true;
|
||||||
|
} else {
|
||||||
|
irusProcessStats = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (parser.get("sarcProcessStats").toLowerCase().equals("true")) {
|
||||||
|
sarcProcessStats = true;
|
||||||
|
} else {
|
||||||
|
sarcProcessStats = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (parser.get("finalizeStats").toLowerCase().equals("true")) {
|
||||||
|
finalizeStats = true;
|
||||||
|
} else {
|
||||||
|
finalizeStats = false;
|
||||||
|
}
|
||||||
|
if (parser.get("finalTablesVisibleToImpala").toLowerCase().equals("true")) {
finalTablesVisibleToImpala = true;
} else {
finalTablesVisibleToImpala = false;
}

numberOfDownloadThreads = Integer.parseInt(parser.get("numberOfDownloadThreads"));
|
||||||
|
|
||||||
|
UsageStatsExporter usagestatsExport = new UsageStatsExporter();
|
||||||
|
usagestatsExport.export();
|
||||||
|
}
|
||||||
|
|
||||||
|
private static Calendar startingLogPeriodStr(Date date) {
|
||||||
|
|
||||||
|
Calendar calendar = Calendar.getInstance();
|
||||||
|
calendar.setTime(date);
|
||||||
|
return calendar;
|
||||||
|
|
||||||
|
}
|
||||||
|
}
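The repeated toLowerCase().equals("true") blocks above can be written more compactly; a sketch of an
equivalent alternative (not the committed code) using Boolean.parseBoolean, which is case-insensitive
and yields false for a missing value instead of throwing:

    // sketch only: equivalent flag parsing with Boolean.parseBoolean
    processPiwikLogs = Boolean.parseBoolean(parser.get("processPiwikLogs"));
    recreateDbAndTables = Boolean.parseBoolean(parser.get("recreateDbAndTables"));
    finalTablesVisibleToImpala = Boolean.parseBoolean(parser.get("finalTablesVisibleToImpala"));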
|
|
@@ -0,0 +1,69 @@
|
||||||
|
|
||||||
|
package eu.dnetlib.oa.graph.usagestatsbuild.export;
|
||||||
|
|
||||||
|
import java.io.*;
|
||||||
|
import java.net.URL;
|
||||||
|
import java.net.URLConnection;
|
||||||
|
import java.sql.PreparedStatement;
|
||||||
|
import java.sql.ResultSet;
|
||||||
|
import java.sql.Statement;
|
||||||
|
import java.text.SimpleDateFormat;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.Calendar;
|
||||||
|
import java.util.Date;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
import org.apache.hadoop.conf.Configuration;
|
||||||
|
import org.apache.hadoop.fs.FSDataOutputStream;
|
||||||
|
import org.apache.hadoop.fs.FileSystem;
|
||||||
|
import org.apache.hadoop.fs.Path;
|
||||||
|
import org.json.simple.JSONArray;
|
||||||
|
import org.json.simple.JSONObject;
|
||||||
|
import org.json.simple.parser.JSONParser;
|
||||||
|
import org.slf4j.Logger;
|
||||||
|
import org.slf4j.LoggerFactory;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @author D. Pierrakos, S. Zoupanos
|
||||||
|
*/
|
||||||
|
public class IrusStats {
|
||||||
|
|
||||||
|
private String irusUKURL;
|
||||||
|
|
||||||
|
private static final Logger logger = LoggerFactory.getLogger(IrusStats.class);
|
||||||
|
|
||||||
|
public IrusStats() throws Exception {
|
||||||
|
}
|
||||||
|
|
||||||
|
public void processIrusStats() throws Exception {
|
||||||
|
Statement stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
ConnectDB.getHiveConnection().setAutoCommit(false);
|
||||||
|
|
||||||
|
logger.info("Creating irus_downloads_stats_tmp table");
|
||||||
|
String createDownloadsStats = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema()
|
||||||
|
+ ".irus_downloads_stats_tmp "
|
||||||
|
+ "(`source` string, "
|
||||||
|
+ "`repository_id` string, "
|
||||||
|
+ "`result_id` string, "
|
||||||
|
+ "`date` string, "
|
||||||
|
+ "`count` bigint, "
|
||||||
|
+ "`openaire` bigint)";
|
||||||
|
stmt.executeUpdate(createDownloadsStats);
|
||||||
|
logger.info("Created irus_downloads_stats_tmp table");
|
||||||
|
|
||||||
|
logger.info("Inserting into irus_downloads_stats_tmp");
|
||||||
|
String insertDStats = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".irus_downloads_stats_tmp "
|
||||||
|
+ "SELECT s.source, d.id AS repository_id, "
|
||||||
|
+ "ro.id as result_id, CONCAT(YEAR(date), '/', LPAD(MONTH(date), 2, '0')) as date, s.count, '0' "
|
||||||
|
+ "FROM " + ConnectDB.getUsageRawDataDBSchema() + ".sushilog s, "
|
||||||
|
+ ConnectDB.getStatsDBSchema() + ".datasource_oids d, "
|
||||||
|
+ ConnectDB.getStatsDBSchema() + ".result_oids ro "
|
||||||
|
+ "WHERE s.repository=d.oid AND s.rid=ro.oid AND metric_type='ft_total' AND s.source='IRUS-UK'";
|
||||||
|
stmt.executeUpdate(insertDStats);
|
||||||
|
logger.info("Inserted into irus_downloads_stats_tmp");
|
||||||
|
|
||||||
|
stmt.close();
|
||||||
|
// ConnectDB.getHiveConnection().close();
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@@ -0,0 +1,145 @@
|
||||||
|
|
||||||
|
package eu.dnetlib.oa.graph.usagestatsbuild.export;
|
||||||
|
|
||||||
|
import java.io.*;
|
||||||
|
import java.net.URLDecoder;
|
||||||
|
import java.sql.Connection;
|
||||||
|
import java.sql.PreparedStatement;
|
||||||
|
import java.sql.SQLException;
|
||||||
|
import java.sql.Statement;
|
||||||
|
import java.sql.Timestamp;
|
||||||
|
import java.text.SimpleDateFormat;
|
||||||
|
import java.util.*;
|
||||||
|
import java.util.regex.Matcher;
|
||||||
|
import java.util.regex.Pattern;
|
||||||
|
|
||||||
|
import org.apache.hadoop.conf.Configuration;
|
||||||
|
import org.apache.hadoop.fs.FileSystem;
|
||||||
|
import org.apache.hadoop.fs.LocatedFileStatus;
|
||||||
|
import org.apache.hadoop.fs.Path;
|
||||||
|
import org.apache.hadoop.fs.RemoteIterator;
|
||||||
|
import org.json.simple.JSONArray;
|
||||||
|
import org.json.simple.JSONObject;
|
||||||
|
import org.json.simple.parser.JSONParser;
|
||||||
|
import org.slf4j.Logger;
|
||||||
|
import org.slf4j.LoggerFactory;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @author D. Pierrakos, S. Zoupanos
|
||||||
|
*/
|
||||||
|
public class LaReferenciaStats {
|
||||||
|
|
||||||
|
private static final Logger logger = LoggerFactory.getLogger(LaReferenciaStats.class);
|
||||||
|
|
||||||
|
private String logRepoPath;
|
||||||
|
|
||||||
|
private Statement stmt = null;
|
||||||
|
|
||||||
|
private String CounterRobotsURL;
|
||||||
|
private ArrayList robotsList;
|
||||||
|
|
||||||
|
public LaReferenciaStats() throws Exception {
|
||||||
|
}
|
||||||
|
|
||||||
|
public void processLogs() throws Exception {
|
||||||
|
try {
|
||||||
|
logger.info("LaReferencia creating viewsStats");
|
||||||
|
viewsStats();
|
||||||
|
logger.info("LaReferencia created viewsStats");
|
||||||
|
logger.info("LaReferencia creating downloadsStats");
|
||||||
|
downloadsStats();
|
||||||
|
logger.info("LaReferencia created downloadsStats");
|
||||||
|
|
||||||
|
// logger.info("LaReferencia updating Production Tables");
|
||||||
|
// updateProdTables();
|
||||||
|
// logger.info("LaReferencia updated Production Tables");
|
||||||
|
|
||||||
|
} catch (Exception e) {
|
||||||
|
logger.error("Failed to process logs: " + e);
|
||||||
|
throw new Exception("Failed to process logs: " + e.toString(), e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void viewsStats() throws Exception {
|
||||||
|
|
||||||
|
Statement stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
ConnectDB.getHiveConnection().setAutoCommit(false);
|
||||||
|
|
||||||
|
logger.info("Creating la_result_views_monthly_tmp view");
|
||||||
|
String sql = "CREATE OR REPLACE VIEW " + ConnectDB.getUsageStatsDBSchema() + ".la_result_views_monthly_tmp AS "
|
||||||
|
+
|
||||||
|
"SELECT entity_id AS id, COUNT(entity_id) as views, SUM(CASE WHEN referrer_name LIKE '%openaire%' " +
|
||||||
|
"THEN 1 ELSE 0 END) AS openaire_referrer, " +
|
||||||
|
"CONCAT(YEAR(timestamp), '/', LPAD(MONTH(timestamp), 2, '0')) AS month, source " +
|
||||||
|
"FROM " + ConnectDB.getUsageRawDataDBSchema() + ".lareferencialog where action='action' and " +
|
||||||
|
"(source_item_type='oaItem' or source_item_type='repItem') " +
|
||||||
|
"GROUP BY entity_id, CONCAT(YEAR(timestamp), '/', LPAD(MONTH(timestamp), 2, '0')), " +
|
||||||
|
"source ORDER BY source, entity_id";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
logger.info("Created la_result_views_monthly_tmp view");
|
||||||
|
|
||||||
|
logger.info("Dropping la_views_stats_tmp table");
|
||||||
|
sql = "DROP TABLE IF EXISTS " +
|
||||||
|
ConnectDB.getUsageStatsDBSchema() +
|
||||||
|
".la_views_stats_tmp";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
logger.info("Dropped la_views_stats_tmp table");
|
||||||
|
|
||||||
|
logger.info("Creating la_views_stats_tmp table");
|
||||||
|
sql = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema() + ".la_views_stats_tmp " +
|
||||||
|
"AS SELECT 'LaReferencia' as source, d.id as repository_id, ro.id as result_id, month as date, " +
|
||||||
|
"max(views) AS count, max(openaire_referrer) AS openaire " +
|
||||||
|
"FROM " + ConnectDB.getUsageStatsDBSchema() + ".la_result_views_monthly_tmp p, " +
|
||||||
|
ConnectDB.getStatsDBSchema() + ".datasource_oids d, " + ConnectDB.getStatsDBSchema() + ".result_oids ro " +
|
||||||
|
"WHERE p.source=d.oid AND p.id=ro.oid " +
|
||||||
|
"GROUP BY d.id, ro.id, month " +
|
||||||
|
"ORDER BY d.id, ro.id, month";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
logger.info("Created la_views_stats_tmp table");
|
||||||
|
|
||||||
|
stmt.close();
|
||||||
|
// ConnectDB.getHiveConnection().close();
|
||||||
|
}
|
||||||
|
|
||||||
|
private void downloadsStats() throws Exception {
|
||||||
|
|
||||||
|
Statement stmt = ConnectDB.getHiveConnection().createStatement();
|
||||||
|
ConnectDB.getHiveConnection().setAutoCommit(false);
|
||||||
|
|
||||||
|
logger.info("Creating la_result_downloads_monthly_tmp view");
|
||||||
|
String sql = "CREATE OR REPLACE VIEW " + ConnectDB.getUsageStatsDBSchema()
|
||||||
|
+ ".la_result_downloads_monthly_tmp AS " +
|
||||||
|
"SELECT entity_id AS id, COUNT(entity_id) as downloads, SUM(CASE WHEN referrer_name LIKE '%openaire%' " +
|
||||||
|
"THEN 1 ELSE 0 END) AS openaire_referrer, " +
|
||||||
|
"CONCAT(YEAR(timestamp), '/', LPAD(MONTH(timestamp), 2, '0')) AS month, source " +
|
||||||
|
"FROM " + ConnectDB.getUsageRawDataDBSchema() + ".lareferencialog where action='download' and " +
|
||||||
|
"(source_item_type='oaItem' or source_item_type='repItem') " +
|
||||||
|
"GROUP BY entity_id, CONCAT(YEAR(timestamp), '/', LPAD(MONTH(timestamp), 2, '0')), " +
|
||||||
|
"source ORDER BY source, entity_id";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
logger.info("Created la_result_downloads_monthly_tmp view");
|
||||||
|
|
||||||
|
logger.info("Dropping la_downloads_stats_tmp table");
|
||||||
|
sql = "DROP TABLE IF EXISTS " +
|
||||||
|
ConnectDB.getUsageStatsDBSchema() +
|
||||||
|
".la_downloads_stats_tmp";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
logger.info("Dropped la_downloads_stats_tmp table");
|
||||||
|
|
||||||
|
logger.info("Creating la_downloads_stats_tmp table");
|
||||||
|
sql = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema() + ".la_downloads_stats_tmp " +
|
||||||
|
"AS SELECT 'LaReferencia' as source, d.id as repository_id, ro.id as result_id, month as date, " +
|
||||||
|
"max(downloads) AS count, max(openaire_referrer) AS openaire " +
|
||||||
|
"FROM " + ConnectDB.getUsageStatsDBSchema() + ".la_result_downloads_monthly_tmp p, " +
|
||||||
|
ConnectDB.getStatsDBSchema() + ".datasource_oids d, " + ConnectDB.getStatsDBSchema() + ".result_oids ro " +
|
||||||
|
"WHERE p.source=d.oid AND p.id=ro.oid " +
|
||||||
|
"GROUP BY d.id, ro.id, month " +
|
||||||
|
"ORDER BY d.id, ro.id, month";
|
||||||
|
stmt.executeUpdate(sql);
|
||||||
|
logger.info("Created la_downloads_stats_tmp table");
|
||||||
|
|
||||||
|
stmt.close();
|
||||||
|
// ConnectDB.getHiveConnection().close();
|
||||||
|
}
|
||||||
|
|
||||||
|
}
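For reference, the month bucket used by the views/downloads queries above,
CONCAT(YEAR(timestamp), '/', LPAD(MONTH(timestamp), 2, '0')), groups a timestamp such as
2020-07-03 10:15:00 (hypothetical example) under the key "2020/07"; the same format can be produced
on the Java side by, for example:

    // hypothetical illustration of the month key format used in the SQL above
    String monthKey = String.format("%d/%02d", 2020, 7);   // -> "2020/07"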
|
|
@@ -0,0 +1,559 @@

package eu.dnetlib.oa.graph.usagestatsbuild.export;

import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Timestamp;
import java.text.SimpleDateFormat;
import java.util.*;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * @author D. Pierrakos, S. Zoupanos
 */
public class PiwikStatsDB {

    private String logPath;

    private Statement stmt = null;

    private static final Logger logger = LoggerFactory.getLogger(PiwikStatsDB.class);

    public PiwikStatsDB() throws Exception {

    }

    public void recreateDBAndTables() throws Exception {
        this.createDatabase();
        // The piwiklog table is not needed since it is built
        // on top of JSON files
        //////////// this.createTmpTables();
    }

    private void createDatabase() throws Exception {

        // try {
        //
        // stmt = ConnectDB.getHiveConnection().createStatement();
        //
        // logger.info("Dropping usagestats DB: " + ConnectDB.getUsageStatsDBSchema());
        // String dropDatabase = "DROP DATABASE IF EXISTS " + ConnectDB.getUsageStatsDBSchema() + " CASCADE";
        // stmt.executeUpdate(dropDatabase);
        // } catch (Exception e) {
        // logger.error("Failed to drop database: " + e);
        // throw new Exception("Failed to drop database: " + e.toString(), e);
        // }
        //
        try {
            stmt = ConnectDB.getHiveConnection().createStatement();
            logger.info("Creating usagestats DB: " + ConnectDB.getUsageStatsDBSchema());
            String createDatabase = "CREATE DATABASE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema();
            stmt.executeUpdate(createDatabase);
            logger.info("Usagestats DB created: " + ConnectDB.getUsageStatsDBSchema());

        } catch (Exception e) {
            logger.error("Failed to create database: " + e);
            throw new Exception("Failed to create database: " + e.toString(), e);
        }

        try {
            stmt = ConnectDB.getHiveConnection().createStatement();

            logger.info("Creating permanent usagestats DB: " + ConnectDB.getUsagestatsPermanentDBSchema());
            String createPermanentDatabase = "CREATE DATABASE IF NOT EXISTS "
                + ConnectDB.getUsagestatsPermanentDBSchema();
            stmt.executeUpdate(createPermanentDatabase);
            logger.info("Created permanent usagestats DB: " + ConnectDB.getUsagestatsPermanentDBSchema());

        } catch (Exception e) {
            logger.error("Failed to create database: " + e);
            throw new Exception("Failed to create database: " + e.toString(), e);
        }
    }

    public void processLogs() throws Exception {
        try {

            logger.info("ViewsStats processing starts at: " + new Timestamp(System.currentTimeMillis()));
            viewsStats();
            logger.info("ViewsStats processing ends at: " + new Timestamp(System.currentTimeMillis()));

            logger.info("DownloadsStats processing starts at: " + new Timestamp(System.currentTimeMillis()));
            downloadsStats();
            logger.info("DownloadsStats processing ends at: " + new Timestamp(System.currentTimeMillis()));

        } catch (Exception e) {
            logger.error("Failed to process logs: " + e);
            throw new Exception("Failed to process logs: " + e.toString(), e);
        }
    }

    public void viewsStats() throws Exception {
        Statement stmt = ConnectDB.getHiveConnection().createStatement();
        ConnectDB.getHiveConnection().setAutoCommit(false);

        logger.info("Dropping openaire_result_views_monthly_tmp view");
        String drop_result_views_monthly = "DROP VIEW IF EXISTS "
            + ConnectDB.getUsageStatsDBSchema()
            + ".openaire_piwikresult_views_monthly_tmp";
        stmt.executeUpdate(drop_result_views_monthly);
        logger.info("Dropped openaire_result_views_monthly_tmp view");

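        // Monthly result views: aggregates piwiklog rows with action='action' (page views of result landing
        // pages) into one row per (entity_id, month, source), counting total views and the views that came
        // through an OpenAIRE referrer; entity_id is URL-decoded with Hive's reflect() UDF.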
        logger.info("Creating openaire_result_views_monthly_tmp view");
        String create_result_views_monthly = "CREATE OR REPLACE VIEW " + ConnectDB.getUsageStatsDBSchema()
            + ".openaire_result_views_monthly_tmp "
            + "AS SELECT entity_id, "
            + "reflect('java.net.URLDecoder', 'decode', entity_id) AS id,"
            + "COUNT(entity_id) as views, SUM(CASE WHEN referrer_name LIKE '%openaire%' THEN 1 ELSE 0 END) "
            + "AS openaire_referrer, "
            + "CONCAT(YEAR(timestamp), '/', LPAD(MONTH(timestamp), 2, '0')) AS month, source "
            + "FROM " + ConnectDB.getUsageRawDataDBSchema()
            + ".piwiklog where action='action' and (source_item_type='oaItem' or "
            + "source_item_type='repItem') "
            + "GROUP BY entity_id, CONCAT(YEAR(timestamp), '/', LPAD(MONTH(timestamp), 2, '0')), "
            + "source ORDER BY source, entity_id";
        stmt.executeUpdate(create_result_views_monthly);
        logger.info("Created openaire_result_views_monthly_tmp table");

        logger.info("Dropping openaire_views_stats_tmp table");
        String drop_views_stats = "DROP TABLE IF EXISTS "
            + ConnectDB.getUsageStatsDBSchema()
            + ".openaire_views_stats_tmp";
        stmt.executeUpdate(drop_views_stats);
        logger.info("Dropped openaire_views_stats_tmp table");

        logger.info("Creating openaire_views_stats_tmp table");
        String create_views_stats = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema()
            + ".openaire_views_stats_tmp "
            + "AS SELECT 'OpenAIRE' as source, d.id as repository_id, ro.id as result_id, month as date, "
            + "max(views) AS count, max(openaire_referrer) AS openaire "
            + "FROM " + ConnectDB.getUsageStatsDBSchema() + ".openaire_result_views_monthly_tmp p, "
            + ConnectDB.getStatsDBSchema() + ".datasource d, " + ConnectDB.getStatsDBSchema() + ".result_oids ro "
            + "WHERE p.source=d.piwik_id AND p.id=ro.oid AND ro.oid!='200' "
            + "GROUP BY d.id, ro.id, month "
            + "ORDER BY d.id, ro.id, month ";
        stmt.executeUpdate(create_views_stats);
        logger.info("Created openaire_views_stats_tmp table");

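        // Portal page views: the same monthly aggregation, restricted to the portal's own Matomo site id
        // (ExecuteWorkflow.portalMatomoID), feeds openaire_pageviews_stats_tmp.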
        logger.info("Creating openaire_pageviews_stats_tmp table");
        String create_pageviews_stats = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema()
            + ".openaire_pageviews_stats_tmp AS SELECT "
            + "'OpenAIRE' as source, d.id as repository_id, ro.id as result_id, month as date, max(views) AS count "
            + "FROM " + ConnectDB.getUsageStatsDBSchema() + ".openaire_result_views_monthly_tmp p, "
            + ConnectDB.getStatsDBSchema() + ".datasource d, " + ConnectDB.getStatsDBSchema() + ".result_oids ro "
            + "WHERE p.source=" + ExecuteWorkflow.portalMatomoID
            + " AND p.source=d.piwik_id and p.id=ro.id AND ro.oid!='200' "
            + "GROUP BY d.id, ro.id, month "
            + "ORDER BY d.id, ro.id, month ";
        stmt.executeUpdate(create_pageviews_stats);
        logger.info("Created pageviews_stats table");

        stmt.close();
        // ConnectDB.getHiveConnection().close();
    }

    private void downloadsStats() throws Exception {
        Statement stmt = ConnectDB.getHiveConnection().createStatement();
        ConnectDB.getHiveConnection().setAutoCommit(false);

        logger.info("Dropping openaire_result_downloads_monthly_tmp view");
        String drop_result_downloads_monthly = "DROP VIEW IF EXISTS "
            + ConnectDB.getUsageStatsDBSchema()
            + ".openaire_result_downloads_monthly_tmp";
        stmt.executeUpdate(drop_result_downloads_monthly);
        logger.info("Dropped openaire_result_downloads_monthly_tmp view");

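        // Monthly result downloads: same aggregation as in viewsStats(), but over piwiklog rows with
        // action='download'.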
        logger.info("Creating openaire_result_downloads_monthly_tmp view");
        String sql = "CREATE OR REPLACE VIEW " + ConnectDB.getUsageStatsDBSchema()
            + ".openaire_result_downloads_monthly_tmp "
            + "AS SELECT entity_id, "
            + "reflect('java.net.URLDecoder', 'decode', entity_id) AS id,"
            + "COUNT(entity_id) as downloads, "
            + "SUM(CASE WHEN referrer_name LIKE '%openaire%' THEN 1 ELSE 0 END) AS openaire_referrer, "
            + "CONCAT(YEAR(timestamp), '/', LPAD(MONTH(timestamp), 2, '0')) AS month, source "
            + "FROM " + ConnectDB.getUsageRawDataDBSchema() + ".piwiklog where action='download' "
            + "AND (source_item_type='oaItem' OR source_item_type='repItem') "
            + "GROUP BY entity_id, CONCAT(YEAR(timestamp), '/', LPAD(MONTH(timestamp), 2, '0')) , source "
            + "ORDER BY source, entity_id, month";
        stmt.executeUpdate(sql);
        logger.info("Created openaire_result_downloads_monthly_tmp view");

        logger.info("Dropping openaire_downloads_stats_tmp table");
        String drop_views_stats = "DROP TABLE IF EXISTS "
            + ConnectDB.getUsageStatsDBSchema()
            + ".openaire_downloads_stats_tmp";
        stmt.executeUpdate(drop_views_stats);
        logger.info("Dropped openaire_downloads_stats_tmp table");

        logger.info("Creating openaire_downloads_stats_tmp table");
        sql = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema() + ".openaire_downloads_stats_tmp AS "
            + "SELECT 'OpenAIRE' as source, d.id as repository_id, ro.id as result_id, month as date, "
            + "max(downloads) AS count, max(openaire_referrer) AS openaire "
            + "FROM " + ConnectDB.getUsageStatsDBSchema() + ".openaire_result_downloads_monthly_tmp p, "
            + ConnectDB.getStatsDBSchema() + ".datasource d, " + ConnectDB.getStatsDBSchema() + ".result_oids ro "
            + "WHERE p.source=d.piwik_id and p.id=ro.oid AND ro.oid!='200' "
            + "GROUP BY d.id, ro.id, month "
            + "ORDER BY d.id, ro.id, month ";
        stmt.executeUpdate(sql);
        logger.info("Created downloads_stats table");

        logger.info("Dropping openaire_result_downloads_monthly_tmp view");
        sql = "DROP VIEW IF EXISTS " + ConnectDB.getUsageStatsDBSchema() + ".openaire_result_downloads_monthly_tmp";
        logger.info("Dropped openaire_result_downloads_monthly_tmp view ");
        stmt.executeUpdate(sql);

        stmt.close();
        // ConnectDB.getHiveConnection().close();
    }

    public void uploadOldPedocs() throws Exception {
        stmt = ConnectDB.getHiveConnection().createStatement();
        ConnectDB.getHiveConnection().setAutoCommit(false);

        // Dropping Pedocs pedocs_views_stats_tmp table
        logger.info("Dropping Pedocs pedocs_views_stats_tmp table");
        String sql = "DROP TABLE IF EXISTS " + ConnectDB.getUsageStatsDBSchema() + ".pedocs_views_stats_tmp";
        logger.info("Dropped pedocs_views_stats_tmp table ");
        stmt.executeUpdate(sql);

        // Dropping Pedocs pedocs_downloads_stats table
        logger.info("Dropping pedocs_downloads_stats table");
        sql = "DROP TABLE IF EXISTS " + ConnectDB.getUsageStatsDBSchema() + ".pedocs_downloads_stats";
        logger.info("Dropped pedocs_downloads_stats table ");
        stmt.executeUpdate(sql);

        // Creating Pedocs pedocs_views_stats_tmp table
        logger.info("Creating Pedocs pedocs_views_stats_tmp table");
        sql = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema() + ".pedocs_views_stats_tmp AS "
            + "SELECT 'OpenAIRE' as source, 'opendoar____::ab1a4d0dd4d48a2ba1077c4494791306' as repository_id,"
            + "r.id as result_id,date,counter_abstract as count, 0 as openaire "
            + "FROM " + ConnectDB.getUsageRawDataDBSchema() + ".pedocsoldviews p, " + ConnectDB.getStatsDBSchema()
            + ".result_oids r where r.oid=p.identifier";
        stmt.executeUpdate(sql);
        logger.info("Created pedocs_views_stats_tmp table ");

        // Creating Pedocs pedocs_downloads_stats_tmp table
        logger.info("Creating Pedocs pedocs_downloads_stats_tmp table");
        sql = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema() + ".pedocs_downloads_stats_tmp AS "
            + "SELECT 'OpenAIRE' as source, 'opendoar____::ab1a4d0dd4d48a2ba1077c4494791306' as repository_id,"
            + "r.id as result_id, date, counter as count, 0 as openaire "
            + "FROM " + ConnectDB.getUsageRawDataDBSchema() + ".pedocsolddownloads p, " + ConnectDB.getStatsDBSchema()
            + ".result_oids r where r.oid=p.identifier";
        stmt.executeUpdate(sql);
        logger.info("Created pedocs_downloads_stats_tmp table ");

    }

    public void uploadTUDELFTStats() throws Exception {
        stmt = ConnectDB.getHiveConnection().createStatement();
        ConnectDB.getHiveConnection().setAutoCommit(false);

        // Dropping TUDELFT tudelft_result_views_monthly_tmp view
        logger.info("Dropping TUDELFT tudelft_result_views_monthly_tmp view");
        String sql = "DROP view IF EXISTS " + ConnectDB.getUsageStatsDBSchema() + ".tudelft_result_views_monthly_tmp";
        logger.info("Dropped tudelft_result_views_monthly_tmp view ");
        stmt.executeUpdate(sql);

        // Dropping TUDELFT tudelft_result_downloads_monthly_tmp view
        logger.info("Dropping TUDELFT tudelft_result_downloads_monthly_tmp view");
        sql = "DROP view IF EXISTS " + ConnectDB.getUsageStatsDBSchema() + ".tudelft_result_downloads_monthly_tmp";
        logger.info("Dropped tudelft_result_downloads_monthly_tmp view ");
        stmt.executeUpdate(sql);

        // Dropping TUDELFT tudelft_views_stats_tmp table
        logger.info("Dropping TUDELFT tudelft_views_stats_tmp table");
        sql = "DROP TABLE IF EXISTS " + ConnectDB.getUsageStatsDBSchema() + ".tudelft_views_stats_tmp";
        logger.info("Dropped tudelft_views_stats_tmp table ");
        stmt.executeUpdate(sql);

        // Dropping TUDELFT tudelft_downloads_stats_tmp table
        logger.info("Dropping TUDELFT tudelft_downloads_stats_tmp table");
        sql = "DROP TABLE IF EXISTS " + ConnectDB.getUsageStatsDBSchema() + ".tudelft_downloads_stats_tmp";
        logger.info("Dropped tudelft_downloads_stats_tmp table ");
        stmt.executeUpdate(sql);

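        // TU Delft traffic is selected from piwiklog by Matomo site id 252; results are matched to the stats DB
        // by prefixing the URL-decoded entity id with 'tud:' and looking it up in result_oids.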
        // Creating TUDELFT tudelft_result_views_monthly_tmp view
        logger.info("Creating TUDELFT tudelft_result_views_monthly_tmp view");
        sql = "CREATE OR REPLACE VIEW " + ConnectDB.getUsageStatsDBSchema() + ".tudelft_result_views_monthly_tmp "
            + "AS SELECT entity_id, reflect('java.net.URLDecoder', 'decode', entity_id) AS id, "
            + "COUNT(entity_id) as views, SUM(CASE WHEN referrer_name LIKE '%openaire%' THEN 1 ELSE 0 END) AS openaire_referrer, "
            + "CONCAT(YEAR(timestamp), '/', LPAD(MONTH(timestamp), 2, '0')) AS month, source "
            + "FROM " + ConnectDB.getUsageRawDataDBSchema() + ".piwiklog "
            + "WHERE action='action' and (source_item_type='oaItem' or source_item_type='repItem') and source=252 "
            + "GROUP BY entity_id, CONCAT(YEAR(timestamp), '/', LPAD(MONTH(timestamp), 2, '0')), source ORDER BY source, entity_id";
        stmt.executeUpdate(sql);
        logger.info("Created tudelft_result_views_monthly_tmp view ");

        // Creating TUDELFT tudelft_views_stats_tmp table
        logger.info("Creating TUDELFT tudelft_views_stats_tmp table");
        sql = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema() + ".tudelft_views_stats_tmp AS "
            + "SELECT 'OpenAIRE' as source, d.id as repository_id, ro.id as result_id, month as date, "
            + "max(views) AS count, max(openaire_referrer) AS openaire FROM " + ConnectDB.getUsageStatsDBSchema()
            + ".tudelft_result_views_monthly_tmp p, "
            + ConnectDB.getStatsDBSchema() + ".datasource d, " + ConnectDB.getStatsDBSchema() + ".result_oids ro "
            + "WHERE concat('tud:',p.id)=ro.oid and d.id='opendoar____::c9892a989183de32e976c6f04e700201' "
            + "GROUP BY d.id, ro.id, month ORDER BY d.id, ro.id";
        stmt.executeUpdate(sql);
        logger.info("Created TUDELFT tudelft_views_stats_tmp table");

        // Creating TUDELFT tudelft_result_downloads_monthly_tmp view
        logger.info("Creating TUDELFT tudelft_result_downloads_monthly_tmp view");
        sql = "CREATE OR REPLACE VIEW " + ConnectDB.getUsageStatsDBSchema() + ".tudelft_result_downloads_monthly_tmp "
            + "AS SELECT entity_id, reflect('java.net.URLDecoder', 'decode', entity_id) AS id, "
            + "COUNT(entity_id) as views, SUM(CASE WHEN referrer_name LIKE '%openaire%' THEN 1 ELSE 0 END) AS openaire_referrer, "
            + "CONCAT(YEAR(timestamp), '/', LPAD(MONTH(timestamp), 2, '0')) AS month, source "
            + "FROM " + ConnectDB.getUsageRawDataDBSchema() + ".piwiklog "
            + "WHERE action='download' and (source_item_type='oaItem' or source_item_type='repItem') and source=252 "
            + "GROUP BY entity_id, CONCAT(YEAR(timestamp), '/', LPAD(MONTH(timestamp), 2, '0')), source ORDER BY source, entity_id";
        stmt.executeUpdate(sql);
        logger.info("Created tudelft_result_downloads_monthly_tmp view ");

        // Creating TUDELFT tudelft_downloads_stats_tmp table
        logger.info("Creating TUDELFT tudelft_downloads_stats_tmp table");
        sql = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema() + ".tudelft_downloads_stats_tmp AS "
            + "SELECT 'OpenAIRE' as source, d.id as repository_id, ro.id as result_id, month as date, "
            + "max(views) AS count, max(openaire_referrer) AS openaire FROM " + ConnectDB.getUsageStatsDBSchema()
            + ".tudelft_result_downloads_monthly_tmp p, "
            + ConnectDB.getStatsDBSchema() + ".datasource d, " + ConnectDB.getStatsDBSchema() + ".result_oids ro "
            + "WHERE concat('tud:',p.id)=ro.oid and d.id='opendoar____::c9892a989183de32e976c6f04e700201' "
            + "GROUP BY d.id, ro.id, month ORDER BY d.id, ro.id";
        stmt.executeUpdate(sql);
        logger.info("Created TUDELFT tudelft_downloads_stats_tmp table");

        // Dropping TUDELFT tudelft_result_views_monthly_tmp view
        logger.info("Dropping TUDELFT tudelft_result_views_monthly_tmp view");
        sql = "DROP view IF EXISTS " + ConnectDB.getUsageStatsDBSchema() + ".tudelft_result_views_monthly_tmp";
        logger.info("Dropped tudelft_result_views_monthly_tmp view ");
        stmt.executeUpdate(sql);

        // Dropping TUDELFT tudelft_result_downloads_monthly_tmp view
        logger.info("Dropping TUDELFT tudelft_result_downloads_monthly_tmp view");
        sql = "DROP view IF EXISTS " + ConnectDB.getUsageStatsDBSchema() + ".tudelft_result_downloads_monthly_tmp";
        logger.info("Dropped tudelft_result_downloads_monthly_tmp view ");
        stmt.executeUpdate(sql);

    }

    public void finalizeStats() throws Exception {
        stmt = ConnectDB.getHiveConnection().createStatement();
        ConnectDB.getHiveConnection().setAutoCommit(false);

        // Dropping views_stats table
        logger.info("Dropping views_stats table");
        String sql = "DROP TABLE IF EXISTS " + ConnectDB.getUsageStatsDBSchema() + ".views_stats";
        logger.info("Dropped views_stats table ");
        stmt.executeUpdate(sql);

        // Dropping downloads_stats table
        logger.info("Dropping downloads_stats table");
        sql = "DROP TABLE IF EXISTS " + ConnectDB.getUsageStatsDBSchema() + ".downloads_stats";
        logger.info("Dropped downloads_stats table ");
        stmt.executeUpdate(sql);

        // Dropping page_views_stats table
        logger.info("Dropping pageviews_stats table");
        sql = "DROP TABLE IF EXISTS " + ConnectDB.getUsageStatsDBSchema() + ".pageviews_stats";
        logger.info("Dropped pageviews_stats table ");
        stmt.executeUpdate(sql);

        // Dropping usage_stats table
        logger.info("Dropping usage_stats table");
        sql = "DROP TABLE IF EXISTS " + ConnectDB.getUsageStatsDBSchema() + ".usage_stats";
        logger.info("Dropped usage_stats table ");
        stmt.executeUpdate(sql);

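        // The final views_stats table is created with the schema of the OpenAIRE tmp table (stored as PARQUET)
        // and then filled from the per-source tmp tables: OpenAIRE, PeDocs, TU Delft and LaReferencia.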
        // Creating views_stats table
        logger.info("Creating views_stats table");
        String createViewsStats = "CREATE TABLE IF NOT EXISTS "
            + ConnectDB.getUsageStatsDBSchema()
            + ".views_stats "
            + "LIKE " + ConnectDB.getUsageStatsDBSchema() + ".openaire_views_stats_tmp STORED AS PARQUET";
        stmt.executeUpdate(createViewsStats);
        logger.info("Created views_stats table");

        // Inserting OpenAIRE views stats
        logger.info("Inserting Openaire data to views_stats");
        sql = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".views_stats "
            + "SELECT * FROM " + ConnectDB.getUsageStatsDBSchema() + ".openaire_views_stats_tmp";
        stmt.executeUpdate(sql);
        logger.info("Openaire views updated to views_stats");

        // Inserting Pedocs old views stats
        logger.info("Inserting Pedocs old data to views_stats");
        sql = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".views_stats "
            + "SELECT * FROM " + ConnectDB.getUsageStatsDBSchema() + ".pedocs_views_stats_tmp";
        stmt.executeUpdate(sql);
        logger.info("Pedocs views updated to views_stats");

        // Inserting TUDELFT views stats
        logger.info("Inserting TUDELFT data to views_stats");
        sql = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".views_stats "
            + "SELECT * FROM " + ConnectDB.getUsageStatsDBSchema() + ".tudelft_views_stats_tmp";
        stmt.executeUpdate(sql);
        logger.info("TUDELFT views updated to views_stats");

        // Inserting Lareferencia views stats
        logger.info("Inserting LaReferencia data to views_stats");
        sql = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".views_stats "
            + "SELECT * FROM " + ConnectDB.getUsageStatsDBSchema() + ".la_views_stats_tmp";
        stmt.executeUpdate(sql);
        logger.info("LaReferencia views updated to views_stats");

        logger.info("Creating downloads_stats table");
        String createDownloadsStats = "CREATE TABLE IF NOT EXISTS "
            + ConnectDB.getUsageStatsDBSchema()
            + ".downloads_stats "
            + "LIKE " + ConnectDB.getUsageStatsDBSchema() + ".openaire_downloads_stats_tmp STORED AS PARQUET";
        stmt.executeUpdate(createDownloadsStats);
        logger.info("Created downloads_stats table");

        // Inserting OpenAIRE downloads stats
        logger.info("Inserting OpenAIRE data to downloads_stats");
        sql = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".downloads_stats "
            + "SELECT * FROM " + ConnectDB.getUsageStatsDBSchema() + ".openaire_downloads_stats_tmp";
        stmt.executeUpdate(sql);
        logger.info("Inserted OpenAIRE data to downloads_stats");

        // Inserting Pedocs old downloads stats
        logger.info("Inserting PeDocs old data to downloads_stats");
        sql = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".downloads_stats "
            + "SELECT * FROM " + ConnectDB.getUsageStatsDBSchema() + ".pedocs_downloads_stats_tmp";
        stmt.executeUpdate(sql);
        logger.info("Inserted Pedocs data to downloads_stats");

        // Inserting TUDELFT downloads stats
        logger.info("Inserting TUDELFT old data to downloads_stats");
        sql = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".downloads_stats "
            + "SELECT * FROM " + ConnectDB.getUsageStatsDBSchema() + ".tudelft_downloads_stats_tmp";
        stmt.executeUpdate(sql);
        logger.info("Inserted TUDELFT data to downloads_stats");

        // Inserting Lareferencia downloads stats
        logger.info("Inserting LaReferencia data to downloads_stats");
        sql = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".downloads_stats "
            + "SELECT * FROM " + ConnectDB.getUsageStatsDBSchema() + ".la_downloads_stats_tmp";
        stmt.executeUpdate(sql);
        logger.info("Lareferencia downloads updated to downloads_stats");

        // Inserting IRUS downloads stats
        logger.info("Inserting IRUS data to downloads_stats");
        sql = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".downloads_stats "
            + "SELECT * FROM " + ConnectDB.getUsageStatsDBSchema() + ".irus_downloads_stats_tmp";
        stmt.executeUpdate(sql);
        logger.info("IRUS downloads updated to downloads_stats");

        // Inserting SARC-OJS downloads stats
        logger.info("Inserting SARC data to downloads_stats");
        sql = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".downloads_stats "
            + "SELECT * FROM " + ConnectDB.getUsageStatsDBSchema() + ".sarc_downloads_stats_tmp";
        stmt.executeUpdate(sql);
        logger.info("SARC-OJS downloads updated to downloads_stats");

        logger.info("Creating pageviews_stats table");
        String create_pageviews_stats = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema()
            + ".pageviews_stats "
            + "LIKE " + ConnectDB.getUsageStatsDBSchema() + ".openaire_pageviews_stats_tmp STORED AS PARQUET";
        stmt.executeUpdate(create_pageviews_stats);
        logger.info("Created pageviews_stats table");

        // Inserting OpenAIRE views stats from Portal
        logger.info("Inserting data to page_views_stats");
        sql = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".pageviews_stats "
            + "SELECT * FROM " + ConnectDB.getUsageStatsDBSchema() + ".openaire_pageviews_stats_tmp";
        stmt.executeUpdate(sql);

        logger.info("Dropping full_dates table");
        String dropFullDates = "DROP TABLE IF EXISTS "
            + ConnectDB.getUsageStatsDBSchema()
            + ".full_dates";
        stmt.executeUpdate(dropFullDates);
        logger.info("Dropped full_dates table");

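        // full_dates enumerates every month from 2016-01 up to the current month as 'yyyy/MM' strings:
        // diffMonth counts the months since January 2016, and posexplode(split(space(diffMonth), ' '))
        // yields diffMonth + 1 rows whose position i is added to the start date via add_months().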
        Calendar startCalendar = Calendar.getInstance();
        startCalendar.setTime(new SimpleDateFormat("yyyy-MM-dd").parse("2016-01-01"));
        Calendar endCalendar = Calendar.getInstance();
        int diffYear = endCalendar.get(Calendar.YEAR) - startCalendar.get(Calendar.YEAR);
        int diffMonth = diffYear * 12 + endCalendar.get(Calendar.MONTH) - startCalendar.get(Calendar.MONTH);

        logger.info("Creating full_dates table");
        sql = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema() + ".full_dates AS "
            + "SELECT from_unixtime(unix_timestamp(cast(add_months(from_date,i) AS DATE)), 'yyyy/MM') AS txn_date "
            + "FROM (SELECT DATE '2016-01-01' AS from_date) p "
            + "LATERAL VIEW "
            + "posexplode(split(space(" + diffMonth + "),' ')) pe AS i,x";
        stmt.executeUpdate(sql);
        logger.info("Created full_dates table");

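        // usage_stats combines downloads_stats and views_stats with a FULL OUTER JOIN on
        // (source, repository_id, result_id, date); records present in only one of the two tables are kept,
        // with the missing counter defaulting to 0 through coalesce().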
        logger.info("Inserting data to usage_stats");
        sql = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema() + ".usage_stats AS "
            + "SELECT coalesce(ds.source, vs.source) as source, "
            + "coalesce(ds.repository_id, vs.repository_id) as repository_id, "
            + "coalesce(ds.result_id, vs.result_id) as result_id, coalesce(ds.date, vs.date) as date, "
            + "coalesce(ds.count, 0) as downloads, coalesce(vs.count, 0) as views, "
            + "coalesce(ds.openaire, 0) as openaire_downloads, "
            + "coalesce(vs.openaire, 0) as openaire_views "
            + "FROM " + ConnectDB.getUsageStatsDBSchema() + ".downloads_stats AS ds FULL OUTER JOIN "
            + ConnectDB.getUsageStatsDBSchema() + ".views_stats AS vs ON ds.source=vs.source "
            + "AND ds.repository_id=vs.repository_id AND ds.result_id=vs.result_id AND ds.date=vs.date";
        stmt.executeUpdate(sql);
        logger.info("Inserted data to usage_stats");

        logger.info("Building views at permanent DB starts at: " + new Timestamp(System.currentTimeMillis()));

        logger.info("Dropping view views_stats on permanent usagestats DB");
        sql = "DROP VIEW IF EXISTS " + ConnectDB.getUsagestatsPermanentDBSchema() + ".views_stats";
        stmt.executeUpdate(sql);
        logger.info("Dropped view views_stats on permanent usagestats DB");

        logger.info("Create view views_stats on permanent usagestats DB");
        sql = "CREATE VIEW IF NOT EXISTS " + ConnectDB.getUsagestatsPermanentDBSchema() + ".views_stats"
            + " AS SELECT * FROM " + ConnectDB.getUsageStatsDBSchema() + ".views_stats";
        stmt.executeUpdate(sql);
        logger.info("Created view views_stats on permanent usagestats DB");

        logger.info("Dropping view pageviews_stats on permanent usagestats DB");
        sql = "DROP VIEW IF EXISTS " + ConnectDB.getUsagestatsPermanentDBSchema() + ".pageviews_stats";
        stmt.executeUpdate(sql);
        logger.info("Dropped view pageviews_stats on permanent usagestats DB");

        logger.info("Create view pageviews_stats on permanent usagestats DB");
        sql = "CREATE VIEW IF NOT EXISTS " + ConnectDB.getUsagestatsPermanentDBSchema() + ".pageviews_stats"
            + " AS SELECT * FROM " + ConnectDB.getUsageStatsDBSchema() + ".pageviews_stats";
        stmt.executeUpdate(sql);
        logger.info("Created view pageviews_stats on permanent usagestats DB");

        logger.info("Dropping view downloads_stats on permanent usagestats DB");
        sql = "DROP VIEW IF EXISTS " + ConnectDB.getUsagestatsPermanentDBSchema() + ".downloads_stats";
        stmt.executeUpdate(sql);
        logger.info("Dropped view on downloads_stats on permanent usagestats DB");

        logger.info("Create view on downloads_stats on permanent usagestats DB");
        sql = "CREATE VIEW IF NOT EXISTS " + ConnectDB.getUsagestatsPermanentDBSchema() + ".downloads_stats"
            + " AS SELECT * FROM " + ConnectDB.getUsageStatsDBSchema() + ".downloads_stats";
        stmt.executeUpdate(sql);
        logger.info("Created view on downloads_stats on permanent usagestats DB");

        logger.info("Dropping view usage_stats on permanent usagestats DB");
        sql = "DROP VIEW IF EXISTS " + ConnectDB.getUsagestatsPermanentDBSchema() + ".usage_stats";
        stmt.executeUpdate(sql);
        logger.info("Dropped view on usage_stats on permanent usagestats DB");

        logger.info("Create view on usage_stats on permanent usagestats DB");
        sql = "CREATE VIEW IF NOT EXISTS " + ConnectDB.getUsagestatsPermanentDBSchema() + ".usage_stats"
            + " AS SELECT * FROM " + ConnectDB.getUsageStatsDBSchema() + ".usage_stats";
        stmt.executeUpdate(sql);
        logger.info("Created view on usage_stats on permanent usagestats DB");

        logger.info("Building views at permanent DB ends at: " + new Timestamp(System.currentTimeMillis()));

        stmt.close();
        ConnectDB.getHiveConnection().close();
    }

    private Connection getConnection() throws SQLException {
        return ConnectDB.getHiveConnection();
    }
}
@ -0,0 +1,107 @@

package eu.dnetlib.oa.graph.usagestatsbuild.export;

import java.io.*;
// import java.io.BufferedReader;
// import java.io.InputStreamReader;
import java.net.URL;
import java.net.URLConnection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * @author D. Pierrakos, S. Zoupanos
 */
public class SarcStats {

    private Statement stmtHive = null;
    private Statement stmtImpala = null;

    private static final Logger logger = LoggerFactory.getLogger(SarcStats.class);

    public SarcStats() throws Exception {
        // createTables();
    }

    private void createTables() throws Exception {
        try {

            stmtHive = ConnectDB.getHiveConnection().createStatement();
            String sqlCreateTableSushiLog = "CREATE TABLE IF NOT EXISTS sushilog(source TEXT, repository TEXT, rid TEXT, date TEXT, metric_type TEXT, count INT, PRIMARY KEY(source, repository, rid, date, metric_type));";
            stmtHive.executeUpdate(sqlCreateTableSushiLog);

            // String sqlCopyPublicSushiLog="INSERT INTO sushilog SELECT * FROM public.sushilog;";
            // stmt.executeUpdate(sqlCopyPublicSushiLog);
            String sqlcreateRuleSushiLog = "CREATE OR REPLACE RULE ignore_duplicate_inserts AS "
                + " ON INSERT TO sushilog "
                + " WHERE (EXISTS ( SELECT sushilog.source, sushilog.repository,"
                + "sushilog.rid, sushilog.date "
                + "FROM sushilog "
                + "WHERE sushilog.source = new.source AND sushilog.repository = new.repository AND sushilog.rid = new.rid AND sushilog.date = new.date AND sushilog.metric_type = new.metric_type)) DO INSTEAD NOTHING;";
            stmtHive.executeUpdate(sqlcreateRuleSushiLog);
            String createSushiIndex = "create index if not exists sushilog_duplicates on sushilog(source, repository, rid, date, metric_type);";
            stmtHive.executeUpdate(createSushiIndex);

            stmtHive.close();
            ConnectDB.getHiveConnection().close();
            logger.info("Sushi Tables Created");
        } catch (Exception e) {
            logger.error("Failed to create tables: " + e);
            throw new Exception("Failed to create tables: " + e.toString(), e);
        }
    }

    public void processSarc() throws Exception {
        Statement stmt = ConnectDB.getHiveConnection().createStatement();
        ConnectDB.getHiveConnection().setAutoCommit(false);

        logger.info("Creating sarc_downloads_stats_tmp table");
        String createDownloadsStats = "CREATE TABLE IF NOT EXISTS " + ConnectDB.getUsageStatsDBSchema()
            + ".sarc_downloads_stats_tmp "
            + "(`source` string, "
            + "`repository_id` string, "
            + "`result_id` string, "
            + "`date` string, "
            + "`count` bigint, "
            + "`openaire` bigint)";
        stmt.executeUpdate(createDownloadsStats);
        logger.info("Created sarc_downloads_stats_tmp table");

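        // SARC-OJS rows from the raw sushilog are matched to OpenAIRE results via the DOI (result_pids with
        // type 'Digital Object Identifier') and to datasources whose id contains 'sarcservicod'; only the
        // 'ft_total' metric is loaded, with the openaire counter fixed to 0.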
        logger.info("Inserting into sarc_downloads_stats_tmp");
        String insertSarcStats = "INSERT INTO " + ConnectDB.getUsageStatsDBSchema() + ".sarc_downloads_stats_tmp "
            + "SELECT s.source, d.id AS repository_id, "
            + "ro.id as result_id, CONCAT(CAST(YEAR(`date`) AS STRING), '/', "
            + "LPAD(CAST(MONTH(`date`) AS STRING), 2, '0')) AS `date`, s.count, '0' "
            + "FROM " + ConnectDB.getUsageRawDataDBSchema() + ".sushilog s, "
            + ConnectDB.getStatsDBSchema() + ".datasource_oids d, "
            + ConnectDB.getStatsDBSchema() + ".result_pids ro "
            + "WHERE d.oid LIKE CONCAT('%', s.repository, '%') AND d.id like CONCAT('%', 'sarcservicod', '%') "
            + "AND s.rid=ro.pid AND ro.type='Digital Object Identifier' AND s.metric_type='ft_total' AND s.source='SARC-OJS'";
        stmt.executeUpdate(insertSarcStats);
        logger.info("Inserted into sarc_downloads_stats_tmp");

        stmt.close();
        // ConnectDB.getHiveConnection().close();
    }

}
@ -0,0 +1,127 @@

package eu.dnetlib.oa.graph.usagestatsbuild.export;

import java.io.IOException;
import java.sql.SQLException;
import java.sql.Statement;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Main class for downloading and processing Usage statistics
 *
 * @author D. Pierrakos, S. Zoupanos
 */
public class UsageStatsExporter {

    public UsageStatsExporter() {

    }

    private static final Logger logger = LoggerFactory.getLogger(UsageStatsExporter.class);

    public void export() throws Exception {

        logger.info("Initialising DB properties");
        ConnectDB.init();

        // runImpalaQuery();
        PiwikStatsDB piwikstatsdb = new PiwikStatsDB();

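        // Every stage below is gated by a boolean workflow parameter (see ExecuteWorkflow), so individual
        // steps of the build can be re-run without repeating the whole pipeline.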
        logger.info("Re-creating database and tables");
        if (ExecuteWorkflow.recreateDbAndTables) {
            piwikstatsdb.recreateDBAndTables();
            logger.info("DB-Tables are created ");
        }
        // else {
        // piwikstatsdb.createTmpTables();
        // logger.info("TmpTables are created ");
        // }
        if (ExecuteWorkflow.processPiwikLogs) {
            logger.info("Processing Piwik logs");
            piwikstatsdb.processLogs();
            logger.info("Piwik logs Done");
            logger.info("Processing Pedocs Old Stats");
            piwikstatsdb.uploadOldPedocs();
            logger.info("Processing Pedocs Old Stats Done");
            logger.info("Processing TUDELFT Stats");
            piwikstatsdb.uploadTUDELFTStats();
            logger.info("Processing TUDELFT Stats Done");

        }

        LaReferenciaStats lastats = new LaReferenciaStats();

        if (ExecuteWorkflow.processLaReferenciaLogs) {
            logger.info("Processing LaReferencia logs");
            lastats.processLogs();
            logger.info("LaReferencia logs done");
        }

        IrusStats irusstats = new IrusStats();

        if (ExecuteWorkflow.irusProcessStats) {
            logger.info("Processing IRUS");
            irusstats.processIrusStats();
            logger.info("Irus done");
        }

        SarcStats sarcStats = new SarcStats();

        if (ExecuteWorkflow.sarcProcessStats) {
            sarcStats.processSarc();
        }
        logger.info("Sarc done");

        // finalize usagestats
        if (ExecuteWorkflow.finalizeStats) {
            piwikstatsdb.finalizeStats();
            logger.info("Finalized stats");
        }

        // Make the tables available to Impala
        if (ExecuteWorkflow.finalTablesVisibleToImpala) {
            logger.info("Making tables visible to Impala");
            invalidateMetadata();
        }

        logger.info("End");
    }

    private void invalidateMetadata() throws SQLException {
        Statement stmt = null;

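        // Impala caches the Hive metastore, so each freshly (re)built table and view is invalidated
        // explicitly to make the new data visible to Impala clients.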
        stmt = ConnectDB.getImpalaConnection().createStatement();

        String sql = "INVALIDATE METADATA " + ConnectDB.getUsageStatsDBSchema() + ".downloads_stats";
        stmt.executeUpdate(sql);

        sql = "INVALIDATE METADATA " + ConnectDB.getUsageStatsDBSchema() + ".views_stats";
        stmt.executeUpdate(sql);

        sql = "INVALIDATE METADATA " + ConnectDB.getUsageStatsDBSchema() + ".usage_stats";
        stmt.executeUpdate(sql);

        sql = "INVALIDATE METADATA " + ConnectDB.getUsageStatsDBSchema() + ".pageviews_stats";
        stmt.executeUpdate(sql);

        sql = "INVALIDATE METADATA " + ConnectDB.getUsagestatsPermanentDBSchema() + ".downloads_stats";
        stmt.executeUpdate(sql);

        sql = "INVALIDATE METADATA " + ConnectDB.getUsagestatsPermanentDBSchema() + ".views_stats";
        stmt.executeUpdate(sql);

        sql = "INVALIDATE METADATA " + ConnectDB.getUsagestatsPermanentDBSchema() + ".usage_stats";
        stmt.executeUpdate(sql);

        sql = "INVALIDATE METADATA " + ConnectDB.getUsagestatsPermanentDBSchema() + ".pageviews_stats";
        stmt.executeUpdate(sql);

        stmt.close();
        ConnectDB.getHiveConnection().close();
    }
}
@ -0,0 +1,128 @@
[
  {
    "paramName": "rlp",
    "paramLongName": "repoLogPath",
    "paramDescription": "path of the repository Piwik logs",
    "paramRequired": true
  },
  {
    "paramName": "plp",
    "paramLongName": "portalLogPath",
    "paramDescription": "path of the OpenAIRE portal Piwik logs",
    "paramRequired": true
  },
  {
    "paramName": "pmi",
    "paramLongName": "portalMatomoID",
    "paramDescription": "Matomo (Piwik) site ID of the OpenAIRE portal",
    "paramRequired": true
  },
  {
    "paramName": "iukrp",
    "paramLongName": "irusUKReportPath",
    "paramDescription": "path of the downloaded IRUS-UK reports",
    "paramRequired": true
  },
  {
    "paramName": "srpa",
    "paramLongName": "sarcsReportPathArray",
    "paramDescription": "path of the downloaded SARC-OJS reports (array format)",
    "paramRequired": true
  },
  {
    "paramName": "srpna",
    "paramLongName": "sarcsReportPathNonArray",
    "paramDescription": "path of the downloaded SARC-OJS reports (non-array format)",
    "paramRequired": true
  },
  {
    "paramName": "llp",
    "paramLongName": "lareferenciaLogPath",
    "paramDescription": "path of the LaReferencia logs",
    "paramRequired": true
  },
  {
    "paramName": "dbhu",
    "paramLongName": "dbHiveUrl",
    "paramDescription": "JDBC URL of the Hive server",
    "paramRequired": true
  },
  {
    "paramName": "dbiu",
    "paramLongName": "dbImpalaUrl",
    "paramDescription": "JDBC URL of the Impala server",
    "paramRequired": true
  },
  {
    "paramName": "urdbs",
    "paramLongName": "usageRawDataDBSchema",
    "paramDescription": "schema holding the raw usage data",
    "paramRequired": true
  },
  {
    "paramName": "usdbs",
    "paramLongName": "usageStatsDBSchema",
    "paramDescription": "schema where the usage stats tables are built",
    "paramRequired": true
  },
  {
    "paramName": "sdbs",
    "paramLongName": "statsDBSchema",
    "paramDescription": "schema of the graph stats DB",
    "paramRequired": true
  },
  {
    "paramName": "uspdbs",
    "paramLongName": "usagestatsPermanentDBSchema",
    "paramDescription": "permanent schema exposing the usage stats as views",
    "paramRequired": true
  },
  {
    "paramName": "rdbt",
    "paramLongName": "recreateDbAndTables",
    "paramDescription": "Re-create database and initial tables?",
    "paramRequired": true
  },
  {
    "paramName": "ppwl",
    "paramLongName": "processPiwikLogs",
    "paramDescription": "Process the piwiklogs (create & fill in the needed tables and process the data) based on the downloaded data",
    "paramRequired": true
  },
  {
    "paramName": "plrl",
    "paramLongName": "processLaReferenciaLogs",
    "paramDescription": "Process the La Referencia logs (create & fill in the needed tables and process the data) based on the downloaded data",
    "paramRequired": true
  },
  {
    "paramName": "ipr",
    "paramLongName": "irusProcessStats",
    "paramDescription": "Irus section: Process stats?",
    "paramRequired": true
  },
  {
    "paramName": "ipr",
    "paramLongName": "sarcProcessStats",
    "paramDescription": "Sarc section: Process stats?",
    "paramRequired": true
  },
  {
    "paramName": "fs",
    "paramLongName": "finalizeStats",
    "paramDescription": "Create the usage_stats table?",
    "paramRequired": true
  },
  {
    "paramName": "ftvi",
    "paramLongName": "finalTablesVisibleToImpala",
    "paramDescription": "Make the usage_stats, views_stats and downloads_stats tables visible to Impala",
    "paramRequired": true
  },
  {
    "paramName": "nodt",
    "paramLongName": "numberOfDownloadThreads",
    "paramDescription": "Number of download threads",
    "paramRequired": true
  }
]
@ -0,0 +1,38 @@
<configuration>
    <property>
        <name>jobTracker</name>
        <value>${jobTracker}</value>
    </property>
    <property>
        <name>nameNode</name>
        <value>${nameNode}</value>
    </property>
    <property>
        <name>oozie.use.system.libpath</name>
        <value>true</value>
    </property>
    <property>
        <name>oozie.action.sharelib.for.spark</name>
        <value>spark2</value>
    </property>
    <property>
        <name>hiveMetastoreUris</name>
        <value>thrift://iis-cdh5-test-m3.ocean.icm.edu.pl:9083</value>
    </property>
    <property>
        <name>hiveJdbcUrl</name>
        <value>jdbc:hive2://iis-cdh5-test-m3.ocean.icm.edu.pl:10000/;UseNativeQuery=1</value>
    </property>
    <property>
        <name>impalaJdbcUrl</name>
        <value>jdbc:hive2://iis-cdh5-test-gw.ocean.icm.edu.pl:21050/;auth=noSasl;</value>
    </property>
    <property>
        <name>oozie.wf.workflow.notification.url</name>
        <value>{serviceUrl}/v1/oozieNotification/jobUpdate?jobId=$jobId%26status=$status</value>
    </property>
    <property>
        <name>oozie.use.system.libpath</name>
        <value>true</value>
    </property>
</configuration>
@ -0,0 +1,73 @@
<workflow-app name="Usage Graph Stats" xmlns="uri:oozie:workflow:0.5">
    <parameters>
        <property>
            <name>hiveMetastoreUris</name>
            <description>Hive server metastore URIs</description>
        </property>
        <property>
            <name>hiveJdbcUrl</name>
            <description>Hive server jdbc url</description>
        </property>
        <property>
            <name>impalaJdbcUrl</name>
            <description>Impala server jdbc url</description>
        </property>
    </parameters>

    <global>
        <job-tracker>${jobTracker}</job-tracker>
        <name-node>${nameNode}</name-node>
        <configuration>
            <property>
                <name>hive.metastore.uris</name>
                <value>${hiveMetastoreUris}</value>
            </property>
            <property>
                <name>mapreduce.job.queuename</name>
                <value>${queueName}</value>
            </property>
            <property>
                <name>oozie.launcher.mapred.job.queue.name</name>
                <value>${oozieLauncherQueueName}</value>
            </property>
        </configuration>
    </global>

    <start to="Step1"/>

    <kill name="Kill">
        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
    </kill>

    <action name='Step1'>
        <java>
            <main-class>eu.dnetlib.oa.graph.usagestatsbuild.export.ExecuteWorkflow</main-class>
            <arg>--repoLogPath</arg><arg>${repoLogPath}</arg>
            <arg>--portalLogPath</arg><arg>${portalLogPath}</arg>
            <arg>--portalMatomoID</arg><arg>${portalMatomoID}</arg>
            <arg>--irusUKReportPath</arg><arg>${irusUKReportPath}</arg>
            <arg>--sarcsReportPathArray</arg><arg>${sarcsReportPathArray}</arg>
            <arg>--sarcsReportPathNonArray</arg><arg>${sarcsReportPathNonArray}</arg>
            <arg>--lareferenciaLogPath</arg><arg>${lareferenciaLogPath}</arg>
            <arg>--dbHiveUrl</arg><arg>${hiveJdbcUrl}</arg>
            <arg>--dbImpalaUrl</arg><arg>${impalaJdbcUrl}</arg>
            <arg>--usageRawDataDBSchema</arg><arg>${usageRawDataDBSchema}</arg>
            <arg>--usageStatsDBSchema</arg><arg>${usageStatsDBSchema}</arg>
            <arg>--usagestatsPermanentDBSchema</arg><arg>${usagestatsPermanentDBSchema}</arg>
            <arg>--statsDBSchema</arg><arg>${statsDBSchema}</arg>
            <arg>--recreateDbAndTables</arg><arg>${recreateDbAndTables}</arg>
            <arg>--processPiwikLogs</arg><arg>${processPiwikLogs}</arg>
            <arg>--processLaReferenciaLogs</arg><arg>${processLaReferenciaLogs}</arg>
            <arg>--irusProcessStats</arg><arg>${irusProcessStats}</arg>
            <arg>--sarcProcessStats</arg><arg>${sarcProcessStats}</arg>
            <arg>--finalizeStats</arg><arg>${finalizeStats}</arg>
            <arg>--finalTablesVisibleToImpala</arg><arg>${finalTablesVisibleToImpala}</arg>
            <arg>--numberOfDownloadThreads</arg><arg>${numberOfDownloadThreads}</arg>
            <capture-output/>
        </java>
        <ok to="End" />
        <error to="Kill" />
    </action>

    <end name="End"/>
</workflow-app>