forked from D-Net/dnet-hadoop

commit 8e9493fad9: merging with branch beta
@@ -40,6 +40,7 @@ public class Constants {
 	public static final String SDG_CLASS_NAME = "Sustainable Development Goals";
 
 	public static final String NULL = "NULL";
+	public static final String NA = "N/A";
 
 	public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
 
@@ -61,10 +62,16 @@ public class Constants {
 			.map((MapFunction<String, R>) value -> OBJECT_MAPPER.readValue(value, clazz), Encoders.bean(clazz));
 	}
 
-	public static Subject getSubject(String sbj, String classid, String classname,
-		String diqualifierclassid) {
-		if (sbj == null || sbj.equals(NULL))
+	public static Subject getSubject(String sbj, String classid, String classname, String diqualifierclassid,
+		Boolean split) {
+		if (sbj == null || sbj.equals(NULL) || sbj.startsWith(NA))
 			return null;
+		String trust = "";
+		String subject = sbj;
+		if (split) {
+			sbj = subject.split("@@")[0];
+			trust = subject.split("@@")[1];
+		}
 		Subject s = new Subject();
 		s.setValue(sbj);
 		s
@@ -89,9 +96,14 @@ public class Constants {
 					UPDATE_CLASS_NAME,
 					ModelConstants.DNET_PROVENANCE_ACTIONS,
 					ModelConstants.DNET_PROVENANCE_ACTIONS),
-				""));
+				trust));
 
 		return s;
+	}
+
+	public static Subject getSubject(String sbj, String classid, String classname,
+		String diqualifierclassid) {
+		return getSubject(sbj, classid, classname, diqualifierclassid, false);
 	}
 
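Note on the hunk above: with split set to true, getSubject now expects the subject string packed as value@@trust. A minimal illustrative sketch of that convention, using a sample value from the new test resources (not code from the commit):

    // value@@trust packing consumed by getSubject(..., split = true)
    String packed = "030204 cardiovascular system & hematology@@0.5101401805877686";
    Subject s = Constants
        .getSubject(packed, FOS_CLASS_ID, FOS_CLASS_NAME, UPDATE_SUBJECT_FOS_CLASS_ID, true);
    // s.getValue()                -> "030204 cardiovascular system & hematology"
    // s.getDataInfo().getTrust()  -> "0.5101401805877686"
    // strings equal to NULL or starting with NA ("N/A") now yield null instead of a Subject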
@@ -75,9 +75,12 @@ public class GetFOSSparkJob implements Serializable {
 		fosData.map((MapFunction<Row, FOSDataModel>) r -> {
 			FOSDataModel fosDataModel = new FOSDataModel();
 			fosDataModel.setDoi(r.getString(0).toLowerCase());
-			fosDataModel.setLevel1(r.getString(1));
-			fosDataModel.setLevel2(r.getString(2));
-			fosDataModel.setLevel3(r.getString(3));
+			fosDataModel.setLevel1(r.getString(2));
+			fosDataModel.setLevel2(r.getString(3));
+			fosDataModel.setLevel3(r.getString(4));
+			fosDataModel.setLevel4(r.getString(5));
+			fosDataModel.setScoreL3(String.valueOf(r.getDouble(6)));
+			fosDataModel.setScoreL4(String.valueOf(r.getDouble(7)));
 			return fosDataModel;
 		}, Encoders.bean(FOSDataModel.class))
 			.write()
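The index shift above follows the header of the new FOS dump (see the fos_sbs2.csv test resource added further down): DOI,OAID,level1,level2,level3,level4,score_for_L3,score_for_L4, so column 1 is the new OAID and the labels start at column 2. A hedged sketch of reading such a row with the new --delimiter parameter (variable names assumed, not the actual job code):

    Dataset<Row> fosData = spark
        .read()
        .format("csv")
        .option("sep", delimiter)   // "," for the new dump, passed via --delimiter
        .option("header", "true")
        .load(sourcePath);
    // r.getString(0) -> DOI, r.getString(1) -> OAID, r.getString(2..5) -> level1..level4,
    // r.getDouble(6) -> score_for_L3, r.getDouble(7) -> score_for_L4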
@@ -1,178 +0,0 @@
-
-package eu.dnetlib.dhp.actionmanager.createunresolvedentities;
-
-import static eu.dnetlib.dhp.actionmanager.Constants.*;
-import static eu.dnetlib.dhp.actionmanager.Constants.UPDATE_CLASS_NAME;
-import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
-
-import java.io.Serializable;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Optional;
-import java.util.stream.Collectors;
-
-import org.apache.commons.io.IOUtils;
-import org.apache.spark.SparkConf;
-import org.apache.spark.api.java.JavaRDD;
-import org.apache.spark.api.java.JavaSparkContext;
-import org.apache.spark.api.java.function.MapFunction;
-import org.apache.spark.sql.Encoders;
-import org.apache.spark.sql.SaveMode;
-import org.apache.spark.sql.SparkSession;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-
-import eu.dnetlib.dhp.actionmanager.bipmodel.BipScore;
-import eu.dnetlib.dhp.actionmanager.bipmodel.score.deserializers.BipResultModel;
-import eu.dnetlib.dhp.application.ArgumentApplicationParser;
-import eu.dnetlib.dhp.common.HdfsSupport;
-import eu.dnetlib.dhp.schema.common.ModelConstants;
-import eu.dnetlib.dhp.schema.oaf.Instance;
-import eu.dnetlib.dhp.schema.oaf.KeyValue;
-import eu.dnetlib.dhp.schema.oaf.Measure;
-import eu.dnetlib.dhp.schema.oaf.Result;
-import eu.dnetlib.dhp.schema.oaf.utils.CleaningFunctions;
-import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;
-import eu.dnetlib.dhp.utils.DHPUtils;
-
-public class PrepareBipFinder implements Serializable {
-
-	private static final Logger log = LoggerFactory.getLogger(PrepareBipFinder.class);
-	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
-
-	public static void main(String[] args) throws Exception {
-
-		String jsonConfiguration = IOUtils
-			.toString(
-				PrepareBipFinder.class
-					.getResourceAsStream(
-						"/eu/dnetlib/dhp/actionmanager/createunresolvedentities/prepare_parameters.json"));
-
-		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
-
-		parser.parseArgument(args);
-
-		Boolean isSparkSessionManaged = Optional
-			.ofNullable(parser.get("isSparkSessionManaged"))
-			.map(Boolean::valueOf)
-			.orElse(Boolean.TRUE);
-
-		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
-
-		final String sourcePath = parser.get("sourcePath");
-		log.info("sourcePath {}: ", sourcePath);
-
-		final String outputPath = parser.get("outputPath");
-		log.info("outputPath {}: ", outputPath);
-
-		SparkConf conf = new SparkConf();
-
-		runWithSparkSession(
-			conf,
-			isSparkSessionManaged,
-			spark -> {
-				HdfsSupport.remove(outputPath, spark.sparkContext().hadoopConfiguration());
-				prepareResults(spark, sourcePath, outputPath);
-			});
-	}
-
-	private static void prepareResults(SparkSession spark, String inputPath, String outputPath) {
-
-		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
-
-		JavaRDD<BipResultModel> bipDeserializeJavaRDD = sc
-			.textFile(inputPath)
-			.map(item -> OBJECT_MAPPER.readValue(item, BipResultModel.class));
-
-		spark
-			.createDataset(bipDeserializeJavaRDD.flatMap(entry -> entry.keySet().stream().map(key -> {
-				BipScore bs = new BipScore();
-				bs.setId(key);
-				bs.setScoreList(entry.get(key));
-
-				return bs;
-			}).collect(Collectors.toList()).iterator()).rdd(), Encoders.bean(BipScore.class))
-			.map((MapFunction<BipScore, Result>) v -> {
-				Result r = new Result();
-				final String cleanedPid = CleaningFunctions.normalizePidValue(DOI, v.getId());
-
-				r.setId(DHPUtils.generateUnresolvedIdentifier(v.getId(), DOI));
-				Instance inst = new Instance();
-				inst.setMeasures(getMeasure(v));
-
-				inst
-					.setPid(
-						Arrays
-							.asList(
-								OafMapperUtils
-									.structuredProperty(
-										cleanedPid,
-										OafMapperUtils
-											.qualifier(
-												DOI, DOI_CLASSNAME,
-												ModelConstants.DNET_PID_TYPES,
-												ModelConstants.DNET_PID_TYPES),
-										null)));
-				r.setInstance(Arrays.asList(inst));
-				r
-					.setDataInfo(
-						OafMapperUtils
-							.dataInfo(
-								false, null, true,
-								false,
-								OafMapperUtils
-									.qualifier(
-										ModelConstants.PROVENANCE_ENRICH,
-										null,
-										ModelConstants.DNET_PROVENANCE_ACTIONS,
-										ModelConstants.DNET_PROVENANCE_ACTIONS),
-								null));
-				return r;
-			}, Encoders.bean(Result.class))
-			.write()
-			.mode(SaveMode.Overwrite)
-			.option("compression", "gzip")
-			.json(outputPath + "/bip");
-	}
-
-	private static List<Measure> getMeasure(BipScore value) {
-		return value
-			.getScoreList()
-			.stream()
-			.map(score -> {
-				Measure m = new Measure();
-				m.setId(score.getId());
-				m
-					.setUnit(
-						score
-							.getUnit()
-							.stream()
-							.map(unit -> {
-								KeyValue kv = new KeyValue();
-								kv.setValue(unit.getValue());
-								kv.setKey(unit.getKey());
-								kv
-									.setDataInfo(
-										OafMapperUtils
-											.dataInfo(
-												false,
-												UPDATE_DATA_INFO_TYPE,
-												true,
-												false,
-												OafMapperUtils
-													.qualifier(
-														UPDATE_MEASURE_BIP_CLASS_ID,
-														UPDATE_CLASS_NAME,
-														ModelConstants.DNET_PROVENANCE_ACTIONS,
-														ModelConstants.DNET_PROVENANCE_ACTIONS),
-												""));
-								return kv;
-							})
-							.collect(Collectors.toList()));
-				return m;
-			})
-			.collect(Collectors.toList());
-	}
-}
@@ -78,12 +78,20 @@ public class PrepareFOSSparkJob implements Serializable {
 				HashSet<String> level1 = new HashSet<>();
 				HashSet<String> level2 = new HashSet<>();
 				HashSet<String> level3 = new HashSet<>();
-				addLevels(level1, level2, level3, first);
-				it.forEachRemaining(v -> addLevels(level1, level2, level3, v));
+				HashSet<String> level4 = new HashSet<>();
+				addLevels(level1, level2, level3, level4, first);
+				it.forEachRemaining(v -> addLevels(level1, level2, level3, level4, v));
 				List<Subject> sbjs = new ArrayList<>();
-				level1.forEach(l -> sbjs.add(getSubject(l, FOS_CLASS_ID, FOS_CLASS_NAME, UPDATE_SUBJECT_FOS_CLASS_ID)));
-				level2.forEach(l -> sbjs.add(getSubject(l, FOS_CLASS_ID, FOS_CLASS_NAME, UPDATE_SUBJECT_FOS_CLASS_ID)));
-				level3.forEach(l -> sbjs.add(getSubject(l, FOS_CLASS_ID, FOS_CLASS_NAME, UPDATE_SUBJECT_FOS_CLASS_ID)));
+				level1
+					.forEach(l -> add(sbjs, getSubject(l, FOS_CLASS_ID, FOS_CLASS_NAME, UPDATE_SUBJECT_FOS_CLASS_ID)));
+				level2
+					.forEach(l -> add(sbjs, getSubject(l, FOS_CLASS_ID, FOS_CLASS_NAME, UPDATE_SUBJECT_FOS_CLASS_ID)));
+				level3
+					.forEach(
+						l -> add(sbjs, getSubject(l, FOS_CLASS_ID, FOS_CLASS_NAME, UPDATE_SUBJECT_FOS_CLASS_ID, true)));
+				level4
+					.forEach(
+						l -> add(sbjs, getSubject(l, FOS_CLASS_ID, FOS_CLASS_NAME, UPDATE_SUBJECT_FOS_CLASS_ID, true)));
 				r.setSubject(sbjs);
 				r
 					.setDataInfo(
@@ -106,11 +114,18 @@ public class PrepareFOSSparkJob implements Serializable {
 			.json(outputPath + "/fos");
 	}
 
+	private static void add(List<Subject> sbsjs, Subject sbj) {
+		if (sbj != null)
+			sbsjs.add(sbj);
+	}
+
 	private static void addLevels(HashSet<String> level1, HashSet<String> level2, HashSet<String> level3,
+		HashSet<String> level4,
 		FOSDataModel first) {
 		level1.add(first.getLevel1());
 		level2.add(first.getLevel2());
-		level3.add(first.getLevel3());
+		level3.add(first.getLevel3() + "@@" + first.getScoreL3());
+		level4.add(first.getLevel4() + "@@" + first.getScoreL4());
 	}
 
 }
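Worked example of the new level handling, with values taken from the fos_sbs_2.json test resource (an illustrative sketch, not code from the commit): level3 and level4 labels are packed with their score, and labels starting with N/A are later discarded by getSubject.

    HashSet<String> level3 = new HashSet<>();
    level3.add("030204 cardiovascular system & hematology" + "@@" + "0.5101401805877686");
    level3.add("N/A" + "@@" + "0.0");   // getSubject(..., true) returns null for this one
    List<Subject> sbjs = new ArrayList<>();
    level3.forEach(l -> add(sbjs, getSubject(l, FOS_CLASS_ID, FOS_CLASS_NAME, UPDATE_SUBJECT_FOS_CLASS_ID, true)));
    // sbjs ends up with a single Subject whose trust is "0.5101401805877686"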
@@ -69,9 +69,9 @@ public class SparkSaveUnresolved implements Serializable {
 			.mapGroups((MapGroupsFunction<String, Result, Result>) (k, it) -> {
 				Result ret = it.next();
 				it.forEachRemaining(r -> {
-					if (r.getInstance() != null) {
-						ret.setInstance(r.getInstance());
-					}
+					// if (r.getInstance() != null) {
+					// ret.setInstance(r.getInstance());
+					// }
 					if (r.getSubject() != null) {
 						if (ret.getSubject() != null)
 							ret.getSubject().addAll(r.getSubject());
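With PrepareBipFinder dropped from the workflow, the grouping step above only merges subjects (FOS and SDG) of records that share the same unresolved id; instances are no longer copied across records. A reduced sketch of that merge, assuming a Dataset<Result> named records (hypothetical variable, not the job's actual field names):

    Result merged = records
        .groupByKey((MapFunction<Result, String>) Result::getId, Encoders.STRING())
        .mapGroups((MapGroupsFunction<String, Result, Result>) (k, it) -> {
            Result ret = it.next();
            it.forEachRemaining(r -> {
                if (r.getSubject() != null && ret.getSubject() != null)
                    ret.getSubject().addAll(r.getSubject());   // accumulate subjects only
            });
            return ret;
        }, Encoders.bean(Result.class))
        .first();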
@@ -11,21 +11,43 @@ public class FOSDataModel implements Serializable {
 	private String doi;
 
 	@CsvBindByPosition(position = 1)
+	// @CsvBindByName(column = "doi")
+	private String oaid;
+	@CsvBindByPosition(position = 2)
 	// @CsvBindByName(column = "level1")
 	private String level1;
 
-	@CsvBindByPosition(position = 2)
+	@CsvBindByPosition(position = 3)
 	// @CsvBindByName(column = "level2")
 	private String level2;
 
-	@CsvBindByPosition(position = 3)
+	@CsvBindByPosition(position = 4)
 	// @CsvBindByName(column = "level3")
 	private String level3;
 
+	@CsvBindByPosition(position = 5)
+	// @CsvBindByName(column = "level3")
+	private String level4;
+	@CsvBindByPosition(position = 6)
+	private String scoreL3;
+	@CsvBindByPosition(position = 7)
+	private String scoreL4;
+
 	public FOSDataModel() {
 
 	}
 
+	public FOSDataModel(String doi, String level1, String level2, String level3, String level4, String l3score,
+		String l4score) {
+		this.doi = doi;
+		this.level1 = level1;
+		this.level2 = level2;
+		this.level3 = level3;
+		this.level4 = level4;
+		this.scoreL3 = l3score;
+		this.scoreL4 = l4score;
+	}
+
 	public FOSDataModel(String doi, String level1, String level2, String level3) {
 		this.doi = doi;
 		this.level1 = level1;
@@ -33,8 +55,41 @@ public class FOSDataModel implements Serializable {
 		this.level3 = level3;
 	}
 
-	public static FOSDataModel newInstance(String d, String level1, String level2, String level3) {
-		return new FOSDataModel(d, level1, level2, level3);
+	public static FOSDataModel newInstance(String d, String level1, String level2, String level3, String level4,
+		String scorel3, String scorel4) {
+		return new FOSDataModel(d, level1, level2, level3, level4, scorel3, scorel4);
+	}
+
+	public String getOaid() {
+		return oaid;
+	}
+
+	public void setOaid(String oaid) {
+		this.oaid = oaid;
+	}
+
+	public String getLevel4() {
+		return level4;
+	}
+
+	public void setLevel4(String level4) {
+		this.level4 = level4;
+	}
+
+	public String getScoreL3() {
+		return scoreL3;
+	}
+
+	public void setScoreL3(String scoreL3) {
+		this.scoreL3 = scoreL3;
+	}
+
+	public String getScoreL4() {
+		return scoreL4;
+	}
+
+	public void setScoreL4(String scoreL4) {
+		this.scoreL4 = scoreL4;
 	}
 
 	public String getDoi() {
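For reference, the widened constructor and factory line up with the new CSV columns. A hypothetical instantiation using one row from the fos_sbs2.csv test resource (illustrative only):

    FOSDataModel row = FOSDataModel
        .newInstance(
            "10.1016/j.revmed.2006.07.012",                // doi
            "03 medical and health sciences",              // level1
            "0302 clinical medicine",                      // level2
            "030204 cardiovascular system & hematology",   // level3
            "03020409 Hematology/Coagulopathies",          // level4
            "0.5101401805877686",                          // scoreL3
            "0.0546871414174914");                         // scoreL4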
@ -5,11 +5,6 @@
|
||||||
<name>fosPath</name>
|
<name>fosPath</name>
|
||||||
<description>the input path of the resources to be extended</description>
|
<description>the input path of the resources to be extended</description>
|
||||||
</property>
|
</property>
|
||||||
|
|
||||||
<property>
|
|
||||||
<name>bipScorePath</name>
|
|
||||||
<description>the path where to find the bipFinder scores</description>
|
|
||||||
</property>
|
|
||||||
<property>
|
<property>
|
||||||
<name>outputPath</name>
|
<name>outputPath</name>
|
||||||
<description>the path where to store the actionset</description>
|
<description>the path where to store the actionset</description>
|
||||||
|
@@ -77,35 +72,10 @@
 
    <fork name="prepareInfo">
-      <path start="prepareBip"/>
       <path start="getFOS"/>
       <path start="getSDG"/>
    </fork>
 
-   <action name="prepareBip">
-      <spark xmlns="uri:oozie:spark-action:0.2">
-         <master>yarn</master>
-         <mode>cluster</mode>
-         <name>Produces the unresolved from BIP! Finder</name>
-         <class>eu.dnetlib.dhp.actionmanager.createunresolvedentities.PrepareBipFinder</class>
-         <jar>dhp-aggregation-${projectVersion}.jar</jar>
-         <spark-opts>
-            --executor-memory=${sparkExecutorMemory}
-            --executor-cores=${sparkExecutorCores}
-            --driver-memory=${sparkDriverMemory}
-            --conf spark.extraListeners=${spark2ExtraListeners}
-            --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
-            --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
-            --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
-            --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
-         </spark-opts>
-         <arg>--sourcePath</arg><arg>${bipScorePath}</arg>
-         <arg>--outputPath</arg><arg>${workingDir}/prepared</arg>
-      </spark>
-      <ok to="join"/>
-      <error to="Kill"/>
-   </action>
 
    <action name="getFOS">
       <spark xmlns="uri:oozie:spark-action:0.2">
          <master>yarn</master>
@@ -125,6 +95,7 @@
          </spark-opts>
          <arg>--sourcePath</arg><arg>${fosPath}</arg>
          <arg>--outputPath</arg><arg>${workingDir}/input/fos</arg>
+         <arg>--delimiter</arg><arg>${delimiter}</arg>
       </spark>
       <ok to="prepareFos"/>
       <error to="Kill"/>
@@ -213,7 +184,7 @@
       <spark xmlns="uri:oozie:spark-action:0.2">
          <master>yarn</master>
          <mode>cluster</mode>
-         <name>Saves the result produced for bip and fos by grouping results with the same id</name>
+         <name>Save the unresolved entities grouping results with the same id</name>
          <class>eu.dnetlib.dhp.actionmanager.createunresolvedentities.SparkSaveUnresolved</class>
         <jar>dhp-aggregation-${projectVersion}.jar</jar>
         <spark-opts>
@@ -13,10 +13,7 @@ import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.sql.SparkSession;
-import org.junit.jupiter.api.AfterAll;
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.*;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -68,6 +65,7 @@ public class GetFosTest {
 	}
 
 	@Test
+	@Disabled
 	void test3() throws Exception {
 		final String sourcePath = getClass()
 			.getResource("/eu/dnetlib/dhp/actionmanager/createunresolvedentities/fos/fos_sbs.tsv")
@@ -96,4 +94,37 @@ public class GetFosTest {
 		tmp.foreach(t -> Assertions.assertTrue(t.getLevel3() != null));
 
 	}
+
+	@Test
+	void test4() throws Exception {
+		final String sourcePath = getClass()
+			.getResource("/eu/dnetlib/dhp/actionmanager/createunresolvedentities/fos/fos_sbs2.csv")
+			.getPath();
+
+		final String outputPath = workingDir.toString() + "/fos.json";
+		GetFOSSparkJob
+			.main(
+				new String[] {
+					"--isSparkSessionManaged", Boolean.FALSE.toString(),
+					"--sourcePath", sourcePath,
+					"--delimiter", ",",
+					"-outputPath", outputPath
+
+				});
+
+		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
+
+		JavaRDD<FOSDataModel> tmp = sc
+			.textFile(outputPath)
+			.map(item -> OBJECT_MAPPER.readValue(item, FOSDataModel.class));
+
+		tmp.foreach(t -> Assertions.assertTrue(t.getDoi() != null));
+		tmp.foreach(t -> Assertions.assertTrue(t.getLevel1() != null));
+		tmp.foreach(t -> Assertions.assertTrue(t.getLevel2() != null));
+		tmp.foreach(t -> Assertions.assertTrue(t.getLevel3() != null));
+		tmp.foreach(t -> Assertions.assertTrue(t.getLevel4() != null));
+		tmp.foreach(t -> Assertions.assertTrue(t.getScoreL3() != null));
+		tmp.foreach(t -> Assertions.assertTrue(t.getScoreL4() != null));
+
+	}
 }
@@ -67,92 +67,6 @@ public class PrepareTest {
 		spark.stop();
 	}
 
-	@Test
-	void bipPrepareTest() throws Exception {
-		final String sourcePath = getClass()
-			.getResource("/eu/dnetlib/dhp/actionmanager/createunresolvedentities/bip/bip.json")
-			.getPath();
-
-		PrepareBipFinder
-			.main(
-				new String[] {
-					"--isSparkSessionManaged", Boolean.FALSE.toString(),
-					"--sourcePath", sourcePath,
-					"--outputPath", workingDir.toString() + "/work"
-
-				});
-
-		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
-
-		JavaRDD<Result> tmp = sc
-			.textFile(workingDir.toString() + "/work/bip")
-			.map(item -> OBJECT_MAPPER.readValue(item, Result.class));
-
-		Assertions.assertEquals(86, tmp.count());
-
-		String doi1 = "unresolved::10.0000/096020199389707::doi";
-
-		Assertions.assertEquals(1, tmp.filter(r -> r.getId().equals(doi1)).count());
-		Assertions.assertEquals(1, tmp.filter(r -> r.getId().equals(doi1)).collect().get(0).getInstance().size());
-		Assertions
-			.assertEquals(
-				3, tmp.filter(r -> r.getId().equals(doi1)).collect().get(0).getInstance().get(0).getMeasures().size());
-		Assertions
-			.assertEquals(
-				"6.34596412687e-09", tmp
-					.filter(r -> r.getId().equals(doi1))
-					.collect()
-					.get(0)
-					.getInstance()
-					.get(0)
-					.getMeasures()
-					.stream()
-					.filter(sl -> sl.getId().equals("influence"))
-					.collect(Collectors.toList())
-					.get(0)
-					.getUnit()
-					.get(0)
-					.getValue());
-		Assertions
-			.assertEquals(
-				"0.641151896994", tmp
-					.filter(r -> r.getId().equals(doi1))
-					.collect()
-					.get(0)
-					.getInstance()
-					.get(0)
-					.getMeasures()
-					.stream()
-					.filter(sl -> sl.getId().equals("popularity_alt"))
-					.collect(Collectors.toList())
-					.get(0)
-					.getUnit()
-					.get(0)
-					.getValue());
-		Assertions
-			.assertEquals(
-				"2.33375102921e-09", tmp
-					.filter(r -> r.getId().equals(doi1))
-					.collect()
-					.get(0)
-					.getInstance()
-					.get(0)
-					.getMeasures()
-					.stream()
-					.filter(sl -> sl.getId().equals("popularity"))
-					.collect(Collectors.toList())
-					.get(0)
-					.getUnit()
-					.get(0)
-					.getValue());
-
-		final String doi2 = "unresolved::10.3390/s18072310::doi";
-
-		Assertions.assertEquals(1, tmp.filter(r -> r.getId().equals(doi2)).count());
-		Assertions.assertEquals(1, tmp.filter(r -> r.getId().equals(doi2)).collect().get(0).getInstance().size());
-
-	}
-
 	@Test
 	void fosPrepareTest() throws Exception {
 		final String sourcePath = getClass()
@@ -222,6 +136,76 @@ public class PrepareTest {
 
 	}
 
+	@Test
+	void fosPrepareTest2() throws Exception {
+		final String sourcePath = getClass()
+			.getResource("/eu/dnetlib/dhp/actionmanager/createunresolvedentities/fos/fos_sbs_2.json")
+			.getPath();
+
+		PrepareFOSSparkJob
+			.main(
+				new String[] {
+					"--isSparkSessionManaged", Boolean.FALSE.toString(),
+					"--sourcePath", sourcePath,
+
+					"-outputPath", workingDir.toString() + "/work"
+
+				});
+
+		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
+
+		JavaRDD<Result> tmp = sc
+			.textFile(workingDir.toString() + "/work/fos")
+			.map(item -> OBJECT_MAPPER.readValue(item, Result.class));
+
+		String doi1 = "unresolved::10.1016/j.revmed.2006.07.012::doi";
+
+		assertEquals(13, tmp.count());
+		assertEquals(1, tmp.filter(row -> row.getId().equals(doi1)).count());
+
+		Result result = tmp
+			.filter(r -> r.getId().equals(doi1))
+			.first();
+
+		result.getSubject().forEach(s -> System.out.println(s.getValue() + " trust = " + s.getDataInfo().getTrust()));
+		Assertions.assertEquals(6, result.getSubject().size());
+
+		assertTrue(
+			result
+				.getSubject()
+				.stream()
+				.anyMatch(
+					s -> s.getValue().contains("03 medical and health sciences")
+						&& s.getDataInfo().getTrust().equals("")));
+
+		assertTrue(
+			result
+				.getSubject()
+				.stream()
+				.anyMatch(
+					s -> s.getValue().contains("0302 clinical medicine") && s.getDataInfo().getTrust().equals("")));
+
+		assertTrue(
+			result
+				.getSubject()
+				.stream()
+				.anyMatch(
+					s -> s
						.getValue()
+						.contains("030204 cardiovascular system & hematology")
+						&& s.getDataInfo().getTrust().equals("0.5101401805877686")));
+		assertTrue(
+			result
+				.getSubject()
+				.stream()
+				.anyMatch(
+					s -> s
+						.getValue()
+						.contains("03020409 Hematology/Coagulopathies")
+						&& s.getDataInfo().getTrust().equals("0.0546871414174914")));
+
+	}
+
 	@Test
 	void sdgPrepareTest() throws Exception {
 		final String sourcePath = getClass()
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// @Test
|
|
||||||
// void test3() throws Exception {
|
|
||||||
// final String sourcePath = "/Users/miriam.baglioni/Downloads/doi_fos_results_20_12_2021.csv.gz";
|
|
||||||
//
|
|
||||||
// final String outputPath = workingDir.toString() + "/fos.json";
|
|
||||||
// GetFOSSparkJob
|
|
||||||
// .main(
|
|
||||||
// new String[] {
|
|
||||||
// "--isSparkSessionManaged", Boolean.FALSE.toString(),
|
|
||||||
// "--sourcePath", sourcePath,
|
|
||||||
//
|
|
||||||
// "-outputPath", outputPath
|
|
||||||
//
|
|
||||||
// });
|
|
||||||
//
|
|
||||||
// final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
|
|
||||||
//
|
|
||||||
// JavaRDD<FOSDataModel> tmp = sc
|
|
||||||
// .textFile(outputPath)
|
|
||||||
// .map(item -> OBJECT_MAPPER.readValue(item, FOSDataModel.class));
|
|
||||||
//
|
|
||||||
// tmp.foreach(t -> Assertions.assertTrue(t.getDoi() != null));
|
|
||||||
// tmp.foreach(t -> Assertions.assertTrue(t.getLevel1() != null));
|
|
||||||
// tmp.foreach(t -> Assertions.assertTrue(t.getLevel2() != null));
|
|
||||||
// tmp.foreach(t -> Assertions.assertTrue(t.getLevel3() != null));
|
|
||||||
//
|
|
||||||
// }
|
|
||||||
//
|
|
||||||
// @Test
|
|
||||||
// void test4() throws Exception {
|
|
||||||
// final String sourcePath = "/Users/miriam.baglioni/Downloads/doi_sdg_results_20_12_21.csv.gz";
|
|
||||||
//
|
|
||||||
// final String outputPath = workingDir.toString() + "/sdg.json";
|
|
||||||
// GetSDGSparkJob
|
|
||||||
// .main(
|
|
||||||
// new String[] {
|
|
||||||
// "--isSparkSessionManaged", Boolean.FALSE.toString(),
|
|
||||||
// "--sourcePath", sourcePath,
|
|
||||||
//
|
|
||||||
// "-outputPath", outputPath
|
|
||||||
//
|
|
||||||
// });
|
|
||||||
//
|
|
||||||
// final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
|
|
||||||
//
|
|
||||||
// JavaRDD<SDGDataModel> tmp = sc
|
|
||||||
// .textFile(outputPath)
|
|
||||||
// .map(item -> OBJECT_MAPPER.readValue(item, SDGDataModel.class));
|
|
||||||
//
|
|
||||||
// tmp.foreach(t -> Assertions.assertTrue(t.getDoi() != null));
|
|
||||||
// tmp.foreach(t -> Assertions.assertTrue(t.getSbj() != null));
|
|
||||||
//
|
|
||||||
// }
|
|
||||||
}
|
}
|
||||||
|
|
|
@@ -340,18 +340,7 @@ public class ProduceTest {
 	}
 
 	private JavaRDD<Result> getResultJavaRDD() throws Exception {
-		final String bipPath = getClass()
-			.getResource("/eu/dnetlib/dhp/actionmanager/createunresolvedentities/bip/bip.json")
-			.getPath();
-
-		PrepareBipFinder
-			.main(
-				new String[] {
-					"--isSparkSessionManaged", Boolean.FALSE.toString(),
-					"--sourcePath", bipPath,
-					"--outputPath", workingDir.toString() + "/work"
-
-				});
 		final String fosPath = getClass()
 			.getResource("/eu/dnetlib/dhp/actionmanager/createunresolvedentities/fos/fos.json")
 			.getPath();
@@ -379,6 +368,40 @@ public class ProduceTest {
 			.map(item -> OBJECT_MAPPER.readValue(item, Result.class));
 	}
 
+	@Test
+	public JavaRDD<Result> getResultFosJavaRDD() throws Exception {
+
+		final String fosPath = getClass()
+			.getResource("/eu/dnetlib/dhp/actionmanager/createunresolvedentities/fos/fos_sbs_2.json")
+			.getPath();
+
+		PrepareFOSSparkJob
+			.main(
+				new String[] {
+					"--isSparkSessionManaged", Boolean.FALSE.toString(),
+					"--sourcePath", fosPath,
+					"-outputPath", workingDir.toString() + "/work"
+				});
+
+		SparkSaveUnresolved.main(new String[] {
+			"--isSparkSessionManaged", Boolean.FALSE.toString(),
+			"--sourcePath", workingDir.toString() + "/work",
+
+			"-outputPath", workingDir.toString() + "/unresolved"
+
+		});
+
+		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
+
+		JavaRDD<Result> tmp = sc
+			.textFile(workingDir.toString() + "/unresolved")
+			.map(item -> OBJECT_MAPPER.readValue(item, Result.class));
+		tmp.foreach(r -> System.out.println(new ObjectMapper().writeValueAsString(r)));
+
+		return tmp;
+
+	}
+
 	@Test
 	void prepareTest5Subjects() throws Exception {
 		final String doi = "unresolved::10.1063/5.0032658::doi";
|
||||||
}
|
}
|
||||||
|
|
||||||
private JavaRDD<Result> getResultJavaRDDPlusSDG() throws Exception {
|
private JavaRDD<Result> getResultJavaRDDPlusSDG() throws Exception {
|
||||||
final String bipPath = getClass()
|
|
||||||
.getResource("/eu/dnetlib/dhp/actionmanager/createunresolvedentities/bip/bip.json")
|
|
||||||
.getPath();
|
|
||||||
|
|
||||||
PrepareBipFinder
|
|
||||||
.main(
|
|
||||||
new String[] {
|
|
||||||
"--isSparkSessionManaged", Boolean.FALSE.toString(),
|
|
||||||
"--sourcePath", bipPath,
|
|
||||||
"--outputPath", workingDir.toString() + "/work"
|
|
||||||
|
|
||||||
});
|
|
||||||
final String fosPath = getClass()
|
final String fosPath = getClass()
|
||||||
.getResource("/eu/dnetlib/dhp/actionmanager/createunresolvedentities/fos/fos.json")
|
.getResource("/eu/dnetlib/dhp/actionmanager/createunresolvedentities/fos/fos.json")
|
||||||
.getPath();
|
.getPath();
|
||||||
|
@@ -483,14 +495,6 @@ public class ProduceTest {
 				.filter(row -> row.getSubject() != null)
 				.count());
 
-		Assertions
-			.assertEquals(
-				85,
-				tmp
-					.filter(row -> !row.getId().equals(doi))
-					.filter(r -> r.getInstance() != null && r.getInstance().size() > 0)
-					.count());
-
 	}
 
 	@Test
@@ -0,0 +1,26 @@
DOI,OAID,level1,level2,level3,level4,score_for_L3,score_for_L4
10.1016/j.anucene.2006.02.004,doi_________::00059d9963edf633bec756fb21b5bd72,02 engineering and technology,"0202 electrical engineering, electronic engineering, information engineering",020209 energy,02020908 Climate change policy/Ethanol fuel,0.5,0.5
10.1016/j.anucene.2006.02.004,doi_________::00059d9963edf633bec756fb21b5bd72,02 engineering and technology,0211 other engineering and technologies,021108 energy,02110808 Climate change policy/Ethanol fuel,0.5,0.5
10.1016/j.revmed.2006.07.010,doi_________::0026476c1651a92c933d752ff12496c7,03 medical and health sciences,0302 clinical medicine,030220 oncology & carcinogenesis,N/A,0.5036656856536865,0.0
10.1016/j.revmed.2006.07.010,doi_________::0026476c1651a92c933d752ff12496c7,03 medical and health sciences,0302 clinical medicine,030212 general & internal medicine,N/A,0.4963343143463135,0.0
10.20965/jrm.2006.p0312,doi_________::0028336a2f3826cc83c47dbefac71543,02 engineering and technology,0209 industrial biotechnology,020901 industrial engineering & automation,02090104 Robotics/Robots,0.6111094951629639,0.5053805979936855
10.20965/jrm.2006.p0312,doi_________::0028336a2f3826cc83c47dbefac71543,01 natural sciences,0104 chemical sciences,010401 analytical chemistry,N/A,0.3888905048370361,0.0
10.1111/j.1747-7379.2006.040_1.x,doi_________::002c7077e7c114a8304eb90f59e45fa4,05 social sciences,0506 political science,050602 political science & public administration,05060202 Ethnic groups/Ethnicity,0.6159052848815918,0.7369035568037298
10.1111/j.1747-7379.2006.040_1.x,doi_________::002c7077e7c114a8304eb90f59e45fa4,05 social sciences,0502 economics and business,050207 economics,N/A,0.3840946555137634,0.0
10.1007/s10512-006-0049-9,doi_________::003f29f9254819cf4c78558b1bc25f10,02 engineering and technology,"0202 electrical engineering, electronic engineering, information engineering",020209 energy,02020908 Climate change policy/Ethanol fuel,0.5,0.5
10.1007/s10512-006-0049-9,doi_________::003f29f9254819cf4c78558b1bc25f10,02 engineering and technology,0211 other engineering and technologies,021108 energy,02110808 Climate change policy/Ethanol fuel,0.5,0.5
10.1111/j.1365-2621.2005.01045.x,doi_________::00419355b4c3e0646bd0e1b301164c8e,04 agricultural and veterinary sciences,0404 agricultural biotechnology,040401 food science,04040102 Food science/Food industry,0.5,0.5
10.1111/j.1365-2621.2005.01045.x,doi_________::00419355b4c3e0646bd0e1b301164c8e,04 agricultural and veterinary sciences,0405 other agricultural sciences,040502 food science,04050202 Food science/Food industry,0.5,0.5
10.1002/chin.200617262,doi_________::004c8cef80668904961b9e62841793c8,01 natural sciences,0104 chemical sciences,010405 organic chemistry,01040508 Functional groups/Ethers,0.5566747188568115,0.5582916736602783
10.1002/chin.200617262,doi_________::004c8cef80668904961b9e62841793c8,01 natural sciences,0104 chemical sciences,010402 general chemistry,01040207 Chemical synthesis/Total synthesis,0.4433253407478332,0.4417082965373993
10.1016/j.revmed.2006.07.012,doi_________::005b1d0fb650b680abaf6cfe26a21604,03 medical and health sciences,0302 clinical medicine,030204 cardiovascular system & hematology,03020409 Hematology/Coagulopathies,0.5101401805877686,0.0546871414174914
10.1016/j.revmed.2006.07.012,doi_________::005b1d0fb650b680abaf6cfe26a21604,03 medical and health sciences,0301 basic medicine,030105 genetics & heredity,N/A,0.4898599088191986,0.0
10.4109/jslab.17.132,doi_________::00889baa06de363e37930daaf8e800c0,03 medical and health sciences,0301 basic medicine,030104 developmental biology,N/A,0.5,0.0
10.4109/jslab.17.132,doi_________::00889baa06de363e37930daaf8e800c0,03 medical and health sciences,0303 health sciences,030304 developmental biology,N/A,0.5,0.0
10.1108/00251740610715687,doi_________::0092cb1b1920d556719385a26363ecaa,05 social sciences,0502 economics and business,050203 business & management,05020311 International business/International trade,0.605047881603241,0.2156608108845153
10.1108/00251740610715687,doi_________::0092cb1b1920d556719385a26363ecaa,05 social sciences,0502 economics and business,050211 marketing,N/A,0.394952118396759,0.0
10.1080/03067310500248098,doi_________::00a76678d230e3f20b6356804448028f,04 agricultural and veterinary sciences,0404 agricultural biotechnology,040401 food science,04040102 Food science/Food industry,0.5,0.5
10.1080/03067310500248098,doi_________::00a76678d230e3f20b6356804448028f,04 agricultural and veterinary sciences,0405 other agricultural sciences,040502 food science,04050202 Food science/Food industry,0.5,0.5
10.3152/147154306781778533,doi_________::00acc520f3939e5a6675343881fed4f2,05 social sciences,0502 economics and business,050203 business & management,05020307 Innovation/Product management,0.5293408632278442,0.5326762795448303
10.3152/147154306781778533,doi_________::00acc520f3939e5a6675343881fed4f2,05 social sciences,0509 other social sciences,050905 science studies,05090502 Social philosophy/Capitalism,0.4706590473651886,0.4673237204551697
10.1785/0120050806,doi_________::00d5831d329e7ae4523d78bfc3042e98,02 engineering and technology,0211 other engineering and technologies,021101 geological & geomatics engineering,02110103 Concrete/Building materials,0.5343400835990906,0.3285667930180677
@@ -0,0 +1,25 @@
{"doi":"10.1016/j.anucene.2006.02.004","level1":"02 engineering and technology","level2":"0202 electrical engineering, electronic engineering, information engineering","level3":"020209 energy","level4":"02020908 Climate change policy/Ethanol fuel","scoreL3":"0.5","scoreL4":"0.5"}
{"doi":"10.1016/j.anucene.2006.02.004","level1":"02 engineering and technology","level2":"0211 other engineering and technologies","level3":"021108 energy","level4":"02110808 Climate change policy/Ethanol fuel","scoreL3":"0.5","scoreL4":"0.5"}
{"doi":"10.1016/j.revmed.2006.07.010","level1":"03 medical and health sciences","level2":"0302 clinical medicine","level3":"030220 oncology & carcinogenesis","level4":"N/A","scoreL3":"0.5036656856536865","scoreL4":"0.0"}
{"doi":"10.1016/j.revmed.2006.07.010","level1":"03 medical and health sciences","level2":"0302 clinical medicine","level3":"030212 general & internal medicine","level4":"N/A","scoreL3":"0.4963343143463135","scoreL4":"0.0"}
{"doi":"10.20965/jrm.2006.p0312","level1":"02 engineering and technology","level2":"0209 industrial biotechnology","level3":"020901 industrial engineering & automation","level4":"02090104 Robotics/Robots","scoreL3":"0.6111094951629639","scoreL4":"0.5053805979936855"}
{"doi":"10.20965/jrm.2006.p0312","level1":"01 natural sciences","level2":"0104 chemical sciences","level3":"010401 analytical chemistry","level4":"N/A","scoreL3":"0.3888905048370361","scoreL4":"0.0"}
{"doi":"10.1111/j.1747-7379.2006.040_1.x","level1":"05 social sciences","level2":"0506 political science","level3":"050602 political science & public administration","level4":"05060202 Ethnic groups/Ethnicity","scoreL3":"0.6159052848815918","scoreL4":"0.7369035568037298"}
{"doi":"10.1111/j.1747-7379.2006.040_1.x","level1":"05 social sciences","level2":"0502 economics and business","level3":"050207 economics","level4":"N/A","scoreL3":"0.3840946555137634","scoreL4":"0.0"}
{"doi":"10.1007/s10512-006-0049-9","level1":"02 engineering and technology","level2":"0202 electrical engineering, electronic engineering, information engineering","level3":"020209 energy","level4":"02020908 Climate change policy/Ethanol fuel","scoreL3":"0.5","scoreL4":"0.5"}
{"doi":"10.1007/s10512-006-0049-9","level1":"02 engineering and technology","level2":"0211 other engineering and technologies","level3":"021108 energy","level4":"02110808 Climate change policy/Ethanol fuel","scoreL3":"0.5","scoreL4":"0.5"}
{"doi":"10.1111/j.1365-2621.2005.01045.x","level1":"04 agricultural and veterinary sciences","level2":"0404 agricultural biotechnology","level3":"040401 food science","level4":"04040102 Food science/Food industry","scoreL3":"0.5","scoreL4":"0.5"}
{"doi":"10.1111/j.1365-2621.2005.01045.x","level1":"04 agricultural and veterinary sciences","level2":"0405 other agricultural sciences","level3":"040502 food science","level4":"04050202 Food science/Food industry","scoreL3":"0.5","scoreL4":"0.5"}
{"doi":"10.1002/chin.200617262","level1":"01 natural sciences","level2":"0104 chemical sciences","level3":"010405 organic chemistry","level4":"01040508 Functional groups/Ethers","scoreL3":"0.5566747188568115","scoreL4":"0.5582916736602783"}
{"doi":"10.1002/chin.200617262","level1":"01 natural sciences","level2":"0104 chemical sciences","level3":"010402 general chemistry","level4":"01040207 Chemical synthesis/Total synthesis","scoreL3":"0.4433253407478332","scoreL4":"0.4417082965373993"}
{"doi":"10.1016/j.revmed.2006.07.012","level1":"03 medical and health sciences","level2":"0302 clinical medicine","level3":"030204 cardiovascular system & hematology","level4":"03020409 Hematology/Coagulopathies","scoreL3":"0.5101401805877686","scoreL4":"0.0546871414174914"}
{"doi":"10.1016/j.revmed.2006.07.012","level1":"03 medical and health sciences","level2":"0301 basic medicine","level3":"030105 genetics & heredity","level4":"N/A","scoreL3":"0.4898599088191986","scoreL4":"0.0"}
{"doi":"10.4109/jslab.17.132","level1":"03 medical and health sciences","level2":"0301 basic medicine","level3":"030104 developmental biology","level4":"N/A","scoreL3":"0.5","scoreL4":"0.0"}
{"doi":"10.4109/jslab.17.132","level1":"03 medical and health sciences","level2":"0303 health sciences","level3":"030304 developmental biology","level4":"N/A","scoreL3":"0.5","scoreL4":"0.0"}
{"doi":"10.1108/00251740610715687","level1":"05 social sciences","level2":"0502 economics and business","level3":"050203 business & management","level4":"05020311 International business/International trade","scoreL3":"0.605047881603241","scoreL4":"0.2156608108845153"}
{"doi":"10.1108/00251740610715687","level1":"05 social sciences","level2":"0502 economics and business","level3":"050211 marketing","level4":"N/A","scoreL3":"0.394952118396759","scoreL4":"0.0"}
{"doi":"10.1080/03067310500248098","level1":"04 agricultural and veterinary sciences","level2":"0404 agricultural biotechnology","level3":"040401 food science","level4":"04040102 Food science/Food industry","scoreL3":"0.5","scoreL4":"0.5"}
{"doi":"10.1080/03067310500248098","level1":"04 agricultural and veterinary sciences","level2":"0405 other agricultural sciences","level3":"040502 food science","level4":"04050202 Food science/Food industry","scoreL3":"0.5","scoreL4":"0.5"}
{"doi":"10.3152/147154306781778533","level1":"05 social sciences","level2":"0502 economics and business","level3":"050203 business & management","level4":"05020307 Innovation/Product management","scoreL3":"0.5293408632278442","scoreL4":"0.5326762795448303"}
{"doi":"10.3152/147154306781778533","level1":"05 social sciences","level2":"0509 other social sciences","level3":"050905 science studies","level4":"05090502 Social philosophy/Capitalism","scoreL3":"0.4706590473651886","scoreL4":"0.4673237204551697"}
{"doi":"10.1785/0120050806","level1":"02 engineering and technology","level2":"0211 other engineering and technologies","level3":"021101 geological & geomatics engineering","level4":"02110103 Concrete/Building materials","scoreL3":"0.5343400835990906","scoreL4":"0.3285667930180677"}
@@ -24,13 +24,13 @@ function copydb() {
   # drop tables from db
   for i in `impala-shell --user $HADOOP_USER_NAME -i impala-cluster-dn1.openaire.eu -d ${db} --delimited -q "show tables"`;
     do
-        `impala-shell -i impala-cluster-dn1.openaire.eu -d -d ${db} -q "drop table $i;"`;
+        `impala-shell -i impala-cluster-dn1.openaire.eu -d ${db} -q "drop table $i;"`;
     done
 
   # drop views from db
   for i in `impala-shell --user $HADOOP_USER_NAME -i impala-cluster-dn1.openaire.eu -d ${db} --delimited -q "show tables"`;
     do
-        `impala-shell -i impala-cluster-dn1.openaire.eu -d -d ${db} -q "drop view $i;"`;
+        `impala-shell -i impala-cluster-dn1.openaire.eu -d ${db} -q "drop view $i;"`;
     done
 
   # delete the database
@ -82,12 +82,12 @@ copydb $USAGE_STATS_DB
|
||||||
copydb $PROD_USAGE_STATS_DB
|
copydb $PROD_USAGE_STATS_DB
|
||||||
copydb $EXT_DB
|
copydb $EXT_DB
|
||||||
copydb $STATS_DB
|
copydb $STATS_DB
|
||||||
#copydb $MONITOR_DB
|
copydb $MONITOR_DB
|
||||||
copydb $OBSERVATORY_DB
|
copydb $OBSERVATORY_DB
|
||||||
|
|
||||||
copydb $MONITOR_DB'_funded'
|
copydb $MONITOR_DB'_funded'
|
||||||
copydb $MONITOR_DB'_institutions'
|
copydb $MONITOR_DB'_institutions'
|
||||||
copydb $MONITOR_DB'_RIs_tail'
|
copydb $MONITOR_DB'_ris_tail'
|
||||||
|
|
||||||
contexts="knowmad::other dh-ch::other enermaps::other gotriple::other neanias-atmospheric::other rural-digital-europe::other covid-19::other aurora::other neanias-space::other north-america-studies::other north-american-studies::other eutopia::other"
|
contexts="knowmad::other dh-ch::other enermaps::other gotriple::other neanias-atmospheric::other rural-digital-europe::other covid-19::other aurora::other neanias-space::other north-america-studies::other north-american-studies::other eutopia::other"
|
||||||
for i in ${contexts}
|
for i in ${contexts}
|
||||||
|
|
|
@@ -13,7 +13,7 @@ function createShadowDB() {
   # drop views from db
   for i in `impala-shell -i impala-cluster-dn1.openaire.eu -d ${SHADOW} --delimited -q "show tables"`;
     do
-        `impala-shell -i impala-cluster-dn1.openaire.eu -d -d ${SHADOW} -q "drop view $i;"`;
+        `impala-shell -i impala-cluster-dn1.openaire.eu -d ${SHADOW} -q "drop view $i;"`;
     done
 
   impala-shell -i impala-cluster-dn1.openaire.eu -q "drop database ${SHADOW} CASCADE";
@ -36,13 +36,13 @@ createShadowDB $MONITOR_DB $MONITOR_DB_SHADOW
|
||||||
createShadowDB $OBSERVATORY_DB $OBSERVATORY_DB_SHADOW
|
createShadowDB $OBSERVATORY_DB $OBSERVATORY_DB_SHADOW
|
||||||
createShadowDB USAGE_STATS_DB USAGE_STATS_DB_SHADOW
|
createShadowDB USAGE_STATS_DB USAGE_STATS_DB_SHADOW
|
||||||
|
|
||||||
createShadowDB $MONITOR_DB'_funded' $MONITOR_DB'_funded_shadow'
|
createShadowDB $MONITOR_DB'_funded' $MONITOR_DB_SHADOW'_shadow_funded'
|
||||||
createShadowDB $MONITOR_DB'_institutions' $MONITOR_DB'_institutions_shadow'
|
createShadowDB $MONITOR_DB'_institutions' $MONITOR_DB_SHADOW'_shadow_institutions'
|
||||||
createShadowDB $MONITOR_DB'_RIs_tail' $MONITOR_DB'_RIs_tail_shadow'
|
createShadowDB $MONITOR_DB'_ris_tail' $MONITOR_DB_SHADOW'_shadow_ris_tail'
|
||||||
|
|
||||||
contexts="knowmad::other dh-ch::other enermaps::other gotriple::other neanias-atmospheric::other rural-digital-europe::other covid-19::other aurora::other neanias-space::other north-america-studies::other north-american-studies::other eutopia::other"
|
contexts="knowmad::other dh-ch::other enermaps::other gotriple::other neanias-atmospheric::other rural-digital-europe::other covid-19::other aurora::other neanias-space::other north-america-studies::other north-american-studies::other eutopia::other"
|
||||||
for i in ${contexts}
|
for i in ${contexts}
|
||||||
do
|
do
|
||||||
tmp=`echo "$i" | sed 's/'-'/'_'/g' | sed 's/'::'/'_'/g'`
|
tmp=`echo "$i" | sed 's/'-'/'_'/g' | sed 's/'::'/'_'/g'`
|
||||||
createShadowDB ${MONITOR_DB}'_'${tmp} ${MONITOR_DB}'_'${tmp}'_shadow'
|
createShadowDB ${MONITOR_DB}'_'${tmp} ${MONITOR_DB_SHADOW}'_shadow_'${tmp}
|
||||||
done
|
done
|
dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/monitor.sh (Normal file → Executable file)
@@ -14,6 +14,7 @@ export SCRIPT_PATH2=$5
 export SCRIPT_PATH3=$6
 export SCRIPT_PATH4=$7
 export SCRIPT_PATH5=$8
+export SCRIPT_PATH6=$9
 
 export HIVE_OPTS="-hiveconf mapred.job.queue.name=analytics -hiveconf hive.spark.client.connect.timeout=120000ms -hiveconf hive.spark.client.server.connect.timeout=300000ms -hiveconf spark.executor.memory=19166291558 -hiveconf spark.yarn.executor.memoryOverhead=3225 -hiveconf spark.driver.memory=11596411699 -hiveconf spark.yarn.driver.memoryOverhead=1228"
 export HADOOP_USER_NAME="oozie"
@@ -33,12 +34,19 @@ hdfs dfs -copyToLocal $7
 echo "Getting file from " $8
 hdfs dfs -copyToLocal $8
 
+echo "Getting file from " $9
+hdfs dfs -copyToLocal $9
+
+
 echo "Creating monitor database"
+cat step20-createMonitorDBAll.sql | sed "s/SOURCE/$1/g" | sed "s/TARGET/$2/g1" > foo
+hive $HIVE_OPTS -f foo
+
 cat step20-createMonitorDB_funded.sql | sed "s/SOURCE/$1/g" | sed "s/TARGET/$2_funded/g1" > foo
 hive $HIVE_OPTS -f foo
 cat step20-createMonitorDB.sql | sed "s/SOURCE/$1/g" | sed "s/TARGET/$2_funded/g1" > foo
 hive $HIVE_OPTS -f foo
-#
 cat step20-createMonitorDB_institutions.sql | sed "s/SOURCE/$1/g" | sed "s/TARGET/$2_institutions/g1" > foo
 hive $HIVE_OPTS -f foo
 cat step20-createMonitorDB.sql | sed "s/SOURCE/$1/g" | sed "s/TARGET/$2_institutions/g1" > foo
@ -56,14 +64,20 @@ do
|
||||||
hive $HIVE_OPTS -f foo
|
hive $HIVE_OPTS -f foo
|
||||||
done
|
done
|
||||||
|
|
||||||
|
cat step20-createMonitorDB_RIs_tail.sql | sed "s/SOURCE/$1/g" | sed "s/TARGET/$2_ris_tail/g1" | sed "s/CONTEXTS/\"'knowmad::other','dh-ch::other', 'enermaps::other', 'gotriple::other', 'neanias-atmospheric::other', 'rural-digital-europe::other', 'covid-19::other', 'aurora::other', 'neanias-space::other', 'north-america-studies::other', 'north-american-studies::other', 'eutopia::other'\"/g" > foo
|
||||||
cat step20-createMonitorDB_RIs_tail.sql | sed "s/SOURCE/$1/g" | sed "s/TARGET/$2_RIs_tail/g1" | sed "s/CONTEXTS/\"'knowmad::other','dh-ch::other', 'enermaps::other', 'gotriple::other', 'neanias-atmospheric::other', 'rural-digital-europe::other', 'covid-19::other', 'aurora::other', 'neanias-space::other', 'north-america-studies::other', 'north-american-studies::other', 'eutopia::other'\"/g" > foo
|
|
||||||
hive $HIVE_OPTS -f foo
|
hive $HIVE_OPTS -f foo
|
||||||
cat step20-createMonitorDB.sql | sed "s/SOURCE/$1/g" | sed "s/TARGET/$2_RIs_tail/g1" > foo
|
cat step20-createMonitorDB.sql | sed "s/SOURCE/$1/g" | sed "s/TARGET/$2_ris_tail/g1" > foo
|
||||||
hive $HIVE_OPTS -f foo
|
hive $HIVE_OPTS -f foo
|
||||||
|
|
||||||
echo "Hive shell finished"
|
echo "Hive shell finished"
|
||||||
|
|
||||||
|
echo "Updating shadow monitor all database"
|
||||||
|
hive -e "drop database if exists ${SHADOW} cascade"
|
||||||
|
hive -e "create database if not exists ${SHADOW}"
|
||||||
|
hive $HIVE_OPTS --database ${2} -e "show tables" | grep -v WARN | sed "s/\(.*\)/create view ${SHADOW}.\1 as select * from ${2}.\1;/" > foo
|
||||||
|
hive -f foo
|
||||||
|
echo "Updated shadow monitor all database"
|
||||||
|
|
||||||
echo "Updating shadow monitor funded database"
|
echo "Updating shadow monitor funded database"
|
||||||
hive -e "drop database if exists ${SHADOW}_funded cascade"
|
hive -e "drop database if exists ${SHADOW}_funded cascade"
|
||||||
hive -e "create database if not exists ${SHADOW}_funded"
|
hive -e "create database if not exists ${SHADOW}_funded"
|
||||||
|
|
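The driver script binds the generic monitor SQL to concrete databases purely through sed substitution before each hive run. A minimal sketch of one such invocation in Bash, with assumed example values for the script's positional arguments (the real values are whatever $1 and $2 are passed by the caller):

SOURCE_DB="openaire_stats"      # hypothetical value of $1 (the stats source database)
TARGET_DB="openaire_monitor"    # hypothetical value of $2 (the monitor database prefix)
# Bind the SOURCE/TARGET placeholders for the "_funded" scope and execute the
# substituted file with Hive, reusing the HIVE_OPTS exported at the top of the script.
cat step20-createMonitorDB_funded.sql \
  | sed "s/SOURCE/${SOURCE_DB}/g" \
  | sed "s/TARGET/${TARGET_DB}_funded/g1" > foo
hive $HIVE_OPTS -f foo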
dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step13.sql
Normal file → Executable file

@@ -37,8 +37,15 @@ select * from ${stats_db_name}.otherresearchproduct_refereed;

create table if not exists ${stats_db_name}.indi_impact_measures STORED AS PARQUET as
select substr(id, 4) as id, measures_ids.id impactmetric, cast(measures_ids.unit.value[0] as double) score,
-cast(measures_ids.unit.value[0] as decimal(6,3)) score_dec, measures_ids.unit.value[1] class
+cast(measures_ids.unit.value[0] as decimal(6,3)) score_dec, measures_ids.unit.value[1] impact_class
from ${openaire_db_name}.result lateral view explode(measures) measures as measures_ids
where measures_ids.id!='views' and measures_ids.id!='downloads';

-ANALYZE TABLE indi_impact_measures COMPUTE STATISTICS;
+create table if not exists ${stats_db_name}.result_apc_affiliations STORED AS PARQUET as
+select distinct substr(rel.target,4) id, substr(rel.source,4) organization, o.legalname.value name,
+cast(rel.properties[0].value as double) apc_amount,
+rel.properties[1].value apc_currency
+from ${openaire_db_name}.relation rel
+join ${openaire_db_name}.organization o on o.id=rel.source
+join ${openaire_db_name}.result r on r.id=rel.target
+where rel.subreltype = 'affiliation' and rel.datainfo.deletedbyinference = false and size(rel.properties)>0;
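The new result_apc_affiliations table links each result to the affiliated organization carrying APC information, with the amount and currency taken from the relation properties. A quick sanity check one might run after the step completes, assuming the resolved stats database is named stats_db (placeholder for ${stats_db_name}):

# Sketch only: database name is an assumed placeholder, columns come from the definition above.
hive -e "select name, apc_currency, sum(apc_amount) as total_apc from stats_db.result_apc_affiliations group by name, apc_currency order by total_apc desc limit 20;"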
@@ -35,6 +35,7 @@ create or replace view ${stats_db_name}.doctoratestudents as select * from stats

create or replace view ${stats_db_name}.totalresearchers as select * from stats_ext.totalresearchers;
create or replace view ${stats_db_name}.totalresearchersft as select * from stats_ext.totalresearchersft;
create or replace view ${stats_db_name}.hrrst as select * from stats_ext.hrrst;
+create or replace view ${stats_db_name}.graduatedoctorates as select * from stats_ext.graduatedoctorates;

create table if not exists ${stats_db_name}.result_instance stored as parquet as
select distinct r.*
(File diff suppressed because it is too large)
@@ -88,85 +88,88 @@ create view if not exists TARGET.doctoratestudents as select * from SOURCE.docto

create view if not exists TARGET.totalresearchers as select * from SOURCE.totalresearchers;
create view if not exists TARGET.totalresearchersft as select * from SOURCE.totalresearchersft;
create view if not exists TARGET.hrrst as select * from SOURCE.hrrst;
+create view if not exists TARGET.graduatedoctorates as select * from SOURCE.graduatedoctorates;

create table TARGET.result_citations stored as parquet as select * from SOURCE.result_citations orig where exists (select 1 from TARGET.result r where r.id=orig.id);
-ANALYZE TABLE TARGET.result_citations COMPUTE STATISTICS;
+--ANALYZE TABLE TARGET.result_citations COMPUTE STATISTICS;
(Each pre-existing create table statement below was followed by the same kind of ANALYZE TABLE TARGET.<table> COMPUTE STATISTICS; line, which this commit comments out exactly as shown for result_citations; those repeated pairs are not listed again.)

create table TARGET.result_references_oc stored as parquet as select * from SOURCE.result_references_oc orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_citations_oc stored as parquet as select * from SOURCE.result_citations_oc orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_classifications stored as parquet as select * from SOURCE.result_classifications orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_apc stored as parquet as select * from SOURCE.result_apc orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_concepts stored as parquet as select * from SOURCE.result_concepts orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_datasources stored as parquet as select * from SOURCE.result_datasources orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_fundercount stored as parquet as select * from SOURCE.result_fundercount orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_gold stored as parquet as select * from SOURCE.result_gold orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_greenoa stored as parquet as select * from SOURCE.result_greenoa orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_languages stored as parquet as select * from SOURCE.result_languages orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_licenses stored as parquet as select * from SOURCE.result_licenses orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.licenses_normalized STORED AS PARQUET as select * from SOURCE.licenses_normalized;
create table TARGET.result_oids stored as parquet as select * from SOURCE.result_oids orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_organization stored as parquet as select * from SOURCE.result_organization orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_peerreviewed stored as parquet as select * from SOURCE.result_peerreviewed orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_pids stored as parquet as select * from SOURCE.result_pids orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_projectcount stored as parquet as select * from SOURCE.result_projectcount orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_projects stored as parquet as select * from SOURCE.result_projects orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_refereed stored as parquet as select * from SOURCE.result_refereed orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_sources stored as parquet as select * from SOURCE.result_sources orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_topics stored as parquet as select * from SOURCE.result_topics orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_fos stored as parquet as select * from SOURCE.result_fos orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_accessroute stored as parquet as select * from SOURCE.result_accessroute orig where exists (select 1 from TARGET.result r where r.id=orig.id);
+create table TARGET.result_orcid stored as parquet as select * from SOURCE.result_orcid orig where exists (select 1 from TARGET.result r where r.id=orig.id);

create view TARGET.foo1 as select * from SOURCE.result_result rr where rr.source in (select id from TARGET.result);
create view TARGET.foo2 as select * from SOURCE.result_result rr where rr.target in (select id from TARGET.result);
create table TARGET.result_result STORED AS PARQUET as select distinct * from (select * from TARGET.foo1 union all select * from TARGET.foo2) foufou;
drop view TARGET.foo1;
drop view TARGET.foo2;
-ANALYZE TABLE TARGET.result_result COMPUTE STATISTICS;
+--ANALYZE TABLE TARGET.result_result COMPUTE STATISTICS;

-- datasources
create view if not exists TARGET.datasource as select * from SOURCE.datasource;
@@ -175,7 +178,7 @@ create view if not exists TARGET.datasource_organizations as select * from SOURC
create view if not exists TARGET.datasource_sources as select * from SOURCE.datasource_sources;

create table TARGET.datasource_results stored as parquet as select id as result, datasource as id from TARGET.result_datasources;
-ANALYZE TABLE TARGET.datasource_results COMPUTE STATISTICS;
+--ANALYZE TABLE TARGET.datasource_results COMPUTE STATISTICS;
(Every other create table statement in the hunks below was likewise followed by an ANALYZE TABLE TARGET.<table> COMPUTE STATISTICS; line that this commit comments out in the same way; those repeated pairs are not listed again.)

-- organizations
create view if not exists TARGET.organization as select * from SOURCE.organization;

@@ -193,28 +196,28 @@ create view if not exists TARGET.project_classification as select * from SOURCE.
create view if not exists TARGET.project_organization_contribution as select * from SOURCE.project_organization_contribution;

create table TARGET.project_results stored as parquet as select id as result, project as id from TARGET.result_projects;

-- indicators
-- Sprint 1 ----
create table TARGET.indi_pub_green_oa stored as parquet as select * from SOURCE.indi_pub_green_oa orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.indi_pub_grey_lit stored as parquet as select * from SOURCE.indi_pub_grey_lit orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.indi_pub_doi_from_crossref stored as parquet as select * from SOURCE.indi_pub_doi_from_crossref orig where exists (select 1 from TARGET.result r where r.id=orig.id);
-- Sprint 2 ----
create table TARGET.indi_result_has_cc_licence stored as parquet as select * from SOURCE.indi_result_has_cc_licence orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.indi_result_has_cc_licence_url stored as parquet as select * from SOURCE.indi_result_has_cc_licence_url orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.indi_pub_has_abstract stored as parquet as select * from SOURCE.indi_pub_has_abstract orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.indi_result_with_orcid stored as parquet as select * from SOURCE.indi_result_with_orcid orig where exists (select 1 from TARGET.result r where r.id=orig.id);
---- Sprint 3 ----
create table TARGET.indi_funded_result_with_fundref stored as parquet as select * from SOURCE.indi_funded_result_with_fundref orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create view TARGET.indi_result_org_collab as select * from SOURCE.indi_result_org_collab;
create view TARGET.indi_result_org_country_collab as select * from SOURCE.indi_result_org_country_collab;
create view TARGET.indi_project_collab_org as select * from SOURCE.indi_project_collab_org;

@@ -223,32 +226,32 @@ create view TARGET.indi_funder_country_collab as select * from SOURCE.indi_funde
create view TARGET.indi_result_country_collab as select * from SOURCE.indi_result_country_collab;
---- Sprint 4 ----
create table TARGET.indi_pub_diamond stored as parquet as select * from SOURCE.indi_pub_diamond orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.indi_pub_in_transformative stored as parquet as select * from SOURCE.indi_pub_in_transformative orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.indi_pub_closed_other_open stored as parquet as select * from SOURCE.indi_pub_closed_other_open orig where exists (select 1 from TARGET.result r where r.id=orig.id);
---- Sprint 5 ----
create table TARGET.indi_result_no_of_copies stored as parquet as select * from SOURCE.indi_result_no_of_copies orig where exists (select 1 from TARGET.result r where r.id=orig.id);
---- Sprint 6 ----
create table TARGET.indi_pub_hybrid_oa_with_cc stored as parquet as select * from SOURCE.indi_pub_hybrid_oa_with_cc orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.indi_pub_bronze_oa stored as parquet as select * from SOURCE.indi_pub_bronze_oa orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.indi_pub_downloads stored as parquet as select * from SOURCE.indi_pub_downloads orig where exists (select 1 from TARGET.result r where r.id=orig.result_id);
create table TARGET.indi_pub_downloads_datasource stored as parquet as select * from SOURCE.indi_pub_downloads_datasource orig where exists (select 1 from TARGET.result r where r.id=orig.result_id);
create table TARGET.indi_pub_downloads_year stored as parquet as select * from SOURCE.indi_pub_downloads_year orig where exists (select 1 from TARGET.result r where r.id=orig.result_id);
create table TARGET.indi_pub_downloads_datasource_year stored as parquet as select * from SOURCE.indi_pub_downloads_datasource_year orig where exists (select 1 from TARGET.result r where r.id=orig.result_id);
---- Sprint 7 ----
create table TARGET.indi_pub_gold_oa stored as parquet as select * from SOURCE.indi_pub_gold_oa orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.indi_pub_hybrid stored as parquet as select * from SOURCE.indi_pub_hybrid orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create view TARGET.indi_org_fairness as select * from SOURCE.indi_org_fairness;
create view TARGET.indi_org_fairness_pub_pr as select * from SOURCE.indi_org_fairness_pub_pr;
create view TARGET.indi_org_fairness_pub_year as select * from SOURCE.indi_org_fairness_pub_year;

@@ -259,12 +262,22 @@ create view TARGET.indi_org_findable as select * from SOURCE.indi_org_findable;
create view TARGET.indi_org_openess as select * from SOURCE.indi_org_openess;
create view TARGET.indi_org_openess_year as select * from SOURCE.indi_org_openess_year;
create table TARGET.indi_pub_has_preprint stored as parquet as select * from SOURCE.indi_pub_has_preprint orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.indi_pub_in_subscribed stored as parquet as select * from SOURCE.indi_pub_in_subscribed orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.indi_result_with_pid stored as parquet as select * from SOURCE.indi_result_with_pid orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.indi_impact_measures stored as parquet as select * from SOURCE.indi_impact_measures orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.indi_pub_interdisciplinarity stored as parquet as select * from SOURCE.indi_pub_interdisciplinarity orig where exists (select 1 from TARGET.result r where r.id=orig.id);
+create table TARGET.result_apc_affiliations stored as parquet as select * from SOURCE.result_apc_affiliations orig where exists (select 1 from TARGET.result r where r.id=orig.id);
+--ANALYZE TABLE TARGET.result_apc_affiliations COMPUTE STATISTICS;
+create table TARGET.indi_is_project_result_after stored as parquet as select * from SOURCE.indi_is_project_result_after orig where exists (select 1 from TARGET.result r where r.id=orig.result_id);
+create table TARGET.indi_is_funder_plan_s stored as parquet as select * from SOURCE.indi_is_funder_plan_s orig where exists (select 1 from TARGET.result r where r.id=orig.id);
+create view TARGET.indi_funder_fairness as select * from SOURCE.indi_funder_fairness;
+create view TARGET.indi_funder_openess as select * from SOURCE.indi_funder_openess;
+create view TARGET.indi_funder_findable as select * from SOURCE.indi_funder_findable;
+create view TARGET.indi_ris_fairness as select * from SOURCE.indi_ris_fairness;
+create view TARGET.indi_ris_openess as select * from SOURCE.indi_ris_openess;
+create view TARGET.indi_ris_findable as select * from SOURCE.indi_ris_findable;
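All of the monitor tables above are filtered copies: the exists (select 1 from TARGET.result r where r.id=orig.id) predicate keeps only the rows whose result id belongs to the monitor's own result set, i.e. a semi-join against TARGET.result. After the driver script substitutes SOURCE and TARGET, one of these statements runs as plain HiveQL; a sketch with assumed database names (not the real ones):

# Sketch only: openaire_stats / openaire_monitor_funded are illustrative substitutions.
hive $HIVE_OPTS -e "
create table openaire_monitor_funded.result_citations stored as parquet as
select * from openaire_stats.result_citations orig
where exists (select 1 from openaire_monitor_funded.result r where r.id=orig.id);"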
@@ -0,0 +1,292 @@
drop database if exists TARGET cascade;
create database if not exists TARGET;

create view if not exists TARGET.category as select * from SOURCE.category;
create view if not exists TARGET.concept as select * from SOURCE.concept;
create view if not exists TARGET.context as select * from SOURCE.context;
create view if not exists TARGET.country as select * from SOURCE.country;
create view if not exists TARGET.countrygdp as select * from SOURCE.countrygdp;
create view if not exists TARGET.creation_date as select * from SOURCE.creation_date;
create view if not exists TARGET.funder as select * from SOURCE.funder;
create view if not exists TARGET.fundref as select * from SOURCE.fundref;
create view if not exists TARGET.rndexpenditure as select * from SOURCE.rndexpediture;
create view if not exists TARGET.rndgdpexpenditure as select * from SOURCE.rndgdpexpenditure;
create view if not exists TARGET.doctoratestudents as select * from SOURCE.doctoratestudents;
create view if not exists TARGET.totalresearchers as select * from SOURCE.totalresearchers;
create view if not exists TARGET.totalresearchersft as select * from SOURCE.totalresearchersft;
create view if not exists TARGET.hrrst as select * from SOURCE.hrrst;
create view if not exists TARGET.graduatedoctorates as select * from SOURCE.graduatedoctorates;

create table TARGET.result stored as parquet as
select distinct * from (
select * from SOURCE.result r where exists (select 1 from SOURCE.result_projects rp join SOURCE.project p on rp.project=p.id where rp.id=r.id)
union all
select * from SOURCE.result r where exists (select 1 from SOURCE.result_concepts rc where rc.id=r.id)
union all
select * from SOURCE.result r where exists (select 1 from SOURCE.result_organization ro where ro.id=r.id and ro.organization in (
'openorgs____::b84450f9864182c67b8611b5593f4250', --"Athena Research and Innovation Center In Information Communication & Knowledge Technologies', --ARC"
'openorgs____::d41cf6bd4ab1b1362a44397e0b95c975', --National Research Council
'openorgs____::d2a09b9d5eabb10c95f9470e172d05d2', --??? Not exists ??
'openorgs____::d169c7407dd417152596908d48c11460', --Masaryk University
'openorgs____::1ec924b1759bb16d0a02f2dad8689b21', --University of Belgrade
'openorgs____::0ae431b820e4c33db8967fbb2b919150', --University of Helsinki
'openorgs____::759d59f05d77188faee99b7493b46805', --University of Minho
'openorgs____::cad284878801b9465fa51a95b1d779db', --Universidad Politécnica de Madrid
'openorgs____::eadc8da90a546e98c03f896661a2e4d4', --University of Göttingen
'openorgs____::c0286313e36479eff8676dba9b724b40', --National and Kapodistrian University of Athens
-- 'openorgs____::c80a8243a5e5c620d7931c88d93bf17a', --Université Paris Diderot
'openorgs____::c08634f0a6b0081c3dc6e6c93a4314f3', --Bielefeld University
'openorgs____::6fc85e4a8f7ecaf4b0c738d010e967ea', --University of Southern Denmark
'openorgs____::3d6122f87f9a97a99d8f6e3d73313720', --Humboldt-Universität zu Berlin
'openorgs____::16720ada63d0fa8ca41601feae7d1aa5', --TU Darmstadt
'openorgs____::ccc0a066b56d2cfaf90c2ae369df16f5', --KU Leuven
'openorgs____::4c6f119632adf789746f0a057ed73e90', --University of the Western Cape
'openorgs____::ec3665affa01aeafa28b7852c4176dbd', --Rudjer Boskovic Institute
'openorgs____::5f31346d444a7f06a28c880fb170b0f6', --Ghent University
'openorgs____::2dbe47117fd5409f9c61620813456632', --University of Luxembourg
'openorgs____::6445d7758d3a40c4d997953b6632a368', --National Institute of Informatics (NII)
'openorgs____::b77c01aa15de3675da34277d48de2ec1', -- Valencia Catholic University Saint Vincent Martyr
'openorgs____::7fe2f66cdc43983c6b24816bfe9cf6a0', -- Unviersity of Warsaw
'openorgs____::15e7921fc50d9aa1229a82a84429419e', -- University Of Thessaly
'openorgs____::11f7919dadc8f8a7251af54bba60c956', -- Technical University of Crete
'openorgs____::84f0c5f5dbb6daf42748485924efde4b', -- University of Piraeus
'openorgs____::4ac562f0376fce3539504567649cb373', -- University of Patras
'openorgs____::3e8d1f8c3f6cd7f418b09f1f58b4873b', -- Aristotle University of Thessaloniki
'openorgs____::3fcef6e1c469c10f2a84b281372c9814', -- World Bank
'openorgs____::1698a2eb1885ef8adb5a4a969e745ad3', -- École des Ponts ParisTech
'openorgs____::e15adb13c4dadd49de4d35c39b5da93a', -- Nanyang Technological University
'openorgs____::4b34103bde246228fcd837f5f1bf4212', -- Autonomous University of Barcelona
'openorgs____::72ec75fcfc4e0df1a76dc4c49007fceb', -- McMaster University
'openorgs____::51c7fc556e46381734a25a6fbc3fd398', -- University of Modena and Reggio Emilia
'openorgs____::235d7f9ad18ecd7e6dc62ea4990cb9db', -- Bilkent University
'openorgs____::31f2fa9e05b49d4cf40a19c3fed8eb06', -- Saints Cyril and Methodius University of Skopje
'openorgs____::db7686f30f22cbe73a4fde872ce812a6', -- University of Milan
'openorgs____::b8b8ca674452579f3f593d9f5e557483', -- University College Cork
'openorgs____::38d7097854736583dde879d12dacafca', -- Brown University
'openorgs____::57784c9e047e826fefdb1ef816120d92', --Arts et Métiers ParisTech
'openorgs____::2530baca8a15936ba2e3297f2bce2e7e', -- University of Cape Town
'openorgs____::d11f981828c485cd23d93f7f24f24db1', -- Technological University Dublin
'openorgs____::5e6bf8962665cdd040341171e5c631d8', -- Delft University of Technology
'openorgs____::846cb428d3f52a445f7275561a7beb5d', -- University of Manitoba
'openorgs____::eb391317ed0dc684aa81ac16265de041', -- Universitat Rovira i Virgili
'openorgs____::66aa9fc2fceb271423dfabcc38752dc0', -- Lund University
'openorgs____::3cff625a4370d51e08624cc586138b2f', -- IMT Atlantique
'openorgs____::c0b262bd6eab819e4c994914f9c010e2', -- National Institute of Geophysics and Volcanology
'openorgs____::1624ff7c01bb641b91f4518539a0c28a', -- Vrije Universiteit Amsterdam
'openorgs____::4d4051b56708688235252f1d8fddb8c1', --Iscte - Instituto Universitário de Lisboa
'openorgs____::ab4ac74c35fa5dada770cf08e5110fab', -- Universidade Católica Portuguesa
'openorgs____::4d4051b56708688235252f1d8fddb8c1', -- Iscte - Instituto Universitário de Lisboa
'openorgs____::5d55fb216b14691cf68218daf5d78cd9', -- Munster Technological University
'openorgs____::0fccc7640f0cb44d5cd1b06b312a06b9', -- Cardiff University
'openorgs____::8839b55dae0c84d56fd533f52d5d483a' -- Leibniz Institute of Ecological Urban and Regional Development
) )) foo;

--ANALYZE TABLE TARGET.result COMPUTE STATISTICS;

create view if not exists TARGET.category as select * from SOURCE.category;
create view if not exists TARGET.concept as select * from SOURCE.concept;
create view if not exists TARGET.context as select * from SOURCE.context;
create view if not exists TARGET.country as select * from SOURCE.country;
create view if not exists TARGET.countrygdp as select * from SOURCE.countrygdp;
create view if not exists TARGET.creation_date as select * from SOURCE.creation_date;
create view if not exists TARGET.funder as select * from SOURCE.funder;
create view if not exists TARGET.fundref as select * from SOURCE.fundref;
create view if not exists TARGET.rndexpenditure as select * from SOURCE.rndexpediture;
create view if not exists TARGET.rndgdpexpenditure as select * from SOURCE.rndgdpexpenditure;
create view if not exists TARGET.doctoratestudents as select * from SOURCE.doctoratestudents;
create view if not exists TARGET.totalresearchers as select * from SOURCE.totalresearchers;
create view if not exists TARGET.totalresearchersft as select * from SOURCE.totalresearchersft;
create view if not exists TARGET.hrrst as select * from SOURCE.hrrst;
--create view if not exists TARGET.graduatedoctorates as select * from SOURCE.graduatedoctorates;

create table TARGET.result_citations stored as parquet as select * from SOURCE.result_citations orig where exists (select 1 from TARGET.result r where r.id=orig.id);
--ANALYZE TABLE TARGET.result_citations COMPUTE STATISTICS;
(In this new file every create table statement below is likewise followed by a commented-out --ANALYZE TABLE TARGET.<table> COMPUTE STATISTICS; line, shown here only for result_citations.)

create table TARGET.result_references_oc stored as parquet as select * from SOURCE.result_references_oc orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_citations_oc stored as parquet as select * from SOURCE.result_citations_oc orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_classifications stored as parquet as select * from SOURCE.result_classifications orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_apc stored as parquet as select * from SOURCE.result_apc orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_concepts stored as parquet as select * from SOURCE.result_concepts orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_datasources stored as parquet as select * from SOURCE.result_datasources orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_fundercount stored as parquet as select * from SOURCE.result_fundercount orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_gold stored as parquet as select * from SOURCE.result_gold orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_greenoa stored as parquet as select * from SOURCE.result_greenoa orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_languages stored as parquet as select * from SOURCE.result_languages orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_licenses stored as parquet as select * from SOURCE.result_licenses orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.licenses_normalized STORED AS PARQUET as select * from SOURCE.licenses_normalized;
create table TARGET.result_oids stored as parquet as select * from SOURCE.result_oids orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_organization stored as parquet as select * from SOURCE.result_organization orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_peerreviewed stored as parquet as select * from SOURCE.result_peerreviewed orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_pids stored as parquet as select * from SOURCE.result_pids orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_projectcount stored as parquet as select * from SOURCE.result_projectcount orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_projects stored as parquet as select * from SOURCE.result_projects orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_refereed stored as parquet as select * from SOURCE.result_refereed orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_sources stored as parquet as select * from SOURCE.result_sources orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_topics stored as parquet as select * from SOURCE.result_topics orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_fos stored as parquet as select * from SOURCE.result_fos orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.result_accessroute stored as parquet as select * from SOURCE.result_accessroute orig where exists (select 1 from TARGET.result r where r.id=orig.id);

create view TARGET.foo1 as select * from SOURCE.result_result rr where rr.source in (select id from TARGET.result);
create view TARGET.foo2 as select * from SOURCE.result_result rr where rr.target in (select id from TARGET.result);
create table TARGET.result_result STORED AS PARQUET as select distinct * from (select * from TARGET.foo1 union all select * from TARGET.foo2) foufou;
drop view TARGET.foo1;
drop view TARGET.foo2;
--ANALYZE TABLE TARGET.result_result COMPUTE STATISTICS;

-- datasources
create view if not exists TARGET.datasource as select * from SOURCE.datasource;
create view if not exists TARGET.datasource_oids as select * from SOURCE.datasource_oids;
create view if not exists TARGET.datasource_organizations as select * from SOURCE.datasource_organizations;
create view if not exists TARGET.datasource_sources as select * from SOURCE.datasource_sources;

create table TARGET.datasource_results stored as parquet as select id as result, datasource as id from TARGET.result_datasources;

-- organizations
create view if not exists TARGET.organization as select * from SOURCE.organization;
create view if not exists TARGET.organization_datasources as select * from SOURCE.organization_datasources;
create view if not exists TARGET.organization_pids as select * from SOURCE.organization_pids;
create view if not exists TARGET.organization_projects as select * from SOURCE.organization_projects;
create view if not exists TARGET.organization_sources as select * from SOURCE.organization_sources;

-- projects
create view if not exists TARGET.project as select * from SOURCE.project;
create view if not exists TARGET.project_oids as select * from SOURCE.project_oids;
create view if not exists TARGET.project_organizations as select * from SOURCE.project_organizations;
create view if not exists TARGET.project_resultcount as select * from SOURCE.project_resultcount;
create view if not exists TARGET.project_classification as select * from SOURCE.project_classification;
create view if not exists TARGET.project_organization_contribution as select * from SOURCE.project_organization_contribution;

create table TARGET.project_results stored as parquet as select id as result, project as id from TARGET.result_projects;

-- indicators
-- Sprint 1 ----
create table TARGET.indi_pub_green_oa stored as parquet as select * from SOURCE.indi_pub_green_oa orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.indi_pub_grey_lit stored as parquet as select * from SOURCE.indi_pub_grey_lit orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.indi_pub_doi_from_crossref stored as parquet as select * from SOURCE.indi_pub_doi_from_crossref orig where exists (select 1 from TARGET.result r where r.id=orig.id);
-- Sprint 2 ----
create table TARGET.indi_result_has_cc_licence stored as parquet as select * from SOURCE.indi_result_has_cc_licence orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.indi_result_has_cc_licence_url stored as parquet as select * from SOURCE.indi_result_has_cc_licence_url orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.indi_pub_has_abstract stored as parquet as select * from SOURCE.indi_pub_has_abstract orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.indi_result_with_orcid stored as parquet as select * from SOURCE.indi_result_with_orcid orig where exists (select 1 from TARGET.result r where r.id=orig.id);
---- Sprint 3 ----
create table TARGET.indi_funded_result_with_fundref stored as parquet as select * from SOURCE.indi_funded_result_with_fundref orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create view TARGET.indi_result_org_collab as select * from SOURCE.indi_result_org_collab;
create view TARGET.indi_result_org_country_collab as select * from SOURCE.indi_result_org_country_collab;
create view TARGET.indi_project_collab_org as select * from SOURCE.indi_project_collab_org;
create view TARGET.indi_project_collab_org_country as select * from SOURCE.indi_project_collab_org_country;
create view TARGET.indi_funder_country_collab as select * from SOURCE.indi_funder_country_collab;
create view TARGET.indi_result_country_collab as select * from SOURCE.indi_result_country_collab;
---- Sprint 4 ----
create table TARGET.indi_pub_diamond stored as parquet as select * from SOURCE.indi_pub_diamond orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.indi_pub_in_transformative stored as parquet as select * from SOURCE.indi_pub_in_transformative orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.indi_pub_closed_other_open stored as parquet as select * from SOURCE.indi_pub_closed_other_open orig where exists (select 1 from TARGET.result r where r.id=orig.id);
---- Sprint 5 ----
create table TARGET.indi_result_no_of_copies stored as parquet as select * from SOURCE.indi_result_no_of_copies orig where exists (select 1 from TARGET.result r where r.id=orig.id);
---- Sprint 6 ----
create table TARGET.indi_pub_hybrid_oa_with_cc stored as parquet as select * from SOURCE.indi_pub_hybrid_oa_with_cc orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.indi_pub_bronze_oa stored as parquet as select * from SOURCE.indi_pub_bronze_oa orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.indi_pub_downloads stored as parquet as select * from SOURCE.indi_pub_downloads orig where exists (select 1 from TARGET.result r where r.id=orig.result_id);
create table TARGET.indi_pub_downloads_datasource stored as parquet as select * from SOURCE.indi_pub_downloads_datasource orig where exists (select 1 from TARGET.result r where r.id=orig.result_id);
create table TARGET.indi_pub_downloads_year stored as parquet as select * from SOURCE.indi_pub_downloads_year orig where exists (select 1 from TARGET.result r where r.id=orig.result_id);
create table TARGET.indi_pub_downloads_datasource_year stored as parquet as select * from SOURCE.indi_pub_downloads_datasource_year orig where exists (select 1 from TARGET.result r where r.id=orig.result_id);
---- Sprint 7 ----
create table TARGET.indi_pub_gold_oa stored as parquet as select * from SOURCE.indi_pub_gold_oa orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create table TARGET.indi_pub_hybrid stored as parquet as select * from SOURCE.indi_pub_hybrid orig where exists (select 1 from TARGET.result r where r.id=orig.id);
create view TARGET.indi_org_fairness as select * from SOURCE.indi_org_fairness;
create view TARGET.indi_org_fairness_pub_pr as select * from SOURCE.indi_org_fairness_pub_pr;
create view TARGET.indi_org_fairness_pub_year as select * from SOURCE.indi_org_fairness_pub_year;
create view TARGET.indi_org_fairness_pub as select * from SOURCE.indi_org_fairness_pub;
create view TARGET.indi_org_fairness_year as select * from SOURCE.indi_org_fairness_year;
create view TARGET.indi_org_findable_year as select * from SOURCE.indi_org_findable_year;
create view TARGET.indi_org_findable as select * from SOURCE.indi_org_findable;
create view TARGET.indi_org_openess as select * from SOURCE.indi_org_openess;
create view TARGET.indi_org_openess_year as select * from SOURCE.indi_org_openess_year;
create table TARGET.indi_pub_has_preprint stored as parquet as select * from SOURCE.indi_pub_has_preprint orig where exists (select 1 from TARGET.result r where r.id=orig.id);
|
||||||
|
--ANALYZE TABLE TARGET.indi_pub_has_preprint COMPUTE STATISTICS;
|
||||||
|
create table TARGET.indi_pub_in_subscribed stored as parquet as select * from SOURCE.indi_pub_in_subscribed orig where exists (select 1 from TARGET.result r where r.id=orig.id);
|
||||||
|
--ANALYZE TABLE TARGET.indi_pub_in_subscribed COMPUTE STATISTICS;
|
||||||
|
create table TARGET.indi_result_with_pid stored as parquet as select * from SOURCE.indi_result_with_pid orig where exists (select 1 from TARGET.result r where r.id=orig.id);
|
||||||
|
--ANALYZE TABLE TARGET.indi_result_with_pid COMPUTE STATISTICS;
|
||||||
|
create table TARGET.indi_impact_measures stored as parquet as select * from SOURCE.indi_impact_measures orig where exists (select 1 from TARGET.result r where r.id=orig.id);
|
||||||
|
--ANALYZE TABLE TARGET.indi_impact_measures COMPUTE STATISTICS;
|
||||||
|
create table TARGET.indi_pub_interdisciplinarity stored as parquet as select * from SOURCE.indi_pub_interdisciplinarity orig where exists (select 1 from TARGET.result r where r.id=orig.id);
|
||||||
|
--ANALYZE TABLE TARGET.indi_pub_interdisciplinarity COMPUTE STATISTICS;
|
||||||
|
create table TARGET.result_apc_affiliations stored as parquet as select * from SOURCE.result_apc_affiliations orig where exists (select 1 from TARGET.result r where r.id=orig.id);
|
||||||
|
--ANALYZE TABLE TARGET.result_apc_affiliations COMPUTE STATISTICS;
|
||||||
|
create table TARGET.indi_is_project_result_after stored as parquet as select * from SOURCE.indi_is_project_result_after orig where exists (select 1 from TARGET.result r where r.id=orig.id);
|
||||||
|
create table TARGET.indi_is_funder_plan_s stored as parquet as select * from SOURCE.indi_is_funder_plan_s orig where exists (select 1 from TARGET.result r where r.id=orig.id);
|
||||||
|
create view TARGET.indi_funder_fairness as select * from SOURCE.indi_funder_fairness;
|
||||||
|
create view TARGET.indi_funder_openess as select * from SOURCE.indi_funder_openess;
|
||||||
|
create view TARGET.indi_funder_findable as select * from SOURCE.indi_funder_findable;
|
||||||
|
create view TARGET.indi_ris_fairness as select * from SOURCE.indi_ris_fairness;
|
||||||
|
create view TARGET.indi_ris_openess as select * from SOURCE.indi_ris_openess;
|
||||||
|
create view TARGET.indi_ris_findable as select * from SOURCE.indi_ris_findable;
|
||||||
|
|
||||||
|
|
|
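The statements above follow a single pattern: result-level indicators are copied into the monitor database (TARGET) as parquet tables restricted to the results that exist there, while organisation-, funder- and RIS-level indicators are simply exposed as views over the stats database (SOURCE). A minimal sketch of the two variants, using hypothetical indicator names (indi_example and indi_example_org are placeholders, not part of the script):

-- sketch: result-level indicator, materialise only rows whose result survives in TARGET
create table TARGET.indi_example stored as parquet as
select orig.*
from SOURCE.indi_example orig
where exists (select 1 from TARGET.result r where r.id = orig.id);

-- sketch: organisation-level indicator, no per-result filter needed, a view is enough
create view TARGET.indi_example_org as select * from SOURCE.indi_example_org;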
@@ -12,4 +12,4 @@ create table TARGET.result stored as parquet as
 -- join SOURCE.result
 where rc.id=r.id and conc.category like CONTEXT)
 ) foo;
-ANALYZE TABLE TARGET.result COMPUTE STATISTICS;
+--ANALYZE TABLE TARGET.result COMPUTE STATISTICS;
@@ -12,4 +12,4 @@ create table TARGET.result stored as parquet as
 -- join SOURCE.result
 where rc.id=r.id and conc.category not in (CONTEXTS))
 ) foo;
-ANALYZE TABLE TARGET.result COMPUTE STATISTICS;
+--ANALYZE TABLE TARGET.result COMPUTE STATISTICS;
@@ -6,4 +6,4 @@ create table TARGET.result stored as parquet as
 select * from SOURCE.result r where exists (select 1 from SOURCE.result_projects rp join SOURCE.project p on rp.project=p.id where rp.id=r.id)
 ) foo;
 
-ANALYZE TABLE TARGET.result COMPUTE STATISTICS;
+--ANALYZE TABLE TARGET.result COMPUTE STATISTICS;
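Across these hunks the only functional change is that the inline ANALYZE TABLE ... COMPUTE STATISTICS calls are commented out, so the monitor tables are now created without a statistics pass. If statistics are still wanted, they can be gathered later in a separate maintenance step; a sketch (the FOR COLUMNS variant is an assumption about a possible follow-up, not part of this commit):

ANALYZE TABLE TARGET.result COMPUTE STATISTICS;
ANALYZE TABLE TARGET.result COMPUTE STATISTICS FOR COLUMNS;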
@@ -42,7 +42,7 @@ create table TARGET.result stored as parquet as
 'openorgs____::31f2fa9e05b49d4cf40a19c3fed8eb06', -- Saints Cyril and Methodius University of Skopje
 'openorgs____::db7686f30f22cbe73a4fde872ce812a6', -- University of Milan
 'openorgs____::b8b8ca674452579f3f593d9f5e557483', -- University College Cork
-'openorgs____::38d7097854736583dde879d12dacafca' -- Brown University
+'openorgs____::38d7097854736583dde879d12dacafca', -- Brown University
 'openorgs____::57784c9e047e826fefdb1ef816120d92', --Arts et Métiers ParisTech
 'openorgs____::2530baca8a15936ba2e3297f2bce2e7e', -- University of Cape Town
 'openorgs____::d11f981828c485cd23d93f7f24f24db1', -- Technological University Dublin
@@ -52,7 +52,13 @@ create table TARGET.result stored as parquet as
 'openorgs____::66aa9fc2fceb271423dfabcc38752dc0', -- Lund University
 'openorgs____::3cff625a4370d51e08624cc586138b2f', -- IMT Atlantique
 'openorgs____::c0b262bd6eab819e4c994914f9c010e2', -- National Institute of Geophysics and Volcanology
-'openorgs____::1624ff7c01bb641b91f4518539a0c28a' -- Vrije Universiteit Amsterdam
+'openorgs____::1624ff7c01bb641b91f4518539a0c28a', -- Vrije Universiteit Amsterdam
+'openorgs____::4d4051b56708688235252f1d8fddb8c1', --Iscte - Instituto Universitário de Lisboa
+'openorgs____::ab4ac74c35fa5dada770cf08e5110fab', -- Universidade Católica Portuguesa
+'openorgs____::4d4051b56708688235252f1d8fddb8c1', -- Iscte - Instituto Universitário de Lisboa
+'openorgs____::5d55fb216b14691cf68218daf5d78cd9', -- Munster Technological University
+'openorgs____::0fccc7640f0cb44d5cd1b06b312a06b9', -- Cardiff University
+'openorgs____::8839b55dae0c84d56fd533f52d5d483a' -- Leibniz Institute of Ecological Urban and Regional Development
 ))) foo;
 
-ANALYZE TABLE TARGET.result COMPUTE STATISTICS;
+--ANALYZE TABLE TARGET.result COMPUTE STATISTICS;
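The openorgs identifiers above form the whitelist of institutions whose results are copied into the monitor database; only the tail of the enclosing statement appears in these hunks. A schematic sketch of its likely shape, assuming a result_organization link table as in the stats schema (the join table and column names are assumptions, not shown in the hunks):

create table TARGET.result stored as parquet as
select distinct * from (
  select * from SOURCE.result r
  where exists (
    select 1 from SOURCE.result_organization ro
    where ro.id = r.id and ro.organization in (
      'openorgs____::b8b8ca674452579f3f593d9f5e557483' -- University College Cork
    ))) foo;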
@@ -8,7 +8,7 @@ from ${stats_db_name}.result r
 group by rl.id
 ) rln on rln.id=r.id;
 
-ANALYZE TABLE ${observatory_db_name}.result_cc_licence COMPUTE STATISTICS;
+--ANALYZE TABLE ${observatory_db_name}.result_cc_licence COMPUTE STATISTICS;
 
 create table ${observatory_db_name}.result_affiliated_country stored as parquet as
 select

@@ -39,7 +39,7 @@ group by r.green, r.gold, case when rl.type is not null then true else false end
 case when r.access_mode in ('Open Access', 'Open Source') then true else false end, r.peer_reviewed, r.type, abstract,
 cc_licence, r.authors > 1, rpc.count > 1, rfc.count > 1, c.code, c.name;
 
-ANALYZE TABLE ${observatory_db_name}.result_affiliated_country COMPUTE STATISTICS;
+--ANALYZE TABLE ${observatory_db_name}.result_affiliated_country COMPUTE STATISTICS;
 
 create table ${observatory_db_name}.result_affiliated_year stored as parquet as
 select

@@ -70,7 +70,7 @@ group by r.green, r.gold, case when rl.type is not null then true else false end
 case when r.access_mode in ('Open Access', 'Open Source') then true else false end, r.peer_reviewed, r.type, abstract,
 cc_licence, r.authors > 1, rpc.count > 1, rfc.count > 1, r.year;
 
-ANALYZE TABLE ${observatory_db_name}.result_affiliated_year COMPUTE STATISTICS;
+--ANALYZE TABLE ${observatory_db_name}.result_affiliated_year COMPUTE STATISTICS;
 
 create table ${observatory_db_name}.result_affiliated_year_country stored as parquet as
 select

@@ -101,7 +101,7 @@ group by r.green, r.gold, case when rl.type is not null then true else false end
 case when r.access_mode in ('Open Access', 'Open Source') then true else false end, r.peer_reviewed, r.type, abstract,
 cc_licence, r.authors > 1, rpc.count > 1, rfc.count > 1, r.year, c.code, c.name;
 
-ANALYZE TABLE ${observatory_db_name}.result_affiliated_year_country COMPUTE STATISTICS;
+--ANALYZE TABLE ${observatory_db_name}.result_affiliated_year_country COMPUTE STATISTICS;
 
 create table ${observatory_db_name}.result_affiliated_datasource stored as parquet as
 select

@@ -134,7 +134,7 @@ group by r.green, r.gold, case when rl.type is not null then true else false end
 case when r.access_mode in ('Open Access', 'Open Source') then true else false end, r.peer_reviewed, r.type, abstract,
 cc_licence, r.authors > 1, rpc.count > 1, rfc.count > 1, d.name;
 
-ANALYZE TABLE ${observatory_db_name}.result_affiliated_datasource COMPUTE STATISTICS;
+--ANALYZE TABLE ${observatory_db_name}.result_affiliated_datasource COMPUTE STATISTICS;
 
 create table ${observatory_db_name}.result_affiliated_datasource_country stored as parquet as
 select

@@ -167,7 +167,7 @@ group by r.green, r.gold, case when rl.type is not null then true else false end
 case when r.access_mode in ('Open Access', 'Open Source') then true else false end, r.peer_reviewed, r.type, abstract,
 cc_licence, r.authors > 1, rpc.count > 1, rfc.count > 1, d.name, c.code, c.name;
 
-ANALYZE TABLE ${observatory_db_name}.result_affiliated_datasource_country COMPUTE STATISTICS;
+--ANALYZE TABLE ${observatory_db_name}.result_affiliated_datasource_country COMPUTE STATISTICS;
 
 create table ${observatory_db_name}.result_affiliated_organization stored as parquet as
 select

@@ -198,7 +198,7 @@ group by r.green, r.gold, case when rl.type is not null then true else false end
 case when r.access_mode in ('Open Access', 'Open Source') then true else false end, r.peer_reviewed, r.type, abstract,
 cc_licence, r.authors > 1, rpc.count > 1, rfc.count > 1, o.name;
 
-ANALYZE TABLE ${observatory_db_name}.result_affiliated_organization COMPUTE STATISTICS;
+--ANALYZE TABLE ${observatory_db_name}.result_affiliated_organization COMPUTE STATISTICS;
 
 create table ${observatory_db_name}.result_affiliated_organization_country stored as parquet as
 select

@@ -229,7 +229,7 @@ group by r.green, r.gold, case when rl.type is not null then true else false end
 case when r.access_mode in ('Open Access', 'Open Source') then true else false end, r.peer_reviewed, r.type, abstract,
 cc_licence, r.authors > 1, rpc.count > 1, rfc.count > 1, o.name, c.code, c.name;
 
-ANALYZE TABLE ${observatory_db_name}.result_affiliated_organization_country COMPUTE STATISTICS;
+--ANALYZE TABLE ${observatory_db_name}.result_affiliated_organization_country COMPUTE STATISTICS;
 
 create table ${observatory_db_name}.result_affiliated_funder stored as parquet as
 select

@@ -262,7 +262,7 @@ group by r.green, r.gold, case when rl.type is not null then true else false end
 case when r.access_mode in ('Open Access', 'Open Source') then true else false end, r.peer_reviewed, r.type, abstract,
 cc_licence, r.authors > 1, rpc.count > 1, rfc.count > 1, p.funder;
 
-ANALYZE TABLE ${observatory_db_name}.result_affiliated_funder COMPUTE STATISTICS;
+--ANALYZE TABLE ${observatory_db_name}.result_affiliated_funder COMPUTE STATISTICS;
 
 create table ${observatory_db_name}.result_affiliated_funder_country stored as parquet as
 select

@@ -295,7 +295,7 @@ group by r.green, r.gold, case when rl.type is not null then true else false end
 case when r.access_mode in ('Open Access', 'Open Source') then true else false end, r.peer_reviewed, r.type, abstract,
 cc_licence, r.authors > 1, rpc.count > 1, rfc.count > 1, p.funder, c.code, c.name;
 
-ANALYZE TABLE ${observatory_db_name}.result_affiliated_funder_country COMPUTE STATISTICS;
+--ANALYZE TABLE ${observatory_db_name}.result_affiliated_funder_country COMPUTE STATISTICS;
 
 create table ${observatory_db_name}.result_deposited_country stored as parquet as
 select

@@ -328,7 +328,7 @@ group by r.green, r.gold, case when rl.type is not null then true else false end
 case when r.access_mode in ('Open Access', 'Open Source') then true else false end, r.peer_reviewed, r.type, abstract,
 cc_licence, r.authors > 1, rpc.count > 1, rfc.count > 1, c.code, c.name;
 
-ANALYZE TABLE ${observatory_db_name}.result_deposited_country COMPUTE STATISTICS;
+--ANALYZE TABLE ${observatory_db_name}.result_deposited_country COMPUTE STATISTICS;
 
 create table ${observatory_db_name}.result_deposited_year stored as parquet as
 select

@@ -361,7 +361,7 @@ group by r.green, r.gold, case when rl.type is not null then true else false end
 case when r.access_mode in ('Open Access', 'Open Source') then true else false end, r.peer_reviewed, r.type, abstract,
 cc_licence, r.authors > 1, rpc.count > 1, rfc.count > 1, r.year;
 
-ANALYZE TABLE ${observatory_db_name}.result_deposited_year COMPUTE STATISTICS;
+--ANALYZE TABLE ${observatory_db_name}.result_deposited_year COMPUTE STATISTICS;
 
 create table ${observatory_db_name}.result_deposited_year_country stored as parquet as
 select

@@ -394,7 +394,7 @@ group by r.green, r.gold, case when rl.type is not null then true else false end
 case when r.access_mode in ('Open Access', 'Open Source') then true else false end, r.peer_reviewed, r.type, abstract,
 cc_licence, r.authors > 1, rpc.count > 1, rfc.count > 1, r.year, c.code, c.name;
 
-ANALYZE TABLE ${observatory_db_name}.result_deposited_year_country COMPUTE STATISTICS;
+--ANALYZE TABLE ${observatory_db_name}.result_deposited_year_country COMPUTE STATISTICS;
 
 create table ${observatory_db_name}.result_deposited_datasource stored as parquet as
 select

@@ -427,7 +427,7 @@ group by r.green, r.gold, case when rl.type is not null then true else false end
 case when r.access_mode in ('Open Access', 'Open Source') then true else false end, r.peer_reviewed, r.type, abstract,
 cc_licence, r.authors > 1, rpc.count > 1, rfc.count > 1, d.name;
 
-ANALYZE TABLE ${observatory_db_name}.result_deposited_datasource COMPUTE STATISTICS;
+--ANALYZE TABLE ${observatory_db_name}.result_deposited_datasource COMPUTE STATISTICS;
 
 create table ${observatory_db_name}.result_deposited_datasource_country stored as parquet as
 select

@@ -460,7 +460,7 @@ group by r.green, r.gold, case when rl.type is not null then true else false end
 case when r.access_mode in ('Open Access', 'Open Source') then true else false end, r.peer_reviewed, r.type, abstract,
 cc_licence, r.authors > 1, rpc.count > 1, rfc.count > 1, d.name, c.code, c.name;
 
-ANALYZE TABLE ${observatory_db_name}.result_deposited_datasource_country COMPUTE STATISTICS;
+--ANALYZE TABLE ${observatory_db_name}.result_deposited_datasource_country COMPUTE STATISTICS;
 
 create table ${observatory_db_name}.result_deposited_organization stored as parquet as
 select

@@ -493,7 +493,7 @@ group by r.green, r.gold, case when rl.type is not null then true else false end
 case when r.access_mode in ('Open Access', 'Open Source') then true else false end, r.peer_reviewed, r.type, abstract,
 cc_licence, r.authors > 1, rpc.count > 1, rfc.count > 1, o.name;
 
-ANALYZE TABLE ${observatory_db_name}.result_deposited_organization COMPUTE STATISTICS;
+--ANALYZE TABLE ${observatory_db_name}.result_deposited_organization COMPUTE STATISTICS;
 
 create table ${observatory_db_name}.result_deposited_organization_country stored as parquet as
 select

@@ -526,7 +526,7 @@ group by r.green, r.gold, case when rl.type is not null then true else false end
 case when r.access_mode in ('Open Access', 'Open Source') then true else false end, r.peer_reviewed, r.type, abstract,
 cc_licence, r.authors > 1, rpc.count > 1, rfc.count > 1, o.name, c.code, c.name;
 
-ANALYZE TABLE ${observatory_db_name}.result_deposited_organization_country COMPUTE STATISTICS;
+--ANALYZE TABLE ${observatory_db_name}.result_deposited_organization_country COMPUTE STATISTICS;
 
 create table ${observatory_db_name}.result_deposited_funder stored as parquet as
 select

@@ -561,7 +561,7 @@ group by r.green, r.gold, case when rl.type is not null then true else false end
 case when r.access_mode in ('Open Access', 'Open Source') then true else false end, r.peer_reviewed, r.type, abstract,
 cc_licence, r.authors > 1, rpc.count > 1, rfc.count > 1, p.funder;
 
-ANALYZE TABLE ${observatory_db_name}.result_deposited_funder COMPUTE STATISTICS;
+--ANALYZE TABLE ${observatory_db_name}.result_deposited_funder COMPUTE STATISTICS;
 
 create table ${observatory_db_name}.result_deposited_funder_country stored as parquet as
 select

@@ -596,4 +596,4 @@ group by r.green, r.gold, case when rl.type is not null then true else false end
 case when r.access_mode in ('Open Access', 'Open Source') then true else false end, r.peer_reviewed, r.type, abstract,
 cc_licence, r.authors > 1, rpc.count > 1, rfc.count > 1, p.funder, c.code, c.name;
 
-ANALYZE TABLE ${observatory_db_name}.result_deposited_funder_country COMPUTE STATISTICS;
+--ANALYZE TABLE ${observatory_db_name}.result_deposited_funder_country COMPUTE STATISTICS;
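Every observatory table above groups results by the same openness dimensions (green, gold, licence type, access mode, peer review, abstract, CC licence, multiple authors/projects/funders) plus one extra key: country, year, datasource, organisation or funder, for both the affiliated and the deposited views of a result. The select lists sit outside the hunks shown here, so the following is only a schematic sketch of that shape, with an assumed count column and assumed join tables (result_organization, organization and country are assumptions):

-- schematic sketch only: count results per affiliation country, split by OA flags
create table ${observatory_db_name}.result_affiliated_country_sketch stored as parquet as
select count(distinct r.id) as total, r.green, r.gold, c.code as ccode, c.name as cname
from ${stats_db_name}.result r
join ${stats_db_name}.result_organization ro on ro.id = r.id
join ${stats_db_name}.organization o on o.id = ro.organization
join ${stats_db_name}.country c on c.code = o.country
group by r.green, r.gold, c.code, c.name;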
dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step5.sql
Normal file → Executable file (0 lines changed)
@@ -317,15 +317,12 @@
 </action>
 
 <action name="Step16-createIndicatorsTables">
-<shell xmlns="uri:oozie:shell-action:0.1">
-<job-tracker>${jobTracker}</job-tracker>
-<name-node>${nameNode}</name-node>
-<exec>indicators.sh</exec>
-<argument>${stats_db_name}</argument>
-<argument>${external_stats_db_name}</argument>
-<argument>${wf:appPath()}/scripts/step16-createIndicatorsTables.sql</argument>
-<file>indicators.sh</file>
-</shell>
+<hive2 xmlns="uri:oozie:hive2-action:0.1">
+<jdbc-url>${hive_jdbc_url}</jdbc-url>
+<script>scripts/step16-createIndicatorsTables.sql</script>
+<param>stats_db_name=${stats_db_name}</param>
+<param>external_stats_db_name=${external_stats_db_name}</param>
+</hive2>
 <ok to="Step16_1-definitions"/>
 <error to="Kill"/>
 </action>
@@ -378,6 +375,7 @@
 <argument>${wf:appPath()}/scripts/step20-createMonitorDB_institutions.sql</argument>
 <argument>${wf:appPath()}/scripts/step20-createMonitorDB_RIs.sql</argument>
 <argument>${wf:appPath()}/scripts/step20-createMonitorDB_RIs_tail.sql</argument>
+<argument>${wf:appPath()}/scripts/step20-createMonitorDBAll.sql</argument>
 <file>monitor.sh</file>
 </shell>
 <ok to="step21-createObservatoryDB-pre"/>