forked from D-Net/dnet-hadoop

commit 4ba386d996 (parent bb6c9785b4)

    improved crossref mapping
@@ -13,7 +13,8 @@ import scala.collection.JavaConverters._
 case class mappingAffiliation(name: String)
 case class mappingAuthor(given: Option[String], family: String, ORCID: Option[String], affiliation: Option[mappingAffiliation]) {}

-class Crossref2Oaf {
+case object Crossref2Oaf {

   //STATIC STRING
   val MAG = "MAG"

@@ -28,7 +29,6 @@ class Crossref2Oaf {
   val DNET_LANGUAGES = "dnet:languages"
   val PID_TYPES = "dnet:pid_types"
-

   val mappingCrossrefType = Map(
     "book-section" -> "publication",
     "book" -> "publication",

@@ -111,7 +111,7 @@ class Crossref2Oaf {
     result.setCollectedfrom(List(createCollectedFrom()).asJava)

     // Publisher ( Name of work's publisher mapped into Result/Publisher)
-    val publisher = (json \ "publisher").extract[String]
+    val publisher = (json \ "publisher").extractOrElse[String](null)
     result.setPublisher(asField(publisher))

     // TITLE

@@ -144,7 +144,7 @@ class Crossref2Oaf {

     //Mapping AUthor

-    val authorList:List[mappingAuthor] = (json \ "author").extract[List[mappingAuthor]]
+    val authorList: List[mappingAuthor] = (json \ "author").extractOrElse[List[mappingAuthor]](List())
     result.setAuthor(authorList.map(a => generateAuhtor(a.given.orNull, a.family, a.ORCID.orNull)).asJava)

@@ -175,8 +175,6 @@ class Crossref2Oaf {
   }
-
-

   def generateAuhtor(given: String, family: String, orcid: String): Author = {
     val a = new Author
     a.setName(given)

@@ -202,30 +200,28 @@ class Crossref2Oaf {
     if (result == null)
       return result
     val cOBJCategory = mappingCrossrefSubType.getOrElse(objectType, mappingCrossrefSubType.getOrElse(objectSubType, "0038 Other literature type"));
-    logger.debug(mappingCrossrefType(objectType))
-    logger.debug(cOBJCategory)
+    // logger.debug(mappingCrossrefType(objectType))
+    // logger.debug(cOBJCategory)

     mappingResult(result, json, cOBJCategory)

     result match {
-      case publication: Publication => convertPublication(publication)
+      case publication: Publication => convertPublication(publication, json, cOBJCategory)
       case dataset: Dataset => convertDataset(dataset)
     }

     result
   }

   def convertDataset(dataset: Dataset): Unit = {
+    //TODO probably we need to add relation and other stuff here
   }

   def convertPublication(publication: Publication, json: JValue, cobjCategory: String): Unit = {
+    implicit lazy val formats: DefaultFormats.type = org.json4s.DefaultFormats
     val containerTitles = for {JString(ct) <- json \ "container-title"} yield ct
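Two details in the hunks above deserve a note. Moving from class Crossref2Oaf to case object Crossref2Oaf makes the mapper a stateless singleton; because no companion class exists, the Scala compiler emits static forwarders, which is what later lets the Java tests call Crossref2Oaf.convert(json, logger) directly. And the switch from extract to extractOrElse matters because json4s' extract[T] throws a MappingException when a field is absent, while many Crossref records carry no publisher or no author array. A minimal, self-contained sketch of the pattern (the JSON snippet is invented for illustration):

import org.json4s._
import org.json4s.jackson.JsonMethods.parse

object ExtractOrElseSketch extends App {
  implicit val formats: DefaultFormats.type = DefaultFormats

  // A record with no "publisher" and no "author": extract[String] would throw here.
  val json = parse("""{"title": ["A DOI without a publisher"]}""")

  val publisher: String = (json \ "publisher").extractOrElse[String](null)
  val authors: List[String] = (json \ "author").extractOrElse[List[String]](List())

  println(s"publisher=$publisher authors=$authors") // publisher=null authors=List()
}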
@@ -243,11 +239,43 @@ class Crossref2Oaf {
         publication.setSource(List(asField(source)).asJava)
       }
     } else {
-      val issn =
+      // Mapping Journal

+      val issnInfos = for {JArray(issn_types) <- json \ "issn-type"
+                           JObject(issn_type) <- issn_types
+                           JField("type", JString(tp)) <- issn_type
+                           JField("value", JString(vl)) <- issn_type
+      } yield Tuple2(tp, vl)
+
+      val volume = (json \ "volume").extractOrElse[String](null)
+      if (containerTitles.nonEmpty) {
+        val journal = new Journal
+        journal.setName(containerTitles.head)
+        if (issnInfos.nonEmpty) {
+          issnInfos.foreach(tp => {
+            tp._1 match {
+              case "electronic" => journal.setIssnOnline(tp._2)
+              case "print" => journal.setIssnPrinted(tp._2)
+            }
+          })
+        }
+        journal.setVol(volume)
+
+        val page = (json \ "page").extractOrElse[String](null)
+        if (page != null) {
+          val pp = page.split("-")
+          journal.setSp(pp.head)
+          if (pp.size > 1)
+            journal.setEp(pp(1))
         }

-      // Mapping other types of publications

+        publication.setJournal(journal)
+      }
+    }

   }
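The new journal block leans on json4s' ability to pattern-match inside a for-comprehension: each generator both iterates and filters, so only issn-type entries carrying both a "type" and a "value" string survive. A standalone illustration with a made-up record:

import org.json4s._
import org.json4s.jackson.JsonMethods.parse

object IssnSketch extends App {
  val json = parse(
    """{"issn-type": [
      |  {"type": "print",      "value": "0066-4200"},
      |  {"type": "electronic", "value": "1550-8382"}
      |]}""".stripMargin)

  val issnInfos = for {
    JArray(issnTypes) <- json \ "issn-type"   // unwrap the array
    JObject(issnType) <- issnTypes            // each {"type":..,"value":..} object
    JField("type", JString(tp)) <- issnType   // keep only string-valued "type"
    JField("value", JString(vl)) <- issnType  // and string-valued "value"
  } yield (tp, vl)

  println(issnInfos) // List((print,0066-4200), (electronic,1550-8382))
}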
@@ -1,6 +1,8 @@
 package eu.dnetlib.doiboost.crossref;

 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import java.io.ByteArrayOutputStream;
+import java.util.zip.Inflater;
 import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;

@@ -12,17 +14,15 @@ import org.apache.hadoop.io.Text;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

-import java.io.ByteArrayOutputStream;
-import java.util.zip.Inflater;

 public class CrossrefImporter {

     public static void main(String[] args) throws Exception {

-        final ArgumentApplicationParser parser = new ArgumentApplicationParser(IOUtils.toString(CrossrefImporter.class.getResourceAsStream("/eu/dnetlib/dhp/doiboost/import_from_es.json")));
+        final ArgumentApplicationParser parser =
+                new ArgumentApplicationParser(
+                        IOUtils.toString(
+                                CrossrefImporter.class.getResourceAsStream(
+                                        "/eu/dnetlib/dhp/doiboost/import_from_es.json")));
         Logger logger = LoggerFactory.getLogger(CrossrefImporter.class);
         parser.parseArgument(args);

@@ -31,11 +31,12 @@ public class CrossrefImporter {
         Path hdfswritepath = new Path(parser.get("targetPath"));
         logger.info("TargetPath: " + hdfsuri);

-        final Long timestamp = StringUtils.isNotBlank(parser.get("timestamp"))?Long.parseLong(parser.get("timestamp")):-1;
-        if(timestamp>0)
-            logger.info("Timestamp added "+timestamp);
+        final Long timestamp =
+                StringUtils.isNotBlank(parser.get("timestamp"))
+                        ? Long.parseLong(parser.get("timestamp"))
+                        : -1;
+
+        if (timestamp > 0) logger.info("Timestamp added " + timestamp);

         // ====== Init HDFS File System Object
         Configuration conf = new Configuration();

@@ -45,12 +46,17 @@ public class CrossrefImporter {
         conf.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
         conf.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());

-        ESClient client = timestamp>0?new ESClient("ip-90-147-167-25.ct1.garrservices.it", "crossref", timestamp):new ESClient("ip-90-147-167-25.ct1.garrservices.it", "crossref");
+        ESClient client =
+                timestamp > 0
+                        ? new ESClient(
+                                "ip-90-147-167-25.ct1.garrservices.it", "crossref", timestamp)
+                        : new ESClient("ip-90-147-167-25.ct1.garrservices.it", "crossref");

-        try (SequenceFile.Writer writer = SequenceFile.createWriter(conf,
-                SequenceFile.Writer.file(hdfswritepath), SequenceFile.Writer.keyClass(IntWritable.class),
+        try (SequenceFile.Writer writer =
+                SequenceFile.createWriter(
+                        conf,
+                        SequenceFile.Writer.file(hdfswritepath),
+                        SequenceFile.Writer.keyClass(IntWritable.class),
                 SequenceFile.Writer.valueClass(Text.class))) {

             int i = 0;

@@ -65,7 +71,10 @@ public class CrossrefImporter {
             if (i % 1000000 == 0) {
                 end = System.currentTimeMillis();
                 final float time = (end - start) / 1000.0F;
-                logger.info(String.format("Imported %d records last 100000 imported in %f seconds", i, time));
+                logger.info(
+                        String.format(
+                                "Imported %d records last 100000 imported in %f seconds",
+                                i, time));
                 start = System.currentTimeMillis();
             }
         }
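For context on the imports shuffled above (Base64, Inflater, ByteArrayOutputStream): each Crossref record comes out of the ES index as a deflate-compressed, Base64-encoded blob and is appended to a Hadoop SequenceFile keyed by a counter. A rough sketch of both halves, assuming that blob encoding; the decompressBlob name mirrors the importer's helper, whose body is not part of this diff:

import java.util.zip.{Deflater, Inflater}
import org.apache.commons.codec.binary.Base64
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.hadoop.io.{IntWritable, SequenceFile, Text}

object BlobSketch {
  def compressBlob(s: String): String = {
    val deflater = new Deflater()
    deflater.setInput(s.getBytes("UTF-8"))
    deflater.finish()
    val out = new java.io.ByteArrayOutputStream()
    val buf = new Array[Byte](8192)
    while (!deflater.finished()) out.write(buf, 0, deflater.deflate(buf))
    deflater.end()
    Base64.encodeBase64String(out.toByteArray)
  }

  def decompressBlob(blob: String): String = {
    val inflater = new Inflater()
    inflater.setInput(Base64.decodeBase64(blob))
    val out = new java.io.ByteArrayOutputStream()
    val buf = new Array[Byte](8192)
    while (!inflater.finished()) out.write(buf, 0, inflater.inflate(buf))
    inflater.end()
    out.toString("UTF-8")
  }

  def main(args: Array[String]): Unit = {
    val blob = compressBlob("""{"DOI":"10.0/xyz"}""")
    println(decompressBlob(blob)) // round-trips the record

    // Append one record to a local SequenceFile, as the importer does on HDFS.
    val writer = SequenceFile.createWriter(
      new Configuration(),
      SequenceFile.Writer.file(new Path("file:///tmp/p.seq")),
      SequenceFile.Writer.keyClass(classOf[IntWritable]),
      SequenceFile.Writer.valueClass(classOf[Text]))
    writer.append(new IntWritable(1), new Text(blob))
    writer.close()
  }
}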
@@ -1,6 +1,9 @@
 package eu.dnetlib.doiboost.crossref;

 import com.jayway.jsonpath.JsonPath;
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.List;
 import org.apache.commons.io.IOUtils;
 import org.apache.http.client.methods.CloseableHttpResponse;
 import org.apache.http.client.methods.HttpPost;

@@ -10,18 +13,15 @@ import org.apache.http.impl.client.HttpClients;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

-import java.io.IOException;
-import java.util.Iterator;
-import java.util.List;

 public class ESClient implements Iterator<String> {
-    private final static Logger logger = LoggerFactory.getLogger(ESClient.class);
+    private static final Logger logger = LoggerFactory.getLogger(ESClient.class);

-    final static String blobPath = "$.hits[*].hits[*]._source.blob";
-    final static String scrollIdPath = "$._scroll_id";
-    final static String JSON_NO_TS ="{\"size\":1000}";
-    final static String JSON_WITH_TS ="{\"size\":1000, \"query\":{\"range\":{\"timestamp\":{\"gte\":%d}}}}";
-    final static String JSON_SCROLL = "{\"scroll_id\":\"%s\",\"scroll\" : \"1m\"}";
+    static final String blobPath = "$.hits[*].hits[*]._source.blob";
+    static final String scrollIdPath = "$._scroll_id";
+    static final String JSON_NO_TS = "{\"size\":1000}";
+    static final String JSON_WITH_TS =
+            "{\"size\":1000, \"query\":{\"range\":{\"timestamp\":{\"gte\":%d}}}}";
+    static final String JSON_SCROLL = "{\"scroll_id\":\"%s\",\"scroll\" : \"1m\"}";

     private final String scrollId;

@@ -29,19 +29,24 @@ public class ESClient implements Iterator<String> {
     private final String esHost;

     public ESClient(final String esHost, final String esIndex) throws IOException {
         this.esHost = esHost;
-        final String body =getResponse(String.format("http://%s:9200/%s/_search?scroll=1m", esHost, esIndex), JSON_NO_TS);
+        final String body =
+                getResponse(
+                        String.format("http://%s:9200/%s/_search?scroll=1m", esHost, esIndex),
+                        JSON_NO_TS);
         scrollId = getJPathString(scrollIdPath, body);
         buffer = getBlobs(body);
     }

-    public ESClient(final String esHost, final String esIndex, final long timestamp) throws IOException {
+    public ESClient(final String esHost, final String esIndex, final long timestamp)
+            throws IOException {
         this.esHost = esHost;
-        final String body =getResponse(String.format("http://%s:9200/%s/_search?scroll=1m", esHost, esIndex), String.format(JSON_WITH_TS, timestamp));
+        final String body =
+                getResponse(
+                        String.format("http://%s:9200/%s/_search?scroll=1m", esHost, esIndex),
+                        String.format(JSON_WITH_TS, timestamp));
         scrollId = getJPathString(scrollIdPath, body);
         buffer = getBlobs(body);
     }

@@ -69,14 +74,12 @@ public class ESClient implements Iterator<String> {
                 throw new RuntimeException("Unable to close client ", e);
             }
         }
     }

     private String getJPathString(final String jsonPath, final String json) {
         try {
             Object o = JsonPath.read(json, jsonPath);
-            if (o instanceof String)
-                return (String) o;
+            if (o instanceof String) return (String) o;
             return null;
         } catch (Exception e) {
             return "";

@@ -91,7 +94,6 @@ public class ESClient implements Iterator<String> {
     @Override
     public boolean hasNext() {
         return (buffer != null && !buffer.isEmpty());
-
     }

     @Override

@@ -100,7 +102,8 @@ public class ESClient implements Iterator<String> {
         if (buffer.isEmpty()) {

             final String json_param = String.format(JSON_SCROLL, scrollId);
-            final String body =getResponse(String.format("http://%s:9200/_search/scroll", esHost), json_param);
+            final String body =
+                    getResponse(String.format("http://%s:9200/_search/scroll", esHost), json_param);
             try {
                 buffer = getBlobs(body);
             } catch (Throwable e) {
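ESClient pages through the index with Elasticsearch's scroll API: the first search opens a one-minute scroll context and returns up to 1000 hits plus a _scroll_id, and each later POST replays that id until no hits remain, which is exactly what the JSON_NO_TS/JSON_WITH_TS and JSON_SCROLL bodies above encode. A bare-bones sketch of the same exchange; java.net.http (Java 11+) stands in for the Apache HttpClient plumbing, and the host plus JsonPath extraction are elided:

import java.net.URI
import java.net.http.{HttpClient, HttpRequest, HttpResponse}

object ScrollSketch {
  private val client = HttpClient.newHttpClient()

  private def post(url: String, body: String): String = {
    val req = HttpRequest.newBuilder(URI.create(url))
      .header("Content-Type", "application/json")
      .POST(HttpRequest.BodyPublishers.ofString(body))
      .build()
    client.send(req, HttpResponse.BodyHandlers.ofString()).body()
  }

  def main(args: Array[String]): Unit = {
    val host = "localhost" // stand-in for the GARR node hard-coded in the importer
    // 1) open a 1-minute scroll context, 1000 hits per page (JSON_NO_TS)
    val first = post(s"http://$host:9200/crossref/_search?scroll=1m", """{"size":1000}""")
    // 2) pull $._scroll_id from `first` (ESClient does this with JsonPath),
    //    then keep replaying it (JSON_SCROLL) until $.hits comes back empty
    val scrollId = "..." // extracted from `first`
    val next = post(
      s"http://$host:9200/_search/scroll",
      s"""{"scroll_id":"$scrollId","scroll" : "1m"}""")
    println(next.take(200))
  }
}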
@@ -1,14 +1,14 @@
 package eu.dnetlib.doiboost.crossref

 import eu.dnetlib.dhp.application.ArgumentApplicationParser
+import eu.dnetlib.dhp.schema.oaf.Publication
 import org.apache.commons.io.IOUtils
 import org.apache.hadoop.io.{IntWritable, Text}
 import org.apache.spark.SparkConf
-import org.apache.spark.sql.SparkSession
+import org.apache.spark.sql.{Dataset, Encoders, SaveMode, SparkSession}
 import org.slf4j.{Logger, LoggerFactory}


 case class Reference(author: String, firstPage: String) {}

 object SparkMapDumpIntoOAF {

@@ -26,15 +26,24 @@ object SparkMapDumpIntoOAF {
       .config(conf)
       .appName(SparkMapDumpIntoOAF.getClass.getSimpleName)
       .master(parser.get("master")).getOrCreate()
+    import spark.implicits._
+    implicit val mapEncoder = Encoders.bean(classOf[Publication])

     val sc = spark.sparkContext
-    val x: String = sc.sequenceFile(parser.get("sourcePath"), classOf[IntWritable], classOf[Text])
-      .map(k => k._2.toString).first()

-    val item =CrossrefImporter.decompressBlob(x)
+    val total = sc.sequenceFile(parser.get("sourcePath"), classOf[IntWritable], classOf[Text])
+      .map(k => k._2.toString).map(CrossrefImporter.decompressBlob)
+      .map(k => Crossref2Oaf.convert(k, logger))
+      .filter(k => k != null && k.isInstanceOf[Publication])
+      .map(k => k.asInstanceOf[Publication])

-    logger.info(item)
+    val ds: Dataset[Publication] = spark.createDataset(total)
+    val targetPath = parser.get("targetPath")
+    ds.write.mode(SaveMode.Overwrite).save(s"${targetPath}/publication")
+
+    logger.info(s"total Item :${total}")

     // lazy val json: json4s.JValue = parse(item)
     //
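The rewritten job no longer inspects a single record: it maps the whole dump into Publication objects and persists them as a Spark Dataset. Because Publication is a Java bean rather than a Scala case class, spark.implicits._ cannot derive its encoder, hence the explicit Encoders.bean. One caveat worth flagging: s"total Item :${total}" interpolates the RDD's toString, not a record count; total.count() would log the number of items. A sketch of the same pattern with a stand-in bean (Rec is hypothetical):

import org.apache.spark.sql.{Encoders, SaveMode, SparkSession}
import scala.beans.BeanProperty

class Rec extends Serializable { @BeanProperty var doi: String = _ }

object DatasetWriteSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("sketch").getOrCreate()
    implicit val recEncoder = Encoders.bean(classOf[Rec]) // bean encoder, not implicits._

    val rdd = spark.sparkContext.parallelize(Seq("10.1/a", "10.2/b"))
      .map { d => val r = new Rec; r.setDoi(d); r }

    val ds = spark.createDataset(rdd)                     // uses the bean encoder
    ds.write.mode(SaveMode.Overwrite).save("/tmp/sketch/publication") // parquet by default
    spark.stop()
  }
}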
@@ -1,10 +1,8 @@
 package eu.dnetlib.doiboost.orcid.json;

 import com.google.gson.JsonObject;
-
 import eu.dnetlib.doiboost.orcid.model.AuthorData;
-

 public class JsonWriter {

     public static String create(AuthorData authorData) {
@@ -11,30 +11,39 @@ public class AuthorData {
     public String getErrorCode() {
         return errorCode;
     }

     public void setErrorCode(String errorCode) {
         this.errorCode = errorCode;
     }

     public String getName() {
         return name;
     }

     public void setName(String name) {
         this.name = name;
     }

     public String getSurname() {
         return surname;
     }

     public void setSurname(String surname) {
         this.surname = surname;
     }

     public String getCreditName() {
         return creditName;
     }

     public void setCreditName(String creditName) {
         this.creditName = creditName;
     }

     public String getOid() {
         return oid;
     }

     public void setOid(String oid) {
         this.oid = oid;
     }
@@ -1,5 +1,6 @@
 [
   {"paramName":"s", "paramLongName":"sourcePath", "paramDescription": "the path of the sequencial file to read", "paramRequired": true},
+  {"paramName":"t", "paramLongName":"targetPath", "paramDescription": "the working dir path", "paramRequired": true},
   {"paramName":"m", "paramLongName":"master", "paramDescription": "the master name", "paramRequired": true}

 ]
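With the new targetPath entry, the job's CLI grows a -t flag alongside -s and -m, as the updated test invocation below shows. How these definitions are consumed, sketched with the parser methods this commit already uses (the resource path here is hypothetical):

import eu.dnetlib.dhp.application.ArgumentApplicationParser
import org.apache.commons.io.IOUtils

object ParserSketch {
  def main(args: Array[String]): Unit = {
    val parser = new ArgumentApplicationParser(
      IOUtils.toString(getClass.getResourceAsStream(
        "/eu/dnetlib/dhp/doiboost/convert_map_to_oaf_params.json"))) // hypothetical path
    parser.parseArgument("-m local[*] -s file:///tmp/dump.seq -t /tmp/out".split(" "))
    println(parser.get("targetPath")) // -> /tmp/out
  }
}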
@@ -26,18 +26,98 @@ public class DoiBoostTest {
         // CrossrefImporter.main("-n file:///tmp -t file:///tmp/p.seq -ts 1586110000749".split("
         // "));
         SparkMapDumpIntoOAF.main(
-                "-m local[*] -s file:///data/doiboost/crossref_dump.seq".split(" "));
+                "-m local[*] -s file:///data/doiboost/crossref_dump.seq -t /data/doiboost"
+                        .split(" "));
+    }
+
+    @Test
+    public void testConvertDatasetCrossRef2Oaf() throws IOException {
+        final String json = IOUtils.toString(getClass().getResourceAsStream("dataset.json"));
+        ObjectMapper mapper = new ObjectMapper().enable(SerializationFeature.INDENT_OUTPUT);
+        assertNotNull(json);
+        assertFalse(StringUtils.isBlank(json));
+        final Result result = Crossref2Oaf.convert(json, logger);
+
+        logger.info(mapper.writeValueAsString(result));
     }

     @Test
     public void testConvertPreprintCrossRef2Oaf() throws IOException {

-        final String json = IOUtils.toString(getClass().getResourceAsStream("article.json"));
+        final String json = IOUtils.toString(getClass().getResourceAsStream("preprint.json"));
         ObjectMapper mapper = new ObjectMapper().enable(SerializationFeature.INDENT_OUTPUT);
         assertNotNull(json);
         assertFalse(StringUtils.isBlank(json));
-        Crossref2Oaf cf = new Crossref2Oaf();
-        final Result result = cf.convert(json, logger);
+        final Result result = Crossref2Oaf.convert(json, logger);
+        assertNotNull(result);
+
+        assertNotNull(result.getDataInfo(), "Datainfo test not null Failed");
+        assertNotNull(
+                result.getDataInfo().getProvenanceaction(),
+                "DataInfo/Provenance test not null Failed");
+        assertFalse(
+                StringUtils.isBlank(result.getDataInfo().getProvenanceaction().getClassid()),
+                "DataInfo/Provenance/classId test not null Failed");
+        assertFalse(
+                StringUtils.isBlank(result.getDataInfo().getProvenanceaction().getClassname()),
+                "DataInfo/Provenance/className test not null Failed");
+        assertFalse(
+                StringUtils.isBlank(result.getDataInfo().getProvenanceaction().getSchemeid()),
+                "DataInfo/Provenance/SchemeId test not null Failed");
+        assertFalse(
+                StringUtils.isBlank(result.getDataInfo().getProvenanceaction().getSchemename()),
+                "DataInfo/Provenance/SchemeName test not null Failed");
+
+        assertNotNull(result.getCollectedfrom(), "CollectedFrom test not null Failed");
+        assertTrue(result.getCollectedfrom().size() > 0);
+        assertTrue(
+                result.getCollectedfrom().stream()
+                        .anyMatch(
+                                c ->
+                                        c.getKey()
+                                                .equalsIgnoreCase(
+                                                        "10|openaire____::081b82f96300b6a6e3d282bad31cb6e2")));
+        assertTrue(
+                result.getCollectedfrom().stream()
+                        .anyMatch(c -> c.getValue().equalsIgnoreCase("crossref")));
+
+        assertTrue(
+                result.getRelevantdate().stream()
+                        .anyMatch(d -> d.getQualifier().getClassid().equalsIgnoreCase("created")));
+        assertTrue(
+                result.getRelevantdate().stream()
+                        .anyMatch(
+                                d -> d.getQualifier().getClassid().equalsIgnoreCase("available")));
+        assertTrue(
+                result.getRelevantdate().stream()
+                        .anyMatch(d -> d.getQualifier().getClassid().equalsIgnoreCase("accepted")));
+        assertTrue(
+                result.getRelevantdate().stream()
+                        .anyMatch(
+                                d ->
+                                        d.getQualifier()
+                                                .getClassid()
+                                                .equalsIgnoreCase("published-online")));
+        assertTrue(
+                result.getRelevantdate().stream()
+                        .anyMatch(
+                                d ->
+                                        d.getQualifier()
+                                                .getClassid()
+                                                .equalsIgnoreCase("published-print")));
+
+        logger.info(mapper.writeValueAsString(result));
+    }
+
+    @Test
+    public void testConvertArticleCrossRef2Oaf() throws IOException {
+
+        final String json = IOUtils.toString(getClass().getResourceAsStream("article.json"));
+        ObjectMapper mapper = new ObjectMapper().enable(SerializationFeature.INDENT_OUTPUT);
+        assertNotNull(json);
+        assertFalse(StringUtils.isBlank(json));
+        final Result result = Crossref2Oaf.convert(json, logger);
         assertNotNull(result);

         assertNotNull(result.getDataInfo(), "Datainfo test not null Failed");

@@ -73,15 +153,6 @@ public class DoiBoostTest {
         assertTrue(
                 result.getRelevantdate().stream()
                         .anyMatch(d -> d.getQualifier().getClassid().equalsIgnoreCase("created")));
-        // assertTrue(
-        //        result.getRelevantdate().stream()
-        //                .anyMatch(
-        //                        d ->
-        //                                d.getQualifier().getClassid().equalsIgnoreCase("available")));
-        // assertTrue(
-        //        result.getRelevantdate().stream()
-        //                .anyMatch(d ->
-        //                        d.getQualifier().getClassid().equalsIgnoreCase("accepted")));
         assertTrue(
                 result.getRelevantdate().stream()
                         .anyMatch(

@@ -107,8 +178,7 @@ public class DoiBoostTest {
         ObjectMapper mapper = new ObjectMapper().enable(SerializationFeature.INDENT_OUTPUT);
         assertNotNull(json);
         assertFalse(StringUtils.isBlank(json));
-        Crossref2Oaf cf = new Crossref2Oaf();
-        final Result result = cf.convert(json, logger);
+        final Result result = Crossref2Oaf.convert(json, logger);
         assertNotNull(result);
         logger.info(mapper.writeValueAsString(result));
@@ -170,5 +170,5 @@
     "container-title": [
       "Ecl\u00e9tica Qu\u00edmica Journal"
     ],
-    "page": "41"
+    "page": "41-50"
 }
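This fixture change pairs with the new journal mapping in Crossref2Oaf: with page set to "41-50", page.split("-") yields both halves, so journal.setSp("41") and journal.setEp("50") are both exercised, whereas the old single value "41" left the end page unset.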
@@ -0,0 +1,105 @@
+{
+  "DOI": "10.1037/e522512014-096",
+  "subtitle": [
+    "(522512014-096)"
+  ],
+  "issued": {
+    "date-parts": [
+      [
+        2012
+      ]
+    ]
+  },
+  "prefix": "10.1037",
+  "author": [
+    {
+      "affiliation": [],
+      "given": "Jessica",
+      "family": "Trudeau",
+      "sequence": "first"
+    },
+    {
+      "affiliation": [],
+      "given": "Amy",
+      "family": "McShane",
+      "sequence": "additional"
+    },
+    {
+      "affiliation": [],
+      "given": "Renee",
+      "family": "McDonald",
+      "sequence": "additional"
+    }
+  ],
+  "reference-count": 0,
+  "member": "15",
+  "source": "Crossref",
+  "score": 1.0,
+  "deposited": {
+    "timestamp": 1413827035000,
+    "date-parts": [
+      [
+        2014,
+        10,
+        20
+      ]
+    ],
+    "date-time": "2014-10-20T17:43:55Z"
+  },
+  "indexed": {
+    "timestamp": 1550142454710,
+    "date-parts": [
+      [
+        2019,
+        2,
+        14
+      ]
+    ],
+    "date-time": "2019-02-14T11:07:34Z"
+  },
+  "type": "dataset",
+  "URL": "http://dx.doi.org/10.1037/e522512014-096",
+  "is-referenced-by-count": 0,
+  "published-print": {
+    "date-parts": [
+      [
+        2012
+      ]
+    ]
+  },
+  "references-count": 0,
+  "institution": {
+    "acronym": [
+      "APA"
+    ],
+    "place": [
+      "-"
+    ],
+    "name": "American Psychological Association"
+  },
+  "publisher": "American Psychological Association (APA)",
+  "content-domain": {
+    "domain": [],
+    "crossmark-restriction": false
+  },
+  "created": {
+    "timestamp": 1413826121000,
+    "date-parts": [
+      [
+        2014,
+        10,
+        20
+      ]
+    ],
+    "date-time": "2014-10-20T17:28:41Z"
+  },
+  "title": [
+    "Project Support: A Randomized Control Study to Evaluate the Translation of an Evidence- Based Program"
+  ],
+  "alternative-id": [
+    "522512014-096"
+  ],
+  "container-title": [
+    "PsycEXTRA Dataset"
+  ]
+}
@@ -10,19 +10,6 @@ import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.oaiIProvenance;
 import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.qualifier;
 import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.structuredProperty;

-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.commons.lang3.StringUtils;
-import org.dom4j.Document;
-import org.dom4j.DocumentFactory;
-import org.dom4j.DocumentHelper;
-import org.dom4j.Node;

 import eu.dnetlib.dhp.oa.graph.raw.common.MigrationConstants;
 import eu.dnetlib.dhp.schema.oaf.Author;
 import eu.dnetlib.dhp.schema.oaf.DataInfo;

@@ -41,6 +28,17 @@ import eu.dnetlib.dhp.schema.oaf.Relation;
 import eu.dnetlib.dhp.schema.oaf.Result;
 import eu.dnetlib.dhp.schema.oaf.Software;
 import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import org.apache.commons.lang3.StringUtils;
+import org.dom4j.Document;
+import org.dom4j.DocumentFactory;
+import org.dom4j.DocumentHelper;
+import org.dom4j.Node;

 public abstract class AbstractMdRecordToOafMapper {

@@ -66,15 +64,22 @@ public abstract class AbstractMdRecordToOafMapper {
         DocumentFactory.getInstance().setXPathNamespaceURIs(nsContext);

         final Document doc =
-                DocumentHelper.parseText(xml.replaceAll("http://datacite.org/schema/kernel-4", "http://datacite.org/schema/kernel-3"));
+                DocumentHelper.parseText(
+                        xml.replaceAll(
+                                "http://datacite.org/schema/kernel-4",
+                                "http://datacite.org/schema/kernel-3"));

         final String type = doc.valueOf("//dr:CobjCategory/@type");
         final KeyValue collectedFrom =
-                keyValue(createOpenaireId(10, doc.valueOf("//oaf:collectedFrom/@id"), true), doc.valueOf("//oaf:collectedFrom/@name"));
+                keyValue(
+                        createOpenaireId(10, doc.valueOf("//oaf:collectedFrom/@id"), true),
+                        doc.valueOf("//oaf:collectedFrom/@name"));
         final KeyValue hostedBy =
                 StringUtils.isBlank(doc.valueOf("//oaf:hostedBy/@id"))
                         ? collectedFrom
-                        : keyValue(createOpenaireId(10, doc.valueOf("//oaf:hostedBy/@id"), true), doc.valueOf("//oaf:hostedBy/@name"));
+                        : keyValue(
+                                createOpenaireId(10, doc.valueOf("//oaf:hostedBy/@id"), true),
+                                doc.valueOf("//oaf:hostedBy/@name"));

         final DataInfo info = prepareDataInfo(doc);
         final long lastUpdateTimestamp = new Date().getTime();

@@ -204,7 +209,14 @@ public abstract class AbstractMdRecordToOafMapper {
         r.setId(createOpenaireId(50, doc.valueOf("//dri:objIdentifier"), false));
         r.setOriginalId(Arrays.asList(doc.valueOf("//dri:objIdentifier")));
         r.setCollectedfrom(Arrays.asList(collectedFrom));
-        r.setPid(prepareListStructProps(doc, "//oaf:identifier", "@identifierType", "dnet:pid_types", "dnet:pid_types", info));
+        r.setPid(
+                prepareListStructProps(
+                        doc,
+                        "//oaf:identifier",
+                        "@identifierType",
+                        "dnet:pid_types",
+                        "dnet:pid_types",
+                        info));
         r.setDateofcollection(doc.valueOf("//dr:dateOfCollection"));
         r.setDateoftransformation(doc.valueOf("//dr:dateOfTransformation"));
         r.setExtraInfo(new ArrayList<>()); // NOT PRESENT IN MDSTORES

@@ -233,10 +245,7 @@ public abstract class AbstractMdRecordToOafMapper {
     protected abstract Qualifier prepareResourceType(Document doc, DataInfo info);

     protected abstract List<Instance> prepareInstances(
-            Document doc,
-            DataInfo info,
-            KeyValue collectedfrom,
-            KeyValue hostedby);
+            Document doc, DataInfo info, KeyValue collectedfrom, KeyValue hostedby);

     protected abstract List<Field<String>> prepareSources(Document doc, DataInfo info);

@@ -261,34 +270,28 @@ public abstract class AbstractMdRecordToOafMapper {
     protected abstract List<Author> prepareAuthors(Document doc, DataInfo info);

     protected abstract List<Field<String>> prepareOtherResearchProductTools(
-            Document doc,
-            DataInfo info);
+            Document doc, DataInfo info);

     protected abstract List<Field<String>> prepareOtherResearchProductContactGroups(
-            Document doc,
-            DataInfo info);
+            Document doc, DataInfo info);

     protected abstract List<Field<String>> prepareOtherResearchProductContactPersons(
-            Document doc,
-            DataInfo info);
+            Document doc, DataInfo info);

     protected abstract Qualifier prepareSoftwareProgrammingLanguage(Document doc, DataInfo info);

     protected abstract Field<String> prepareSoftwareCodeRepositoryUrl(Document doc, DataInfo info);

     protected abstract List<StructuredProperty> prepareSoftwareLicenses(
-            Document doc,
-            DataInfo info);
+            Document doc, DataInfo info);

     protected abstract List<Field<String>> prepareSoftwareDocumentationUrls(
-            Document doc,
-            DataInfo info);
+            Document doc, DataInfo info);

     protected abstract List<GeoLocation> prepareDatasetGeoLocations(Document doc, DataInfo info);

     protected abstract Field<String> prepareDatasetMetadataVersionNumber(
-            Document doc,
-            DataInfo info);
+            Document doc, DataInfo info);

     protected abstract Field<String> prepareDatasetLastMetadataUpdate(Document doc, DataInfo info);
@@ -312,16 +315,27 @@ public abstract class AbstractMdRecordToOafMapper {
         final String sp = n.valueOf("@sp");
         final String vol = n.valueOf("@vol");
         final String edition = n.valueOf("@edition");
-        if (StringUtils.isNotBlank(name)) { return journal(name, issnPrinted, issnOnline, issnLinking, ep, iss, sp, vol, edition, null, null, info); }
+        if (StringUtils.isNotBlank(name)) {
+            return journal(
+                    name,
+                    issnPrinted,
+                    issnOnline,
+                    issnLinking,
+                    ep,
+                    iss,
+                    sp,
+                    vol,
+                    edition,
+                    null,
+                    null,
+                    info);
+        }
     }
     return null;
 }

 protected Qualifier prepareQualifier(
-        final Node node,
-        final String xpath,
-        final String schemeId,
-        final String schemeName) {
+        final Node node, final String xpath, final String schemeId, final String schemeName) {
     final String classId = node.valueOf(xpath);
     final String className = code2name.get(classId);
     return qualifier(classId, className, schemeId, schemeName);

@@ -339,16 +353,15 @@ public abstract class AbstractMdRecordToOafMapper {
             final Node n = (Node) o;
             final String classId = n.valueOf(xpathClassId);
             final String className = code2name.get(classId);
-            res.add(structuredProperty(n.getText(), classId, className, schemeId, schemeName, info));
+            res.add(
+                    structuredProperty(
+                            n.getText(), classId, className, schemeId, schemeName, info));
         }
         return res;
     }

     protected List<StructuredProperty> prepareListStructProps(
-            final Node node,
-            final String xpath,
-            final Qualifier qualifier,
-            final DataInfo info) {
+            final Node node, final String xpath, final Qualifier qualifier, final DataInfo info) {
         final List<StructuredProperty> res = new ArrayList<>();
         for (final Object o : node.selectNodes(xpath)) {
             final Node n = (Node) o;

@@ -358,37 +371,58 @@ public abstract class AbstractMdRecordToOafMapper {
     }

     protected List<StructuredProperty> prepareListStructProps(
-            final Node node,
-            final String xpath,
-            final DataInfo info) {
+            final Node node, final String xpath, final DataInfo info) {
         final List<StructuredProperty> res = new ArrayList<>();
         for (final Object o : node.selectNodes(xpath)) {
             final Node n = (Node) o;
-            res.add(structuredProperty(n.getText(), n.valueOf("@classid"), n.valueOf("@classname"), n.valueOf("@schemeid"), n.valueOf("@schemename"), info));
+            res.add(
+                    structuredProperty(
+                            n.getText(),
+                            n.valueOf("@classid"),
+                            n.valueOf("@classname"),
+                            n.valueOf("@schemeid"),
+                            n.valueOf("@schemename"),
+                            info));
         }
         return res;
     }

     protected OAIProvenance prepareOAIprovenance(final Document doc) {
         final Node n =
-                doc.selectSingleNode("//*[local-name()='provenance']/*[local-name()='originDescription']");
+                doc.selectSingleNode(
+                        "//*[local-name()='provenance']/*[local-name()='originDescription']");

-        if (n == null) { return null; }
+        if (n == null) {
+            return null;
+        }

         final String identifier = n.valueOf("./*[local-name()='identifier']");
-        final String baseURL = n.valueOf("./*[local-name()='baseURL']");;
-        final String metadataNamespace = n.valueOf("./*[local-name()='metadataNamespace']");;
+        final String baseURL = n.valueOf("./*[local-name()='baseURL']");
+        ;
+        final String metadataNamespace = n.valueOf("./*[local-name()='metadataNamespace']");
+        ;
         final boolean altered = n.valueOf("@altered").equalsIgnoreCase("true");
-        final String datestamp = n.valueOf("./*[local-name()='datestamp']");;
-        final String harvestDate = n.valueOf("@harvestDate");;
+        final String datestamp = n.valueOf("./*[local-name()='datestamp']");
+        ;
+        final String harvestDate = n.valueOf("@harvestDate");
+        ;

-        return oaiIProvenance(identifier, baseURL, metadataNamespace, altered, datestamp, harvestDate);
+        return oaiIProvenance(
+                identifier, baseURL, metadataNamespace, altered, datestamp, harvestDate);
     }

     protected DataInfo prepareDataInfo(final Document doc) {
         final Node n = doc.selectSingleNode("//oaf:datainfo");

-        if (n == null) { return dataInfo(false, null, false, false, MigrationConstants.REPOSITORY_PROVENANCE_ACTIONS, "0.9"); }
+        if (n == null) {
+            return dataInfo(
+                    false,
+                    null,
+                    false,
+                    false,
+                    MigrationConstants.REPOSITORY_PROVENANCE_ACTIONS,
+                    "0.9");
+        }

         final String paClassId = n.valueOf("./oaf:provenanceaction/@classid");
         final String paClassName = n.valueOf("./oaf:provenanceaction/@classname");

@@ -401,7 +435,13 @@ public abstract class AbstractMdRecordToOafMapper {
         final Boolean inferred = Boolean.parseBoolean(n.valueOf("./oaf:inferred"));
         final String trust = n.valueOf("./oaf:trust");

-        return dataInfo(deletedbyinference, inferenceprovenance, inferred, false, qualifier(paClassId, paClassName, paSchemeId, paSchemeName), trust);
+        return dataInfo(
+                deletedbyinference,
+                inferenceprovenance,
+                inferred,
+                false,
+                qualifier(paClassId, paClassName, paSchemeId, paSchemeName),
+                trust);
     }

     protected Field<String> prepareField(final Node node, final String xpath, final DataInfo info) {

@@ -409,9 +449,7 @@ public abstract class AbstractMdRecordToOafMapper {
     }

     protected List<Field<String>> prepareListFields(
-            final Node node,
-            final String xpath,
-            final DataInfo info) {
+            final Node node, final String xpath, final DataInfo info) {
         return listFields(info, prepareListString(node, xpath));
     }
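Most hunks in this mapper are google-java-format rewraps, but they showcase the class's core idiom: bind XPath namespace prefixes once through DocumentFactory, then pull everything with valueOf. A compact standalone sketch; the oaf namespace URI is an illustrative stand-in for the prefixes the real class registers:

import org.dom4j.{DocumentFactory, DocumentHelper}
import scala.collection.JavaConverters._

object XPathSketch extends App {
  // Register the prefix -> URI binding used by namespaced XPath expressions.
  DocumentFactory.getInstance().setXPathNamespaceURIs(
    Map("oaf" -> "http://namespace.openaire.eu/oaf").asJava)

  val doc = DocumentHelper.parseText(
    """<record xmlns:oaf="http://namespace.openaire.eu/oaf">
      |  <oaf:collectedFrom id="ds_1" name="Crossref"/>
      |</record>""".stripMargin)

  println(doc.valueOf("//oaf:collectedFrom/@name")) // Crossref
}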
@@ -10,23 +10,6 @@ import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.listKeyValues;
 import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.qualifier;
 import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.structuredProperty;

-import java.io.Closeable;
-import java.io.IOException;
-import java.sql.Array;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Date;
-import java.util.List;
-import java.util.function.Consumer;
-import java.util.function.Function;
-
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;

 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.oa.graph.raw.common.AbstractMigrationApplication;
 import eu.dnetlib.dhp.oa.graph.raw.common.DbClient;

@@ -48,6 +31,21 @@ import eu.dnetlib.dhp.schema.oaf.Relation;
 import eu.dnetlib.dhp.schema.oaf.Result;
 import eu.dnetlib.dhp.schema.oaf.Software;
 import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
+import java.io.Closeable;
+import java.io.IOException;
+import java.sql.Array;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.List;
+import java.util.function.Consumer;
+import java.util.function.Function;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;

 public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
         implements Closeable {
@ -61,8 +59,9 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
|
||||||
public static void main(final String[] args) throws Exception {
|
public static void main(final String[] args) throws Exception {
|
||||||
final ArgumentApplicationParser parser =
|
final ArgumentApplicationParser parser =
|
||||||
new ArgumentApplicationParser(
|
new ArgumentApplicationParser(
|
||||||
IOUtils.toString(MigrateDbEntitiesApplication.class
|
IOUtils.toString(
|
||||||
.getResourceAsStream("/eu/dnetlib/dhp/oa/graph/migrate_db_entities_parameters.json")));
|
MigrateDbEntitiesApplication.class.getResourceAsStream(
|
||||||
|
"/eu/dnetlib/dhp/oa/graph/migrate_db_entities_parameters.json")));
|
||||||
|
|
||||||
parser.parseArgument(args);
|
parser.parseArgument(args);
|
||||||
|
|
||||||
|
@ -91,7 +90,8 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
|
||||||
smdbe.execute("queryOrganizations.sql", smdbe::processOrganization);
|
smdbe.execute("queryOrganizations.sql", smdbe::processOrganization);
|
||||||
|
|
||||||
log.info("Processing relations ds <-> orgs ...");
|
log.info("Processing relations ds <-> orgs ...");
|
||||||
smdbe.execute("queryDatasourceOrganization.sql", smdbe::processDatasourceOrganization);
|
smdbe.execute(
|
||||||
|
"queryDatasourceOrganization.sql", smdbe::processDatasourceOrganization);
|
||||||
|
|
||||||
log.info("Processing projects <-> orgs ...");
|
log.info("Processing projects <-> orgs ...");
|
||||||
smdbe.execute("queryProjectOrganization.sql", smdbe::processProjectOrganization);
|
smdbe.execute("queryProjectOrganization.sql", smdbe::processProjectOrganization);
|
||||||
|
@ -117,7 +117,8 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
|
||||||
public void execute(final String sqlFile, final Function<ResultSet, List<Oaf>> producer)
|
public void execute(final String sqlFile, final Function<ResultSet, List<Oaf>> producer)
|
||||||
throws Exception {
|
throws Exception {
|
||||||
final String sql =
|
final String sql =
|
||||||
IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/oa/graph/sql/" + sqlFile));
|
IOUtils.toString(
|
||||||
|
getClass().getResourceAsStream("/eu/dnetlib/dhp/oa/graph/sql/" + sqlFile));
|
||||||
|
|
||||||
final Consumer<ResultSet> consumer = rs -> producer.apply(rs).forEach(oaf -> emitOaf(oaf));
|
final Consumer<ResultSet> consumer = rs -> producer.apply(rs).forEach(oaf -> emitOaf(oaf));
|
||||||
|
|
||||||
|
@ -134,14 +135,18 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
|
||||||
|
|
||||||
ds.setId(createOpenaireId(10, rs.getString("datasourceid"), true));
|
ds.setId(createOpenaireId(10, rs.getString("datasourceid"), true));
|
||||||
ds.setOriginalId(Arrays.asList(rs.getString("datasourceid")));
|
ds.setOriginalId(Arrays.asList(rs.getString("datasourceid")));
|
||||||
ds.setCollectedfrom(listKeyValues(createOpenaireId(10, rs.getString("collectedfromid"), true), rs.getString("collectedfromname")));
|
ds.setCollectedfrom(
|
||||||
|
listKeyValues(
|
||||||
|
createOpenaireId(10, rs.getString("collectedfromid"), true),
|
||||||
|
rs.getString("collectedfromname")));
|
||||||
ds.setPid(new ArrayList<>());
|
ds.setPid(new ArrayList<>());
|
||||||
ds.setDateofcollection(asString(rs.getDate("dateofcollection")));
|
ds.setDateofcollection(asString(rs.getDate("dateofcollection")));
|
||||||
ds.setDateoftransformation(null); // Value not returned by the SQL query
|
ds.setDateoftransformation(null); // Value not returned by the SQL query
|
||||||
ds.setExtraInfo(new ArrayList<>()); // Values not present in the DB
|
ds.setExtraInfo(new ArrayList<>()); // Values not present in the DB
|
||||||
ds.setOaiprovenance(null); // Values not present in the DB
|
ds.setOaiprovenance(null); // Values not present in the DB
|
||||||
ds.setDatasourcetype(prepareQualifierSplitting(rs.getString("datasourcetype")));
|
ds.setDatasourcetype(prepareQualifierSplitting(rs.getString("datasourcetype")));
|
||||||
ds.setOpenairecompatibility(prepareQualifierSplitting(rs.getString("openairecompatibility")));
|
ds.setOpenairecompatibility(
|
||||||
|
prepareQualifierSplitting(rs.getString("openairecompatibility")));
|
||||||
ds.setOfficialname(field(rs.getString("officialname"), info));
|
ds.setOfficialname(field(rs.getString("officialname"), info));
|
||||||
ds.setEnglishname(field(rs.getString("englishname"), info));
|
ds.setEnglishname(field(rs.getString("englishname"), info));
|
||||||
ds.setWebsiteurl(field(rs.getString("websiteurl"), info));
|
ds.setWebsiteurl(field(rs.getString("websiteurl"), info));
|
||||||
|
@ -174,7 +179,11 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
|
||||||
ds.setPidsystems(field(rs.getString("pidsystems"), info));
|
ds.setPidsystems(field(rs.getString("pidsystems"), info));
|
||||||
ds.setCertificates(field(rs.getString("certificates"), info));
|
ds.setCertificates(field(rs.getString("certificates"), info));
|
||||||
ds.setPolicies(new ArrayList<>()); // The sql query returns an empty array
|
ds.setPolicies(new ArrayList<>()); // The sql query returns an empty array
|
||||||
ds.setJournal(prepareJournal(rs.getString("officialname"), rs.getString("journal"), info)); // Journal
|
ds.setJournal(
|
||||||
|
prepareJournal(
|
||||||
|
rs.getString("officialname"),
|
||||||
|
rs.getString("journal"),
|
||||||
|
info)); // Journal
|
||||||
ds.setDataInfo(info);
|
ds.setDataInfo(info);
|
||||||
ds.setLastupdatetimestamp(lastUpdateTimestamp);
|
ds.setLastupdatetimestamp(lastUpdateTimestamp);
|
||||||
|
|
||||||
|
@@ -193,7 +202,10 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
 
         p.setId(createOpenaireId(40, rs.getString("projectid"), true));
         p.setOriginalId(Arrays.asList(rs.getString("projectid")));
-        p.setCollectedfrom(listKeyValues(createOpenaireId(10, rs.getString("collectedfromid"), true), rs.getString("collectedfromname")));
+        p.setCollectedfrom(
+                listKeyValues(
+                        createOpenaireId(10, rs.getString("collectedfromid"), true),
+                        rs.getString("collectedfromname")));
         p.setPid(new ArrayList<>());
         p.setDateofcollection(asString(rs.getDate("dateofcollection")));
         p.setDateoftransformation(asString(rs.getDate("dateoftransformation")));
@@ -209,7 +221,8 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
         p.setKeywords(field(rs.getString("keywords"), info));
         p.setDuration(field(Integer.toString(rs.getInt("duration")), info));
         p.setEcsc39(field(Boolean.toString(rs.getBoolean("ecsc39")), info));
-        p.setOamandatepublications(field(Boolean.toString(rs.getBoolean("oamandatepublications")), info));
+        p.setOamandatepublications(
+                field(Boolean.toString(rs.getBoolean("oamandatepublications")), info));
         p.setEcarticle29_3(field(Boolean.toString(rs.getBoolean("ecarticle29_3")), info));
         p.setSubjects(prepareListOfStructProps(rs.getArray("subjects"), info));
         p.setFundingtree(prepareListFields(rs.getArray("fundingtree"), info));
@@ -245,7 +258,10 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
 
         o.setId(createOpenaireId(20, rs.getString("organizationid"), true));
         o.setOriginalId(Arrays.asList(rs.getString("organizationid")));
-        o.setCollectedfrom(listKeyValues(createOpenaireId(10, rs.getString("collectedfromid"), true), rs.getString("collectedfromname")));
+        o.setCollectedfrom(
+                listKeyValues(
+                        createOpenaireId(10, rs.getString("collectedfromid"), true),
+                        rs.getString("collectedfromname")));
         o.setPid(new ArrayList<>());
         o.setDateofcollection(asString(rs.getDate("dateofcollection")));
         o.setDateoftransformation(asString(rs.getDate("dateoftransformation")));
@ -259,10 +275,17 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
|
||||||
o.setEclegalbody(field(Boolean.toString(rs.getBoolean("eclegalbody")), info));
|
o.setEclegalbody(field(Boolean.toString(rs.getBoolean("eclegalbody")), info));
|
||||||
o.setEclegalperson(field(Boolean.toString(rs.getBoolean("eclegalperson")), info));
|
o.setEclegalperson(field(Boolean.toString(rs.getBoolean("eclegalperson")), info));
|
||||||
o.setEcnonprofit(field(Boolean.toString(rs.getBoolean("ecnonprofit")), info));
|
o.setEcnonprofit(field(Boolean.toString(rs.getBoolean("ecnonprofit")), info));
|
||||||
o.setEcresearchorganization(field(Boolean.toString(rs.getBoolean("ecresearchorganization")), info));
|
o.setEcresearchorganization(
|
||||||
o.setEchighereducation(field(Boolean.toString(rs.getBoolean("echighereducation")), info));
|
field(Boolean.toString(rs.getBoolean("ecresearchorganization")), info));
|
||||||
o.setEcinternationalorganizationeurinterests(field(Boolean.toString(rs.getBoolean("ecinternationalorganizationeurinterests")), info));
|
o.setEchighereducation(
|
||||||
o.setEcinternationalorganization(field(Boolean.toString(rs.getBoolean("ecinternationalorganization")), info));
|
field(Boolean.toString(rs.getBoolean("echighereducation")), info));
|
||||||
|
o.setEcinternationalorganizationeurinterests(
|
||||||
|
field(
|
||||||
|
Boolean.toString(
|
||||||
|
rs.getBoolean("ecinternationalorganizationeurinterests")),
|
||||||
|
info));
|
||||||
|
o.setEcinternationalorganization(
|
||||||
|
field(Boolean.toString(rs.getBoolean("ecinternationalorganization")), info));
|
||||||
o.setEcenterprise(field(Boolean.toString(rs.getBoolean("ecenterprise")), info));
|
o.setEcenterprise(field(Boolean.toString(rs.getBoolean("ecenterprise")), info));
|
||||||
o.setEcsmevalidated(field(Boolean.toString(rs.getBoolean("ecsmevalidated")), info));
|
o.setEcsmevalidated(field(Boolean.toString(rs.getBoolean("ecsmevalidated")), info));
|
||||||
o.setEcnutscode(field(Boolean.toString(rs.getBoolean("ecnutscode")), info));
|
o.setEcnutscode(field(Boolean.toString(rs.getBoolean("ecnutscode")), info));
|
||||||
|
@ -282,7 +305,9 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
|
||||||
final String orgId = createOpenaireId(20, rs.getString("organization"), true);
|
final String orgId = createOpenaireId(20, rs.getString("organization"), true);
|
||||||
final String dsId = createOpenaireId(10, rs.getString("datasource"), true);
|
final String dsId = createOpenaireId(10, rs.getString("datasource"), true);
|
||||||
final List<KeyValue> collectedFrom =
|
final List<KeyValue> collectedFrom =
|
||||||
listKeyValues(createOpenaireId(10, rs.getString("collectedfromid"), true), rs.getString("collectedfromname"));
|
listKeyValues(
|
||||||
|
createOpenaireId(10, rs.getString("collectedfromid"), true),
|
||||||
|
rs.getString("collectedfromname"));
|
||||||
|
|
||||||
final Relation r1 = new Relation();
|
final Relation r1 = new Relation();
|
||||||
r1.setRelType("datasourceOrganization");
|
r1.setRelType("datasourceOrganization");
|
||||||
@@ -316,7 +341,9 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
         final String orgId = createOpenaireId(20, rs.getString("resporganization"), true);
         final String projectId = createOpenaireId(40, rs.getString("project"), true);
         final List<KeyValue> collectedFrom =
-                listKeyValues(createOpenaireId(10, rs.getString("collectedfromid"), true), rs.getString("collectedfromname"));
+                listKeyValues(
+                        createOpenaireId(10, rs.getString("collectedfromid"), true),
+                        rs.getString("collectedfromname"));
 
         final Relation r1 = new Relation();
         r1.setRelType("projectOrganization");
@ -347,7 +374,17 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
|
||||||
public List<Oaf> processClaims(final ResultSet rs) {
|
public List<Oaf> processClaims(final ResultSet rs) {
|
||||||
|
|
||||||
final DataInfo info =
|
final DataInfo info =
|
||||||
dataInfo(false, null, false, false, qualifier("user:claim", "user:claim", "dnet:provenanceActions", "dnet:provenanceActions"), "0.9");
|
dataInfo(
|
||||||
|
false,
|
||||||
|
null,
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
qualifier(
|
||||||
|
"user:claim",
|
||||||
|
"user:claim",
|
||||||
|
"dnet:provenanceActions",
|
||||||
|
"dnet:provenanceActions"),
|
||||||
|
"0.9");
|
||||||
|
|
||||||
final List<KeyValue> collectedFrom =
|
final List<KeyValue> collectedFrom =
|
||||||
listKeyValues(createOpenaireId(10, "infrastruct_::openaire", true), "OpenAIRE");
|
listKeyValues(createOpenaireId(10, "infrastruct_::openaire", true), "OpenAIRE");
|
||||||
|
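The reflowed dataInfo(...) call pins the provenance of every claim-derived record: not deleted by inference, no inference provenance, not inferred, a user:claim provenance action, and a fixed trust of "0.9". A sketch of that shape with simplified stand-in types; the boolean field names follow the prepareDataInfo hunk further down, except the fourth positional boolean, which is assumed here to be an invisible flag:

    public class ClaimDataInfoSketch {

        // Simplified stand-ins for the dnet-hadoop Qualifier/DataInfo classes.
        record Qualifier(String classid, String classname, String schemeid, String schemename) {}

        record DataInfo(
                boolean deletedbyinference,
                String inferenceprovenance,
                boolean inferred,
                boolean invisible, // assumption: the meaning of the fourth positional boolean
                Qualifier provenanceaction,
                String trust) {}

        public static void main(final String[] args) {
            // Mirrors the argument order of the dataInfo(...) call in the hunk above.
            final DataInfo claimInfo =
                    new DataInfo(
                            false,
                            null,
                            false,
                            false,
                            new Qualifier(
                                    "user:claim", "user:claim",
                                    "dnet:provenanceActions", "dnet:provenanceActions"),
                            "0.9");
            System.out.println(claimInfo);
        }
    }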
@@ -379,9 +416,11 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
             return Arrays.asList(r);
         } else {
             final String sourceId =
-                    createOpenaireId(rs.getString("source_type"), rs.getString("source_id"), false);
+                    createOpenaireId(
+                            rs.getString("source_type"), rs.getString("source_id"), false);
             final String targetId =
-                    createOpenaireId(rs.getString("target_type"), rs.getString("target_id"), false);
+                    createOpenaireId(
+                            rs.getString("target_type"), rs.getString("target_id"), false);
 
             final Relation r1 = new Relation();
             final Relation r2 = new Relation();
@ -438,11 +477,19 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
|
||||||
final String inferenceprovenance = rs.getString("inferenceprovenance");
|
final String inferenceprovenance = rs.getString("inferenceprovenance");
|
||||||
final Boolean inferred = rs.getBoolean("inferred");
|
final Boolean inferred = rs.getBoolean("inferred");
|
||||||
final String trust = rs.getString("trust");
|
final String trust = rs.getString("trust");
|
||||||
return dataInfo(deletedbyinference, inferenceprovenance, inferred, false, MigrationConstants.ENTITYREGISTRY_PROVENANCE_ACTION, trust);
|
return dataInfo(
|
||||||
|
deletedbyinference,
|
||||||
|
inferenceprovenance,
|
||||||
|
inferred,
|
||||||
|
false,
|
||||||
|
MigrationConstants.ENTITYREGISTRY_PROVENANCE_ACTION,
|
||||||
|
trust);
|
||||||
}
|
}
|
||||||
|
|
||||||
private Qualifier prepareQualifierSplitting(final String s) {
|
private Qualifier prepareQualifierSplitting(final String s) {
|
||||||
if (StringUtils.isBlank(s)) { return null; }
|
if (StringUtils.isBlank(s)) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
final String[] arr = s.split("@@@");
|
final String[] arr = s.split("@@@");
|
||||||
return arr.length == 4 ? qualifier(arr[0], arr[1], arr[2], arr[3]) : null;
|
return arr.length == 4 ? qualifier(arr[0], arr[1], arr[2], arr[3]) : null;
|
||||||
}
|
}
|
||||||
|
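prepareQualifierSplitting decodes the @@@-delimited encoding that packs the four qualifier(...) arguments into a single database column (used above for datasourcetype and openairecompatibility). A self-contained sketch of the same parsing, with Qualifier as a simplified stand-in and plain String.isBlank in place of Commons Lang's StringUtils.isBlank:

    public class QualifierSplittingSketch {

        // Simplified stand-in for the dnet-hadoop Qualifier class.
        record Qualifier(String classid, String classname, String schemeid, String schemename) {}

        // Mirrors prepareQualifierSplitting: blank input or wrong arity yields null.
        static Qualifier prepareQualifierSplitting(final String s) {
            if (s == null || s.isBlank()) {
                return null;
            }
            final String[] arr = s.split("@@@");
            return arr.length == 4 ? new Qualifier(arr[0], arr[1], arr[2], arr[3]) : null;
        }

        public static void main(final String[] args) {
            // Hypothetical column value of the form classid@@@classname@@@schemeid@@@schemename.
            System.out.println(
                    prepareQualifierSplitting(
                            "journal@@@Journal@@@dnet:datasource_typologies@@@dnet:datasource_typologies"));
        }
    }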
@@ -458,19 +505,22 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
     }
 
     private StructuredProperty prepareStructProp(final String s, final DataInfo dataInfo) {
-        if (StringUtils.isBlank(s)) { return null; }
+        if (StringUtils.isBlank(s)) {
+            return null;
+        }
         final String[] parts = s.split("###");
         if (parts.length == 2) {
             final String value = parts[0];
             final String[] arr = parts[1].split("@@@");
-            if (arr.length == 4) { return structuredProperty(value, arr[0], arr[1], arr[2], arr[3], dataInfo); }
+            if (arr.length == 4) {
+                return structuredProperty(value, arr[0], arr[1], arr[2], arr[3], dataInfo);
+            }
         }
         return null;
     }
 
     private List<StructuredProperty> prepareListOfStructProps(
-            final Array array,
-            final DataInfo dataInfo) throws SQLException {
+            final Array array, final DataInfo dataInfo) throws SQLException {
         final List<StructuredProperty> res = new ArrayList<>();
         if (array != null) {
             for (final String s : (String[]) array.getArray()) {
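prepareStructProp layers a second delimiter on top of the same convention: value###classid@@@classname@@@schemeid@@@schemename, i.e. the property value followed by the four qualifier parts; prepareListOfStructProps expects each element of the SQL array in this format. A runnable sketch of the two-level split, with simplified stand-in types and the DataInfo argument omitted:

    public class StructPropSketch {

        record Qualifier(String classid, String classname, String schemeid, String schemename) {}

        record StructuredProperty(String value, Qualifier qualifier) {}

        // Mirrors prepareStructProp: "value###classid@@@classname@@@schemeid@@@schemename".
        static StructuredProperty prepareStructProp(final String s) {
            if (s == null || s.isBlank()) {
                return null;
            }
            final String[] parts = s.split("###");
            if (parts.length == 2) {
                final String[] arr = parts[1].split("@@@");
                if (arr.length == 4) {
                    return new StructuredProperty(
                            parts[0], new Qualifier(arr[0], arr[1], arr[2], arr[3]));
                }
            }
            return null;
        }

        public static void main(final String[] args) {
            // Hypothetical subjects-array element as read by prepareListOfStructProps.
            System.out.println(
                    prepareStructProp("energy###keyword@@@keyword@@@dnet:subject@@@dnet:subject"));
        }
    }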
@@ -489,10 +539,14 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
             final String[] arr = sj.split("@@@");
             if (arr.length == 3) {
                 final String issn = StringUtils.isNotBlank(arr[0]) ? arr[0] : null;
-                final String eissn = StringUtils.isNotBlank(arr[1]) ? arr[1] : null;;
-                final String lissn = StringUtils.isNotBlank(arr[2]) ? arr[2] : null;;
+                final String eissn = StringUtils.isNotBlank(arr[1]) ? arr[1] : null;
+                ;
+                final String lissn = StringUtils.isNotBlank(arr[2]) ? arr[2] : null;
+                ;
                 if (issn != null || eissn != null || lissn != null) {
-                    return journal(name, issn, eissn, eissn, null, null, null, null, null, null, null, info);
+                    return journal(
+                            name, issn, eissn, eissn, null, null, null, null, null, null, null,
+                            info);
                 }
             }
         }
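The journal column decoded in this last hunk uses the same @@@ convention with exactly three slots, issn@@@eissn@@@lissn, any of which may be blank. Two details are worth flagging: the reflow keeps the stray empty statements left behind by the old double semicolons, and the call still passes eissn as both the third and fourth journal(...) argument while the parsed lissn goes unused, which looks like a pre-existing bug that this formatting commit does not touch. A sketch of the decoding step alone, with Journal as a simplified stand-in:

    public class JournalSketch {

        record Journal(String name, String issn, String eissn, String lissn) {}

        // Mirrors the issn@@@eissn@@@lissn decoding; blank slots become null.
        static Journal prepareJournal(final String name, final String sj) {
            if (sj == null || sj.isBlank()) {
                return null;
            }
            final String[] arr = sj.split("@@@");
            if (arr.length == 3) {
                final String issn = arr[0].isBlank() ? null : arr[0];
                final String eissn = arr[1].isBlank() ? null : arr[1];
                final String lissn = arr[2].isBlank() ? null : arr[2];
                if (issn != null || eissn != null || lissn != null) {
                    return new Journal(name, issn, eissn, lissn);
                }
            }
            return null;
        }

        public static void main(final String[] args) {
            // Middle slot blank: split keeps the empty token, so arr.length is still 3.
            System.out.println(prepareJournal("Example Journal", "1234-5678@@@@@@3456-7890"));
        }
    }

Since String.split drops trailing empty tokens, an encoding whose last slot is blank (e.g. "1234-5678@@@2345-6789@@@") yields fewer than three parts and falls through to null, in the sketch and presumably in the original as well.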
MigrationConstants.java

@@ -7,7 +7,11 @@ import eu.dnetlib.dhp.schema.oaf.Qualifier;
 public class MigrationConstants {
 
     public static final Qualifier PUBLICATION_RESULTTYPE_QUALIFIER =
-            qualifier("publication", "publication", "dnet:result_typologies", "dnet:result_typologies");
+            qualifier(
+                    "publication",
+                    "publication",
+                    "dnet:result_typologies",
+                    "dnet:result_typologies");
     public static final Qualifier DATASET_RESULTTYPE_QUALIFIER =
             qualifier("dataset", "dataset", "dnet:result_typologies", "dnet:result_typologies");
     public static final Qualifier SOFTWARE_RESULTTYPE_QUALIFIER =
@@ -15,8 +19,15 @@ public class MigrationConstants {
     public static final Qualifier OTHER_RESULTTYPE_QUALIFIER =
             qualifier("other", "other", "dnet:result_typologies", "dnet:result_typologies");
     public static final Qualifier REPOSITORY_PROVENANCE_ACTIONS =
-            qualifier("sysimport:crosswalk:repository", "sysimport:crosswalk:repository", "dnet:provenanceActions", "dnet:provenanceActions");
+            qualifier(
+                    "sysimport:crosswalk:repository",
+                    "sysimport:crosswalk:repository",
+                    "dnet:provenanceActions",
+                    "dnet:provenanceActions");
     public static final Qualifier ENTITYREGISTRY_PROVENANCE_ACTION =
-            qualifier("sysimport:crosswalk:entityregistry", "sysimport:crosswalk:entityregistry", "dnet:provenanceActions", "dnet:provenanceActions");
+            qualifier(
+                    "sysimport:crosswalk:entityregistry",
+                    "sysimport:crosswalk:entityregistry",
+                    "dnet:provenanceActions",
+                    "dnet:provenanceActions");
 }