forked from D-Net/dnet-hadoop
changes due to adding all the result types under Result
This commit is contained in:
parent 5c8533d1a1
commit 3233b01089
@@ -1,56 +1,102 @@
 package eu.dnetlib.dhp.oa.graph.dump;

 import java.io.*;
-import java.net.MalformedURLException;

-import com.cloudera.org.apache.http.HttpResponse;
-import com.cloudera.org.apache.http.client.HttpClient;
-import com.cloudera.org.apache.http.client.methods.HttpPost;
-import com.cloudera.org.apache.http.entity.StringEntity;
-import com.cloudera.org.apache.http.impl.client.DefaultHttpClient;
-import com.cloudera.org.apache.http.util.EntityUtils;
-import com.google.gson.Gson;
-import eu.dnetlib.dhp.oa.graph.dump.zenodo.ZenodoModel;
-import org.springframework.beans.factory.annotation.Value;

 import java.io.IOException;

+//import com.cloudera.org.apache.http.HttpResponse;
+//import com.cloudera.org.apache.http.client.HttpClient;
+//import com.cloudera.org.apache.http.client.methods.HttpPost;
+//import com.cloudera.org.apache.http.entity.StringEntity;
+//import com.cloudera.org.apache.http.impl.client.DefaultHttpClient;
+//import com.cloudera.org.apache.http.util.EntityUtils;
+import com.google.gson.Gson;

-public class SendToZenodo implements Serializable {
+import eu.dnetlib.dhp.oa.graph.dump.zenodo.ZenodoModel;
+import org.apache.http.HttpEntity;
+import org.apache.http.HttpResponse;
+import org.apache.http.client.HttpClient;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.client.methods.HttpPut;
+import org.apache.http.client.methods.HttpUriRequest;
+import org.apache.http.client.methods.RequestBuilder;
+import org.apache.http.entity.StringEntity;
+import org.apache.http.entity.mime.HttpMultipartMode;
+import org.apache.http.entity.mime.MultipartEntityBuilder;
+import org.apache.http.impl.client.DefaultHttpClient;
+import org.apache.http.util.EntityUtils;


+public class APIClient implements Serializable {

 String urlString;
+String bucket;
+HttpClient client;
+String deposition_id;
+final String ACCESS_TOKEN = "5ImUj0VC1ICg4ifK5dc3AGzJhcfAB4osxrFlsr8WxHXxjaYgCE0hY8HZcDoe";

+public String getUrlString() {
-public SendToZenodo(String urlString) throws MalformedURLException {
+return urlString;


-this.urlString = urlString ;
 }

-public void connect() throws IOException {
+public void setUrlString(String urlString) {
+this.urlString = urlString;
+}

+public String getBucket() {
+return bucket;
+}

+public void setBucket(String bucket) {
+this.bucket = bucket;
+}

+public APIClient(String urlString) throws IOException {

+this.urlString = urlString;
+//connect();
+}


+public int connect() throws IOException {

 String json = "{}";

-HttpClient client = new DefaultHttpClient();
+client = new DefaultHttpClient();

 HttpPost post = new HttpPost(urlString);

 StringEntity input = new StringEntity(json);
 post.setEntity(input);
 post.addHeader("Content-Type", "application/json");
+post.setHeader("Authorization", "Bearer " + ACCESS_TOKEN);

 HttpResponse response = client.execute(post);
-System.out.println(response.getStatusLine());
-System.out.println(response.getEntity().getContent().toString());

 json = EntityUtils.toString(response.getEntity());

 ZenodoModel newSubmission = new Gson().fromJson(json, ZenodoModel.class);
-System.out.println(newSubmission.getLinks().getBucket());
+this.bucket = newSubmission.getLinks().getBucket();
+this.deposition_id = newSubmission.getId();

+return response.getStatusLine().getStatusCode();

 }

+public void upload(String filePath, String file_name) throws IOException {
+File file = new File(filePath);
+HttpPut post = new HttpPut(bucket + "/" + file_name);
+post.setHeader("Authorization", "Bearer " + ACCESS_TOKEN);
+post.addHeader("Content-Type", "application/zip");
+HttpEntity data = MultipartEntityBuilder.create().addBinaryBody(file_name, file).build();
+post.setEntity(data);

+//HttpUriRequest request = RequestBuilder.post(bucket + "/" + file_name +"?access_token=5ImUj0VC1ICg4ifK5dc3AGzJhcfAB4osxrFlsr8WxHXxjaYgCE0hY8HZcDoe").setEntity(data).build();

+HttpResponse response = client.execute(post);
+System.out.println(response.getStatusLine().getStatusCode());

+}

 }

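As a reading aid, a minimal sketch of how the new APIClient is meant to be driven, inferred from the methods above: connect() POSTs an empty deposition and records the returned bucket and deposition id, after which upload() PUTs an archive into that bucket. The endpoint and file names below are illustrative assumptions, not values taken from this commit.

import java.io.IOException;

public class APIClientSketch {
	public static void main(String[] args) throws IOException {
		// hypothetical endpoint; the real deposition URL is supplied by the caller
		APIClient client = new APIClient("https://sandbox.zenodo.org/api/deposit/depositions");

		// creates an empty deposition and stores bucket/deposition_id from the JSON response
		int status = client.connect();
		System.out.println("create deposition -> HTTP " + status);

		// streams a local archive into the deposition bucket
		client.upload("/tmp/dump/publication.gz", "publication.gz");
	}
}
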
@@ -9,6 +9,7 @@ import javax.swing.text.html.Option;

 import org.apache.avro.generic.GenericData;

+import eu.dnetlib.dhp.schema.common.ModelConstants;
 import eu.dnetlib.dhp.schema.dump.oaf.*;
 import eu.dnetlib.dhp.schema.oaf.DataInfo;
 import eu.dnetlib.dhp.schema.oaf.Field;
@@ -17,15 +18,14 @@ import eu.dnetlib.dhp.schema.oaf.StructuredProperty;

 public class Mapper implements Serializable {

-public static <I extends eu.dnetlib.dhp.schema.oaf.Result, O extends eu.dnetlib.dhp.schema.dump.oaf.Result> O map(
+public static <I extends eu.dnetlib.dhp.schema.oaf.Result> Result map(
 I input, Map<String, String> communityMap) {

-O out = null;
+final Result out = new Result();
 Optional<eu.dnetlib.dhp.schema.oaf.Qualifier> ort = Optional.ofNullable(input.getResulttype());
 if (ort.isPresent()) {
 switch (ort.get().getClassid()) {
 case "publication":
-out = (O) new Publication();
 Optional<Journal> journal = Optional
 .ofNullable(((eu.dnetlib.dhp.schema.oaf.Publication) input).getJournal());
 if (journal.isPresent()) {
@@ -43,15 +43,15 @@ public class Mapper implements Serializable {
 c.setSp(j.getSp());
 c.setVol(j.getVol());
 out.setContainer(c);
+out.setType(ModelConstants.PUBLICATION_DEFAULT_RESULTTYPE.getClassname());
 }
 break;
 case "dataset":
-Dataset d = new Dataset();
 eu.dnetlib.dhp.schema.oaf.Dataset id = (eu.dnetlib.dhp.schema.oaf.Dataset) input;
-Optional.ofNullable(id.getSize()).ifPresent(v -> d.setSize(v.getValue()));
+Optional.ofNullable(id.getSize()).ifPresent(v -> out.setSize(v.getValue()));
-Optional.ofNullable(id.getVersion()).ifPresent(v -> d.setVersion(v.getValue()));
+Optional.ofNullable(id.getVersion()).ifPresent(v -> out.setVersion(v.getValue()));

-d
+out
 .setGeolocation(
 Optional
 .ofNullable(id.getGeolocation())
@@ -69,19 +69,18 @@ public class Mapper implements Serializable {
 .collect(Collectors.toList()))
 .orElse(null));

-out = (O) d;
+out.setType(ModelConstants.DATASET_DEFAULT_RESULTTYPE.getClassname());

 break;
 case "software":
-Software s = new Software();
 eu.dnetlib.dhp.schema.oaf.Software is = (eu.dnetlib.dhp.schema.oaf.Software) input;
 Optional
 .ofNullable(is.getCodeRepositoryUrl())
-.ifPresent(value -> s.setCodeRepositoryUrl(value.getValue()));
+.ifPresent(value -> out.setCodeRepositoryUrl(value.getValue()));
 Optional
 .ofNullable(is.getDocumentationUrl())
 .ifPresent(
-value -> s
+value -> out
 .setDocumentationUrl(
 value
 .stream()
@@ -90,33 +89,35 @@ public class Mapper implements Serializable {

 Optional
 .ofNullable(is.getProgrammingLanguage())
-.ifPresent(value -> s.setProgrammingLanguage(value.getClassid()));
+.ifPresent(value -> out.setProgrammingLanguage(value.getClassid()));

-out = (O) s;
+out.setType(ModelConstants.SOFTWARE_DEFAULT_RESULTTYPE.getClassname());
 break;
 case "other":
-OtherResearchProduct or = new OtherResearchProduct();
 eu.dnetlib.dhp.schema.oaf.OtherResearchProduct ir = (eu.dnetlib.dhp.schema.oaf.OtherResearchProduct) input;
-or
+out
 .setContactgroup(
 Optional
 .ofNullable(ir.getContactgroup())
 .map(value -> value.stream().map(cg -> cg.getValue()).collect(Collectors.toList()))
 .orElse(null));

-or
+out
 .setContactperson(
 Optional
 .ofNullable(ir.getContactperson())
 .map(value -> value.stream().map(cp -> cp.getValue()).collect(Collectors.toList()))
 .orElse(null));
-or
+out
 .setTool(
 Optional
 .ofNullable(ir.getTool())
 .map(value -> value.stream().map(t -> t.getValue()).collect(Collectors.toList()))
 .orElse(null));
-out = (O) or;
+out.setType(ModelConstants.ORP_DEFAULT_RESULTTYPE.getClassname());

 break;
 }
 Optional<List<eu.dnetlib.dhp.schema.oaf.Author>> oAuthor = Optional.ofNullable(input.getAuthor());
@@ -293,6 +294,7 @@ public class Mapper implements Serializable {
 .ifPresent(value -> instance.setPublicationdate(value.getValue()));
 Optional
 .ofNullable(i.getRefereed())
+// .ifPresent(value -> instance.setRefereed(value.getClassname()));
 .ifPresent(value -> instance.setRefereed(value.getValue()));
 Optional
 .ofNullable(i.getInstancetype())

@@ -57,8 +57,8 @@ public class SparkDumpCommunityProducts implements Serializable {
 final String resultClassName = parser.get("resultTableName");
 log.info("resultTableName: {}", resultClassName);

-final String dumpClassName = parser.get("dumpTableName");
+// final String dumpClassName = parser.get("dumpTableName");
-log.info("dumpClassName: {}", dumpClassName);
+// log.info("dumpClassName: {}", dumpClassName);

 final String isLookUpUrl = parser.get("isLookUpUrl");
 log.info("isLookUpUrl: {}", isLookUpUrl);
@@ -69,8 +69,8 @@ public class SparkDumpCommunityProducts implements Serializable {
 final Optional<String> cm = Optional.ofNullable(parser.get("communityMap"));

 Class<? extends Result> inputClazz = (Class<? extends Result>) Class.forName(resultClassName);
-Class<? extends eu.dnetlib.dhp.schema.dump.oaf.Result> dumpClazz = (Class<? extends eu.dnetlib.dhp.schema.dump.oaf.Result>) Class
+// Class<? extends eu.dnetlib.dhp.schema.dump.oaf.Result> dumpClazz = (Class<? extends eu.dnetlib.dhp.schema.dump.oaf.Result>) Class
-.forName(dumpClassName);
+// .forName(dumpClassName);

 SparkConf conf = new SparkConf();

@@ -89,7 +89,7 @@ public class SparkDumpCommunityProducts implements Serializable {
 isSparkSessionManaged,
 spark -> {
 Utils.removeOutputDir(spark, outputPath);
-execDump(spark, inputPath, outputPath, communityMap, inputClazz, dumpClazz);
+execDump(spark, inputPath, outputPath, communityMap, inputClazz);// , dumpClazz);

 });

@@ -103,14 +103,13 @@ public class SparkDumpCommunityProducts implements Serializable {
 String inputPath,
 String outputPath,
 CommunityMap communityMap,
-Class<I> inputClazz,
+Class<I> inputClazz) {// Class<O> dumpClazz) {
-Class<O> dumpClazz) {

 // Set<String> communities = communityMap.keySet();
 Dataset<I> tmp = Utils.readPath(spark, inputPath, inputClazz);

 tmp
-.map(value -> execMap(value, communityMap), Encoders.bean(dumpClazz))
+.map(value -> execMap(value, communityMap), Encoders.bean(eu.dnetlib.dhp.schema.dump.oaf.Result.class))
 .filter(Objects::nonNull)
 .write()
 .mode(SaveMode.Overwrite)
@@ -119,7 +118,7 @@ public class SparkDumpCommunityProducts implements Serializable {

 }

-private static <O extends eu.dnetlib.dhp.schema.dump.oaf.Result, I extends Result> O execMap(I value,
+private static <I extends Result> eu.dnetlib.dhp.schema.dump.oaf.Result execMap(I value,
 CommunityMap communityMap) {
 {
 Set<String> communities = communityMap.keySet();

@@ -4,8 +4,8 @@ package eu.dnetlib.dhp.oa.graph.dump;
 import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

 import java.io.Serializable;
-import java.io.StringReader;
+import java.util.Optional;
-import java.util.*;
+import java.util.Set;
 import java.util.stream.Collectors;

 import org.apache.commons.io.IOUtils;
@@ -13,10 +13,6 @@ import org.apache.spark.SparkConf;
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.SaveMode;
 import org.apache.spark.sql.SparkSession;
-import org.dom4j.Document;
-import org.dom4j.DocumentException;
-import org.dom4j.Element;
-import org.dom4j.io.SAXReader;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -53,16 +49,14 @@ public class SparkSplitForCommunity implements Serializable {
 final String outputPath = parser.get("outputPath");
 log.info("outputPath: {}", outputPath);

-final String resultClassName = parser.get("resultTableName");
+// final String resultClassName = parser.get("resultTableName");
-log.info("resultTableName: {}", resultClassName);
+// log.info("resultTableName: {}", resultClassName);

 final String isLookUpUrl = parser.get("isLookUpUrl");
 log.info("isLookUpUrl: {}", isLookUpUrl);

 final Optional<String> cm = Optional.ofNullable(parser.get("communityMap"));

-Class<? extends Result> inputClazz = (Class<? extends Result>) Class.forName(resultClassName);

 SparkConf conf = new SparkConf();

 CommunityMap communityMap;
@@ -80,7 +74,7 @@ public class SparkSplitForCommunity implements Serializable {
 isSparkSessionManaged,
 spark -> {
 Utils.removeOutputDir(spark, outputPath);
-execSplit(spark, inputPath, outputPath, communityMap.keySet(), inputClazz);
+execSplit(spark, inputPath, outputPath, communityMap.keySet());// , inputClazz);
 });
 }

@@ -88,10 +82,14 @@ public class SparkSplitForCommunity implements Serializable {
 return ISLookupClientFactory.getLookUpService(isLookUpUrl);
 }

-private static <R extends Result> void execSplit(SparkSession spark, String inputPath, String outputPath,
+private static void execSplit(SparkSession spark, String inputPath, String outputPath,
-Set<String> communities, Class<R> inputClazz) {
+Set<String> communities) {// }, Class<R> inputClazz) {

-Dataset<R> result = Utils.readPath(spark, inputPath, inputClazz);
+Dataset<Result> result = Utils
+.readPath(spark, inputPath + "/publication", Result.class)
+.union(Utils.readPath(spark, inputPath + "/dataset", Result.class))
+.union(Utils.readPath(spark, inputPath + "/orp", Result.class))
+.union(Utils.readPath(spark, inputPath + "/software", Result.class));

 communities
 .stream()
@@ -99,16 +97,17 @@ public class SparkSplitForCommunity implements Serializable {

 }

-private static <R extends Result> void printResult(String c, Dataset<R> result, String outputPath) {
+private static void printResult(String c, Dataset<Result> result, String outputPath) {
 result
 .filter(r -> containsCommunity(r, c))
+.repartition(1)
 .write()
 .option("compression", "gzip")
 .mode(SaveMode.Append)
 .json(outputPath + "/" + c);
 }

-private static <R extends Result> boolean containsCommunity(R r, String c) {
+private static boolean containsCommunity(Result r, String c) {
 if (Optional.ofNullable(r.getContext()).isPresent()) {
 return r
 .getContext()

@@ -1,4 +0,0 @@
-package eu.dnetlib.dhp.oa.graph.dump;
-
-public class SparkSplitForCommunity2 {
-}

@@ -52,14 +52,9 @@ public class SparkUpdateProjectInfo implements Serializable {
 final String outputPath = parser.get("outputPath");
 log.info("outputPath: {}", outputPath);

-final String resultClassName = parser.get("resultTableName");
-log.info("resultTableName: {}", resultClassName);

 final String preparedInfoPath = parser.get("preparedInfoPath");
 log.info("preparedInfoPath: {}", preparedInfoPath);

-Class<? extends Result> inputClazz = (Class<? extends Result>) Class.forName(resultClassName);

 SparkConf conf = new SparkConf();

 runWithSparkSession(
@@ -67,33 +62,33 @@ public class SparkUpdateProjectInfo implements Serializable {
 isSparkSessionManaged,
 spark -> {
 Utils.removeOutputDir(spark, outputPath);
-extend(spark, inputPath, outputPath, preparedInfoPath, inputClazz);
+extend(spark, inputPath, outputPath, preparedInfoPath);// , inputClazz);
 });
 }

-private static <R extends Result> void extend(
+private static void extend(
 SparkSession spark,
 String inputPath,
 String outputPath,
-String preparedInfoPath,
+String preparedInfoPath) {// ,
-Class<R> inputClazz) {
+// Class<R> inputClazz) {

-Dataset<R> result = Utils.readPath(spark, inputPath, inputClazz);
+Dataset<Result> result = Utils.readPath(spark, inputPath, Result.class);
 Dataset<ResultProject> resultProject = Utils.readPath(spark, preparedInfoPath, ResultProject.class);
 result
 .joinWith(
 resultProject, result.col("id").equalTo(resultProject.col("resultId")),
 "left")
 .map(value -> {
-R r = value._1();
+Result r = value._1();
 Optional.ofNullable(value._2()).ifPresent(rp -> {
 r.setProjects(rp.getProjectsList());
 });
 return r;
-}, Encoders.bean(inputClazz))
+}, Encoders.bean(Result.class))
 .write()
 .option("compression", "gzip")
-.mode(SaveMode.Overwrite)
+.mode(SaveMode.Append)
 .json(outputPath);

 }

@@ -1,6 +1,7 @@

 package eu.dnetlib.dhp.oa.graph.dump.zenodo;

-public class Communities {
+public class Community {
 private String identifier;

 public String getIdentifier() {

@@ -1,4 +1,32 @@

 package eu.dnetlib.dhp.oa.graph.dump.zenodo;

 public class Creator {
+private String affiliation;
+private String name;
+private String orcid;
+
+public String getAffiliation() {
+return affiliation;
+}
+
+public void setAffiliation(String affiliation) {
+this.affiliation = affiliation;
+}
+
+public String getName() {
+return name;
+}
+
+public void setName(String name) {
+this.name = name;
+}
+
+public String getOrcid() {
+return orcid;
+}
+
+public void setOrcid(String orcid) {
+this.orcid = orcid;
+}
 }

@@ -1,7 +1,58 @@
-package eu.dnetlib.dhp.oa.graph.dump;
+package eu.dnetlib.dhp.oa.graph.dump.zenodo;

 import java.io.Serializable;

+import net.minidev.json.annotate.JsonIgnore;

 public class File implements Serializable {
-{"checksum": "dbf89bfc64fb70f7861b3b338ac3626d", "filename": "dataset.gz", "filesize": 22822474699, "id": "d20d0069-6a66-4cf5-a226-22ab5f87ece8", "links": {"download": "https://zenodo.org/api/files/44450289-0dbe-4d38-aa91-7a671dcd0700/dataset.gz", "self": "https://zenodo.org/api/deposit/depositions/3516918/files/d20d0069-6a66-4cf5-a226-22ab5f87ece8"}};
+private String checksum;
+private String filename;
+private long filesize;
+private String id;
+
+@JsonIgnore
+// private Links links;
+
+public String getChecksum() {
+return checksum;
+}
+
+public void setChecksum(String checksum) {
+this.checksum = checksum;
+}
+
+public String getFilename() {
+return filename;
+}
+
+public void setFilename(String filename) {
+this.filename = filename;
+}
+
+public long getFilesize() {
+return filesize;
+}
+
+public void setFilesize(long filesize) {
+this.filesize = filesize;
+}
+
+public String getId() {
+return id;
+}
+
+public void setId(String id) {
+this.id = id;
+}
+
+// @JsonIgnore
+// public Links getLinks() {
+// return links;
+// }
+//
+// @JsonIgnore
+// public void setLinks(Links links) {
+// this.links = links;
+// }
 }

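The JSON literal left in the old File.java body shows the payload this bean is meant to hold; below is a minimal Gson sketch that deserialises that same sample (values copied from the literal above, class and method names from the diff; the links object is omitted, as in the bean).

import com.google.gson.Gson;
import eu.dnetlib.dhp.oa.graph.dump.zenodo.File;

public class FileParseSketch {
	public static void main(String[] args) {
		// sample file entry from a Zenodo deposition response
		String json = "{\"checksum\": \"dbf89bfc64fb70f7861b3b338ac3626d\", "
			+ "\"filename\": \"dataset.gz\", \"filesize\": 22822474699, "
			+ "\"id\": \"d20d0069-6a66-4cf5-a226-22ab5f87ece8\"}";

		// Gson fills the matching bean fields and ignores anything it does not know
		File f = new Gson().fromJson(json, File.class);
		System.out.println(f.getFilename() + " (" + f.getFilesize() + " bytes)");
	}
}
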
@@ -1,4 +1,16 @@

 package eu.dnetlib.dhp.oa.graph.dump.zenodo;

-public class Grant {
+import java.io.Serializable;

+public class Grant implements Serializable {
+private String id;
+
+public String getId() {
+return id;
+}
+
+public void setId(String id) {
+this.id = id;
+}
 }

@@ -1,4 +1,92 @@

 package eu.dnetlib.dhp.oa.graph.dump.zenodo;

-public class Links {
+import java.io.Serializable;

+public class Links implements Serializable {
+
+private String bucket;
+
+private String discard;
+
+private String edit;
+private String files;
+private String html;
+private String latest_draft;
+private String latest_draft_html;
+private String publish;
+
+private String self;
+
+public String getBucket() {
+return bucket;
+}
+
+public void setBucket(String bucket) {
+this.bucket = bucket;
+}
+
+public String getDiscard() {
+return discard;
+}
+
+public void setDiscard(String discard) {
+this.discard = discard;
+}
+
+public String getEdit() {
+return edit;
+}
+
+public void setEdit(String edit) {
+this.edit = edit;
+}
+
+public String getFiles() {
+return files;
+}
+
+public void setFiles(String files) {
+this.files = files;
+}
+
+public String getHtml() {
+return html;
+}
+
+public void setHtml(String html) {
+this.html = html;
+}
+
+public String getLatest_draft() {
+return latest_draft;
+}
+
+public void setLatest_draft(String latest_draft) {
+this.latest_draft = latest_draft;
+}
+
+public String getLatest_draft_html() {
+return latest_draft_html;
+}
+
+public void setLatest_draft_html(String latest_draft_html) {
+this.latest_draft_html = latest_draft_html;
+}
+
+public String getPublish() {
+return publish;
+}
+
+public void setPublish(String publish) {
+this.publish = publish;
+}
+
+public String getSelf() {
+return self;
+}
+
+public void setSelf(String self) {
+this.self = self;
+}
 }

@@ -1,4 +1,153 @@

 package eu.dnetlib.dhp.oa.graph.dump.zenodo;

-public class Metadata {
+import java.io.Serializable;
+import java.util.List;
+
+public class Metadata implements Serializable {
+
+private String access_right;
+private List<Community> communities;
+private List<Creator> creators;
+private String description;
+private String doi;
+private List<Grant> grants;
+private List<String> keywords;
+private String language;
+private String license;
+private PrereserveDoi prereserve_doi;
+private String publication_date;
+private List<String> references;
+private List<RelatedIdentifier> related_identifiers;
+private String title;
+private String upload_type;
+private String version;
+
+public String getUpload_type() {
+return upload_type;
+}
+
+public void setUpload_type(String upload_type) {
+this.upload_type = upload_type;
+}
+
+public String getVersion() {
+return version;
+}
+
+public void setVersion(String version) {
+this.version = version;
+}
+
+public String getAccess_right() {
+return access_right;
+}
+
+public void setAccess_right(String access_right) {
+this.access_right = access_right;
+}
+
+public List<Community> getCommunities() {
+return communities;
+}
+
+public void setCommunities(List<Community> communities) {
+this.communities = communities;
+}
+
+public List<Creator> getCreators() {
+return creators;
+}
+
+public void setCreators(List<Creator> creators) {
+this.creators = creators;
+}
+
+public String getDescription() {
+return description;
+}
+
+public void setDescription(String description) {
+this.description = description;
+}
+
+public String getDoi() {
+return doi;
+}
+
+public void setDoi(String doi) {
+this.doi = doi;
+}
+
+public List<Grant> getGrants() {
+return grants;
+}
+
+public void setGrants(List<Grant> grants) {
+this.grants = grants;
+}
+
+public List<String> getKeywords() {
+return keywords;
+}
+
+public void setKeywords(List<String> keywords) {
+this.keywords = keywords;
+}
+
+public String getLanguage() {
+return language;
+}
+
+public void setLanguage(String language) {
+this.language = language;
+}
+
+public String getLicense() {
+return license;
+}
+
+public void setLicense(String license) {
+this.license = license;
+}
+
+public PrereserveDoi getPrereserve_doi() {
+return prereserve_doi;
+}
+
+public void setPrereserve_doi(PrereserveDoi prereserve_doi) {
+this.prereserve_doi = prereserve_doi;
+}
+
+public String getPublication_date() {
+return publication_date;
+}
+
+public void setPublication_date(String publication_date) {
+this.publication_date = publication_date;
+}
+
+public List<String> getReferences() {
+return references;
+}
+
+public void setReferences(List<String> references) {
+this.references = references;
+}
+
+public List<RelatedIdentifier> getRelated_identifiers() {
+return related_identifiers;
+}
+
+public void setRelated_identifiers(List<RelatedIdentifier> related_identifiers) {
+this.related_identifiers = related_identifiers;
+}
+
+public String getTitle() {
+return title;
+}
+
+public void setTitle(String title) {
+this.title = title;
+}
 }

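A short sketch of filling the new Metadata bean before serialising it for a deposition request; the setter names come from the diff above, while the concrete values (upload type, access right, date) are illustrative assumptions about what the Zenodo API expects.

import com.google.gson.Gson;
import eu.dnetlib.dhp.oa.graph.dump.zenodo.Metadata;

public class MetadataSketch {
	public static void main(String[] args) {
		Metadata metadata = new Metadata();
		metadata.setTitle("OpenAIRE community dump");   // illustrative title
		metadata.setUpload_type("dataset");             // assumed Zenodo upload type
		metadata.setAccess_right("open");               // assumed access-right value
		metadata.setVersion("1.0");                     // illustrative version
		metadata.setPublication_date("2020-07-01");     // illustrative date

		// the serialised block would form the "metadata" part of a deposition update request
		System.out.println(new Gson().toJson(metadata));
	}
}
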
@@ -1,4 +1,25 @@

 package eu.dnetlib.dhp.oa.graph.dump.zenodo;

-public class PrereserveDoi {
+import java.io.Serializable;

+public class PrereserveDoi implements Serializable {
+private String doi;
+private String recid;
+
+public String getDoi() {
+return doi;
+}
+
+public void setDoi(String doi) {
+this.doi = doi;
+}
+
+public String getRecid() {
+return recid;
+}
+
+public void setRecid(String recid) {
+this.recid = recid;
+}
 }

@@ -1,4 +1,43 @@

 package eu.dnetlib.dhp.oa.graph.dump.zenodo;

-public class RelatedIdentifier {
+import java.io.Serializable;

+public class RelatedIdentifier implements Serializable {
+private String identifier;
+private String relation;
+private String resource_type;
+private String scheme;
+
+public String getIdentifier() {
+return identifier;
+}
+
+public void setIdentifier(String identifier) {
+this.identifier = identifier;
+}
+
+public String getRelation() {
+return relation;
+}
+
+public void setRelation(String relation) {
+this.relation = relation;
+}
+
+public String getResource_type() {
+return resource_type;
+}
+
+public void setResource_type(String resource_type) {
+this.resource_type = resource_type;
+}
+
+public String getScheme() {
+return scheme;
+}
+
+public void setScheme(String scheme) {
+this.scheme = scheme;
+}
 }

@@ -1,34 +1,25 @@

 package eu.dnetlib.dhp.oa.graph.dump.zenodo;

 import java.io.Serializable;
 import java.util.List;

-public class ZenodoResponse implements Serializable {
+public class ZenodoModel implements Serializable {

-private String conceptdoi;
 private String conceptrecid;
 private String created;
-private String doi;
-private String doi_url;
 private List<File> files;
 private String id;
 private Links links;
 private Metadata metadata;
-private String modifie;
+private String modified;
 private String owner;
 private String record_id;
 private String state;
 private boolean submitted;
 private String title;

-public String getConceptdoi() {
-return conceptdoi;
-}
-
-public void setConceptdoi(String conceptdoi) {
-this.conceptdoi = conceptdoi;
-}

 public String getConceptrecid() {
 return conceptrecid;
 }
@@ -45,22 +36,6 @@ public class ZenodoResponse implements Serializable {
 this.created = created;
 }

-public String getDoi() {
-return doi;
-}
-
-public void setDoi(String doi) {
-this.doi = doi;
-}
-
-public String getDoi_url() {
-return doi_url;
-}
-
-public void setDoi_url(String doi_url) {
-this.doi_url = doi_url;
-}

 public List<File> getFiles() {
 return files;
 }
@@ -93,12 +68,12 @@ public class ZenodoResponse implements Serializable {
 this.metadata = metadata;
 }

-public String getModifie() {
+public String getModified() {
-return modifie;
+return modified;
 }

-public void setModifie(String modifie) {
+public void setModified(String modified) {
-this.modifie = modifie;
+this.modified = modified;
 }

 public String getOwner() {