fn, String msg) {
+ try {
+ return fn.get();
+ } catch (Exception e) {
+ throw new RuntimeException(msg, e);
+ }
+ }
}
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/model/mdstore/MetadataRecord.java b/dhp-common/src/main/java/eu/dnetlib/dhp/model/mdstore/MetadataRecord.java
index 56d7217ff..ce65e710f 100644
--- a/dhp-common/src/main/java/eu/dnetlib/dhp/model/mdstore/MetadataRecord.java
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/model/mdstore/MetadataRecord.java
@@ -1,120 +1,121 @@
+
package eu.dnetlib.dhp.model.mdstore;
-import eu.dnetlib.dhp.utils.DHPUtils;
import java.io.Serializable;
+import eu.dnetlib.dhp.utils.DHPUtils;
+
/** This class models a record inside the new Metadata store collection on HDFS * */
public class MetadataRecord implements Serializable {
- /** The D-Net Identifier associated to the record */
- private String id;
+ /** The D-Net Identifier associated to the record */
+ private String id;
- /** The original Identifier of the record */
- private String originalId;
+ /** The original Identifier of the record */
+ private String originalId;
- /** The encoding of the record, should be JSON or XML */
- private String encoding;
+ /** The encoding of the record, should be JSON or XML */
+ private String encoding;
- /**
- * The information about the provenance of the record see @{@link Provenance} for the model of
- * this information
- */
- private Provenance provenance;
+ /**
+ * The information about the provenance of the record see @{@link Provenance} for the model of this information
+ */
+ private Provenance provenance;
- /** The content of the metadata */
- private String body;
+ /** The content of the metadata */
+ private String body;
- /** the date when the record has been stored */
- private long dateOfCollection;
+ /** the date when the record has been stored */
+ private long dateOfCollection;
- /** the date when the record has been stored */
- private long dateOfTransformation;
+ /** the date when the record has been stored */
+ private long dateOfTransformation;
- public MetadataRecord() {
- this.dateOfCollection = System.currentTimeMillis();
- }
+ public MetadataRecord() {
+ this.dateOfCollection = System.currentTimeMillis();
+ }
- public MetadataRecord(
- String originalId,
- String encoding,
- Provenance provenance,
- String body,
- long dateOfCollection) {
+ public MetadataRecord(
+ String originalId,
+ String encoding,
+ Provenance provenance,
+ String body,
+ long dateOfCollection) {
- this.originalId = originalId;
- this.encoding = encoding;
- this.provenance = provenance;
- this.body = body;
- this.dateOfCollection = dateOfCollection;
- this.id = DHPUtils.generateIdentifier(originalId, this.provenance.getNsPrefix());
- }
+ this.originalId = originalId;
+ this.encoding = encoding;
+ this.provenance = provenance;
+ this.body = body;
+ this.dateOfCollection = dateOfCollection;
+ this.id = DHPUtils.generateIdentifier(originalId, this.provenance.getNsPrefix());
+ }
- public String getId() {
- return id;
- }
+ public String getId() {
+ return id;
+ }
- public void setId(String id) {
- this.id = id;
- }
+ public void setId(String id) {
+ this.id = id;
+ }
- public String getOriginalId() {
- return originalId;
- }
+ public String getOriginalId() {
+ return originalId;
+ }
- public void setOriginalId(String originalId) {
- this.originalId = originalId;
- }
+ public void setOriginalId(String originalId) {
+ this.originalId = originalId;
+ }
- public String getEncoding() {
- return encoding;
- }
+ public String getEncoding() {
+ return encoding;
+ }
- public void setEncoding(String encoding) {
- this.encoding = encoding;
- }
+ public void setEncoding(String encoding) {
+ this.encoding = encoding;
+ }
- public Provenance getProvenance() {
- return provenance;
- }
+ public Provenance getProvenance() {
+ return provenance;
+ }
- public void setProvenance(Provenance provenance) {
- this.provenance = provenance;
- }
+ public void setProvenance(Provenance provenance) {
+ this.provenance = provenance;
+ }
- public String getBody() {
- return body;
- }
+ public String getBody() {
+ return body;
+ }
- public void setBody(String body) {
- this.body = body;
- }
+ public void setBody(String body) {
+ this.body = body;
+ }
- public long getDateOfCollection() {
- return dateOfCollection;
- }
+ public long getDateOfCollection() {
+ return dateOfCollection;
+ }
- public void setDateOfCollection(long dateOfCollection) {
- this.dateOfCollection = dateOfCollection;
- }
+ public void setDateOfCollection(long dateOfCollection) {
+ this.dateOfCollection = dateOfCollection;
+ }
- public long getDateOfTransformation() {
- return dateOfTransformation;
- }
+ public long getDateOfTransformation() {
+ return dateOfTransformation;
+ }
- public void setDateOfTransformation(long dateOfTransformation) {
- this.dateOfTransformation = dateOfTransformation;
- }
+ public void setDateOfTransformation(long dateOfTransformation) {
+ this.dateOfTransformation = dateOfTransformation;
+ }
- @Override
- public boolean equals(Object o) {
- if (!(o instanceof MetadataRecord)) {
- return false;
- }
- return ((MetadataRecord) o).getId().equalsIgnoreCase(id);
- }
+ @Override
+ public boolean equals(Object o) {
+ if (!(o instanceof MetadataRecord)) {
+ return false;
+ }
+ return ((MetadataRecord) o).getId().equalsIgnoreCase(id);
+ }
- @Override
- public int hashCode() {
- return id.hashCode();
- }
+ @Override
+ public int hashCode() {
+ return id.hashCode();
+ }
}
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/model/mdstore/Provenance.java b/dhp-common/src/main/java/eu/dnetlib/dhp/model/mdstore/Provenance.java
index 90897c5c4..556535022 100644
--- a/dhp-common/src/main/java/eu/dnetlib/dhp/model/mdstore/Provenance.java
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/model/mdstore/Provenance.java
@@ -1,49 +1,52 @@
+
package eu.dnetlib.dhp.model.mdstore;
import java.io.Serializable;
/**
* @author Sandro La Bruzzo
- * Provenace class models the provenance of the record in the metadataStore It contains the
- * identifier and the name of the datasource that gives the record
+ *
+ * Provenace class models the provenance of the record in the metadataStore It contains the identifier and the
+ * name of the datasource that gives the record
*/
public class Provenance implements Serializable {
- private String datasourceId;
+ private String datasourceId;
- private String datasourceName;
+ private String datasourceName;
- private String nsPrefix;
+ private String nsPrefix;
- public Provenance() {}
+ public Provenance() {
+ }
- public Provenance(String datasourceId, String datasourceName, String nsPrefix) {
- this.datasourceId = datasourceId;
- this.datasourceName = datasourceName;
- this.nsPrefix = nsPrefix;
- }
+ public Provenance(String datasourceId, String datasourceName, String nsPrefix) {
+ this.datasourceId = datasourceId;
+ this.datasourceName = datasourceName;
+ this.nsPrefix = nsPrefix;
+ }
- public String getDatasourceId() {
- return datasourceId;
- }
+ public String getDatasourceId() {
+ return datasourceId;
+ }
- public void setDatasourceId(String datasourceId) {
- this.datasourceId = datasourceId;
- }
+ public void setDatasourceId(String datasourceId) {
+ this.datasourceId = datasourceId;
+ }
- public String getDatasourceName() {
- return datasourceName;
- }
+ public String getDatasourceName() {
+ return datasourceName;
+ }
- public void setDatasourceName(String datasourceName) {
- this.datasourceName = datasourceName;
- }
+ public void setDatasourceName(String datasourceName) {
+ this.datasourceName = datasourceName;
+ }
- public String getNsPrefix() {
- return nsPrefix;
- }
+ public String getNsPrefix() {
+ return nsPrefix;
+ }
- public void setNsPrefix(String nsPrefix) {
- this.nsPrefix = nsPrefix;
- }
+ public void setNsPrefix(String nsPrefix) {
+ this.nsPrefix = nsPrefix;
+ }
}
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/parser/utility/VtdException.java b/dhp-common/src/main/java/eu/dnetlib/dhp/parser/utility/VtdException.java
index 3576dc92b..22945309c 100644
--- a/dhp-common/src/main/java/eu/dnetlib/dhp/parser/utility/VtdException.java
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/parser/utility/VtdException.java
@@ -1,12 +1,13 @@
+
package eu.dnetlib.dhp.parser.utility;
public class VtdException extends Exception {
- public VtdException(final Exception e) {
- super(e);
- }
+ public VtdException(final Exception e) {
+ super(e);
+ }
- public VtdException(final Throwable e) {
- super(e);
- }
+ public VtdException(final Throwable e) {
+ super(e);
+ }
}
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/parser/utility/VtdUtilityParser.java b/dhp-common/src/main/java/eu/dnetlib/dhp/parser/utility/VtdUtilityParser.java
index a12662d1f..9ac0a0bf7 100644
--- a/dhp-common/src/main/java/eu/dnetlib/dhp/parser/utility/VtdUtilityParser.java
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/parser/utility/VtdUtilityParser.java
@@ -1,105 +1,110 @@
+
package eu.dnetlib.dhp.parser.utility;
-import com.ximpleware.AutoPilot;
-import com.ximpleware.VTDNav;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import com.ximpleware.AutoPilot;
+import com.ximpleware.VTDNav;
+
/** Created by sandro on 9/29/16. */
public class VtdUtilityParser {
- public static List getTextValuesWithAttributes(
- final AutoPilot ap, final VTDNav vn, final String xpath, final List attributes)
- throws VtdException {
- final List results = new ArrayList<>();
- try {
- ap.selectXPath(xpath);
+ public static List getTextValuesWithAttributes(
+ final AutoPilot ap, final VTDNav vn, final String xpath, final List attributes)
+ throws VtdException {
+ final List results = new ArrayList<>();
+ try {
+ ap.selectXPath(xpath);
- while (ap.evalXPath() != -1) {
- final Node currentNode = new Node();
- int t = vn.getText();
- if (t >= 0) {
- currentNode.setTextValue(vn.toNormalizedString(t));
- }
- currentNode.setAttributes(getAttributes(vn, attributes));
- results.add(currentNode);
- }
- return results;
- } catch (Exception e) {
- throw new VtdException(e);
- }
- }
+ while (ap.evalXPath() != -1) {
+ final Node currentNode = new Node();
+ int t = vn.getText();
+ if (t >= 0) {
+ currentNode.setTextValue(vn.toNormalizedString(t));
+ }
+ currentNode.setAttributes(getAttributes(vn, attributes));
+ results.add(currentNode);
+ }
+ return results;
+ } catch (Exception e) {
+ throw new VtdException(e);
+ }
+ }
- private static Map getAttributes(final VTDNav vn, final List attributes) {
- final Map currentAttributes = new HashMap<>();
- if (attributes != null) {
+ private static Map getAttributes(final VTDNav vn, final List attributes) {
+ final Map currentAttributes = new HashMap<>();
+ if (attributes != null) {
- attributes.forEach(
- attributeKey -> {
- try {
- int attr = vn.getAttrVal(attributeKey);
- if (attr > -1) {
- currentAttributes.put(attributeKey, vn.toNormalizedString(attr));
- }
- } catch (Throwable e) {
- throw new RuntimeException(e);
- }
- });
- }
- return currentAttributes;
- }
+ attributes
+ .forEach(
+ attributeKey -> {
+ try {
+ int attr = vn.getAttrVal(attributeKey);
+ if (attr > -1) {
+ currentAttributes.put(attributeKey, vn.toNormalizedString(attr));
+ }
+ } catch (Throwable e) {
+ throw new RuntimeException(e);
+ }
+ });
+ }
+ return currentAttributes;
+ }
- public static List getTextValue(final AutoPilot ap, final VTDNav vn, final String xpath)
- throws VtdException {
- List results = new ArrayList<>();
- try {
- ap.selectXPath(xpath);
- while (ap.evalXPath() != -1) {
- int t = vn.getText();
- if (t > -1) results.add(vn.toNormalizedString(t));
- }
- return results;
- } catch (Exception e) {
- throw new VtdException(e);
- }
- }
+ public static List getTextValue(final AutoPilot ap, final VTDNav vn, final String xpath)
+ throws VtdException {
+ List results = new ArrayList<>();
+ try {
+ ap.selectXPath(xpath);
+ while (ap.evalXPath() != -1) {
+ int t = vn.getText();
+ if (t > -1)
+ results.add(vn.toNormalizedString(t));
+ }
+ return results;
+ } catch (Exception e) {
+ throw new VtdException(e);
+ }
+ }
- public static String getSingleValue(final AutoPilot ap, final VTDNav nav, final String xpath)
- throws VtdException {
- try {
- ap.selectXPath(xpath);
- while (ap.evalXPath() != -1) {
- int it = nav.getText();
- if (it > -1) return nav.toNormalizedString(it);
- }
- return null;
- } catch (Exception e) {
- throw new VtdException(e);
- }
- }
+ public static String getSingleValue(final AutoPilot ap, final VTDNav nav, final String xpath)
+ throws VtdException {
+ try {
+ ap.selectXPath(xpath);
+ while (ap.evalXPath() != -1) {
+ int it = nav.getText();
+ if (it > -1)
+ return nav.toNormalizedString(it);
+ }
+ return null;
+ } catch (Exception e) {
+ throw new VtdException(e);
+ }
+ }
- public static class Node {
+ public static class Node {
- private String textValue;
+ private String textValue;
- private Map attributes;
+ private Map attributes;
- public String getTextValue() {
- return textValue;
- }
+ public String getTextValue() {
+ return textValue;
+ }
- public void setTextValue(final String textValue) {
- this.textValue = textValue;
- }
+ public void setTextValue(final String textValue) {
+ this.textValue = textValue;
+ }
- public Map getAttributes() {
- return attributes;
- }
+ public Map getAttributes() {
+ return attributes;
+ }
- public void setAttributes(final Map attributes) {
- this.attributes = attributes;
- }
- }
+ public void setAttributes(final Map attributes) {
+ this.attributes = attributes;
+ }
+ }
}
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/utils/DHPUtils.java b/dhp-common/src/main/java/eu/dnetlib/dhp/utils/DHPUtils.java
index f5800cdaf..dfbaf3a6c 100644
--- a/dhp-common/src/main/java/eu/dnetlib/dhp/utils/DHPUtils.java
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/utils/DHPUtils.java
@@ -1,70 +1,75 @@
+
package eu.dnetlib.dhp.utils;
-import com.jayway.jsonpath.JsonPath;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
-import net.minidev.json.JSONArray;
+
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.codec.binary.Base64OutputStream;
import org.apache.commons.codec.binary.Hex;
+import com.jayway.jsonpath.JsonPath;
+
+import net.minidev.json.JSONArray;
+
public class DHPUtils {
- public static String md5(final String s) {
- try {
- final MessageDigest md = MessageDigest.getInstance("MD5");
- md.update(s.getBytes("UTF-8"));
- return new String(Hex.encodeHex(md.digest()));
- } catch (final Exception e) {
- System.err.println("Error creating id");
- return null;
- }
- }
+ public static String md5(final String s) {
+ try {
+ final MessageDigest md = MessageDigest.getInstance("MD5");
+ md.update(s.getBytes(StandardCharsets.UTF_8));
+ return new String(Hex.encodeHex(md.digest()));
+ } catch (final Exception e) {
+ System.err.println("Error creating id");
+ return null;
+ }
+ }
- public static String generateIdentifier(final String originalId, final String nsPrefix) {
- return String.format("%s::%s", nsPrefix, DHPUtils.md5(originalId));
- }
+ public static String generateIdentifier(final String originalId, final String nsPrefix) {
+ return String.format("%s::%s", nsPrefix, DHPUtils.md5(originalId));
+ }
- public static String compressString(final String input) {
- try (ByteArrayOutputStream out = new ByteArrayOutputStream();
- Base64OutputStream b64os = new Base64OutputStream(out)) {
- GZIPOutputStream gzip = new GZIPOutputStream(b64os);
- gzip.write(input.getBytes(StandardCharsets.UTF_8));
- gzip.close();
- return out.toString();
- } catch (Throwable e) {
- return null;
- }
- }
+ public static String compressString(final String input) {
+ try (ByteArrayOutputStream out = new ByteArrayOutputStream();
+ Base64OutputStream b64os = new Base64OutputStream(out)) {
+ GZIPOutputStream gzip = new GZIPOutputStream(b64os);
+ gzip.write(input.getBytes(StandardCharsets.UTF_8));
+ gzip.close();
+ return out.toString();
+ } catch (Throwable e) {
+ return null;
+ }
+ }
- public static String decompressString(final String input) {
- byte[] byteArray = Base64.decodeBase64(input.getBytes());
- int len;
- try (GZIPInputStream gis = new GZIPInputStream(new ByteArrayInputStream((byteArray)));
- ByteArrayOutputStream bos = new ByteArrayOutputStream(byteArray.length)) {
- byte[] buffer = new byte[1024];
- while ((len = gis.read(buffer)) != -1) {
- bos.write(buffer, 0, len);
- }
- return bos.toString();
- } catch (Exception e) {
- return null;
- }
- }
+ public static String decompressString(final String input) {
+ byte[] byteArray = Base64.decodeBase64(input.getBytes());
+ int len;
+ try (GZIPInputStream gis = new GZIPInputStream(new ByteArrayInputStream((byteArray)));
+ ByteArrayOutputStream bos = new ByteArrayOutputStream(byteArray.length)) {
+ byte[] buffer = new byte[1024];
+ while ((len = gis.read(buffer)) != -1) {
+ bos.write(buffer, 0, len);
+ }
+ return bos.toString();
+ } catch (Exception e) {
+ return null;
+ }
+ }
- public static String getJPathString(final String jsonPath, final String json) {
- try {
- Object o = JsonPath.read(json, jsonPath);
- if (o instanceof String) return (String) o;
- if (o instanceof JSONArray && ((JSONArray) o).size() > 0)
- return (String) ((JSONArray) o).get(0);
- return o.toString();
- } catch (Exception e) {
- return "";
- }
- }
+ public static String getJPathString(final String jsonPath, final String json) {
+ try {
+ Object o = JsonPath.read(json, jsonPath);
+ if (o instanceof String)
+ return (String) o;
+ if (o instanceof JSONArray && ((JSONArray) o).size() > 0)
+ return (String) ((JSONArray) o).get(0);
+ return o.toString();
+ } catch (Exception e) {
+ return "";
+ }
+ }
}
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/utils/ISLookupClientFactory.java b/dhp-common/src/main/java/eu/dnetlib/dhp/utils/ISLookupClientFactory.java
index b6f3f111a..97fe4b9d8 100644
--- a/dhp-common/src/main/java/eu/dnetlib/dhp/utils/ISLookupClientFactory.java
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/utils/ISLookupClientFactory.java
@@ -1,24 +1,26 @@
+
package eu.dnetlib.dhp.utils;
-import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.cxf.jaxws.JaxWsProxyFactoryBean;
+import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
+
public class ISLookupClientFactory {
- private static final Log log = LogFactory.getLog(ISLookupClientFactory.class);
+ private static final Log log = LogFactory.getLog(ISLookupClientFactory.class);
- public static ISLookUpService getLookUpService(final String isLookupUrl) {
- return getServiceStub(ISLookUpService.class, isLookupUrl);
- }
+ public static ISLookUpService getLookUpService(final String isLookupUrl) {
+ return getServiceStub(ISLookUpService.class, isLookupUrl);
+ }
- @SuppressWarnings("unchecked")
- private static T getServiceStub(final Class clazz, final String endpoint) {
- log.info(String.format("creating %s stub from %s", clazz.getName(), endpoint));
- final JaxWsProxyFactoryBean jaxWsProxyFactory = new JaxWsProxyFactoryBean();
- jaxWsProxyFactory.setServiceClass(clazz);
- jaxWsProxyFactory.setAddress(endpoint);
- return (T) jaxWsProxyFactory.create();
- }
+ @SuppressWarnings("unchecked")
+ private static T getServiceStub(final Class clazz, final String endpoint) {
+ log.info(String.format("creating %s stub from %s", clazz.getName(), endpoint));
+ final JaxWsProxyFactoryBean jaxWsProxyFactory = new JaxWsProxyFactoryBean();
+ jaxWsProxyFactory.setServiceClass(clazz);
+ jaxWsProxyFactory.setAddress(endpoint);
+ return (T) jaxWsProxyFactory.create();
+ }
}
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/AbstractExtensionFunction.java b/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/AbstractExtensionFunction.java
index 57bd130cb..9b00b908c 100644
--- a/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/AbstractExtensionFunction.java
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/AbstractExtensionFunction.java
@@ -1,3 +1,4 @@
+
package eu.dnetlib.dhp.utils.saxon;
import net.sf.saxon.expr.XPathContext;
@@ -9,25 +10,24 @@ import net.sf.saxon.trans.XPathException;
public abstract class AbstractExtensionFunction extends ExtensionFunctionDefinition {
- public static String DEFAULT_SAXON_EXT_NS_URI =
- "http://www.d-net.research-infrastructures.eu/saxon-extension";
+ public static String DEFAULT_SAXON_EXT_NS_URI = "http://www.d-net.research-infrastructures.eu/saxon-extension";
- public abstract String getName();
+ public abstract String getName();
- public abstract Sequence doCall(XPathContext context, Sequence[] arguments) throws XPathException;
+ public abstract Sequence doCall(XPathContext context, Sequence[] arguments) throws XPathException;
- @Override
- public StructuredQName getFunctionQName() {
- return new StructuredQName("dnet", DEFAULT_SAXON_EXT_NS_URI, getName());
- }
+ @Override
+ public StructuredQName getFunctionQName() {
+ return new StructuredQName("dnet", DEFAULT_SAXON_EXT_NS_URI, getName());
+ }
- @Override
- public ExtensionFunctionCall makeCallExpression() {
- return new ExtensionFunctionCall() {
- @Override
- public Sequence call(XPathContext context, Sequence[] arguments) throws XPathException {
- return doCall(context, arguments);
- }
- };
- }
+ @Override
+ public ExtensionFunctionCall makeCallExpression() {
+ return new ExtensionFunctionCall() {
+ @Override
+ public Sequence call(XPathContext context, Sequence[] arguments) throws XPathException {
+ return doCall(context, arguments);
+ }
+ };
+ }
}
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/ExtractYear.java b/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/ExtractYear.java
index 38ecb6377..c7e311b02 100644
--- a/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/ExtractYear.java
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/ExtractYear.java
@@ -1,9 +1,11 @@
+
package eu.dnetlib.dhp.utils.saxon;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.GregorianCalendar;
+
import net.sf.saxon.expr.XPathContext;
import net.sf.saxon.om.Item;
import net.sf.saxon.om.Sequence;
@@ -13,55 +15,59 @@ import net.sf.saxon.value.StringValue;
public class ExtractYear extends AbstractExtensionFunction {
- private static final String[] dateFormats = {"yyyy-MM-dd", "yyyy/MM/dd"};
+ private static final String[] dateFormats = {
+ "yyyy-MM-dd", "yyyy/MM/dd"
+ };
- @Override
- public String getName() {
- return "extractYear";
- }
+ @Override
+ public String getName() {
+ return "extractYear";
+ }
- @Override
- public Sequence doCall(XPathContext context, Sequence[] arguments) throws XPathException {
- if (arguments == null | arguments.length == 0) {
- return new StringValue("");
- }
- final Item item = arguments[0].head();
- if (item == null) {
- return new StringValue("");
- }
- return new StringValue(_year(item.getStringValue()));
- }
+ @Override
+ public Sequence doCall(XPathContext context, Sequence[] arguments) throws XPathException {
+ if (arguments == null | arguments.length == 0) {
+ return new StringValue("");
+ }
+ final Item item = arguments[0].head();
+ if (item == null) {
+ return new StringValue("");
+ }
+ return new StringValue(_year(item.getStringValue()));
+ }
- @Override
- public int getMinimumNumberOfArguments() {
- return 0;
- }
+ @Override
+ public int getMinimumNumberOfArguments() {
+ return 0;
+ }
- @Override
- public int getMaximumNumberOfArguments() {
- return 1;
- }
+ @Override
+ public int getMaximumNumberOfArguments() {
+ return 1;
+ }
- @Override
- public SequenceType[] getArgumentTypes() {
- return new SequenceType[] {SequenceType.OPTIONAL_ITEM};
- }
+ @Override
+ public SequenceType[] getArgumentTypes() {
+ return new SequenceType[] {
+ SequenceType.OPTIONAL_ITEM
+ };
+ }
- @Override
- public SequenceType getResultType(SequenceType[] suppliedArgumentTypes) {
- return SequenceType.SINGLE_STRING;
- }
+ @Override
+ public SequenceType getResultType(SequenceType[] suppliedArgumentTypes) {
+ return SequenceType.SINGLE_STRING;
+ }
- private String _year(String s) {
- Calendar c = new GregorianCalendar();
- for (String format : dateFormats) {
- try {
- c.setTime(new SimpleDateFormat(format).parse(s));
- String year = String.valueOf(c.get(Calendar.YEAR));
- return year;
- } catch (ParseException e) {
- }
- }
- return "";
- }
+ private String _year(String s) {
+ Calendar c = new GregorianCalendar();
+ for (String format : dateFormats) {
+ try {
+ c.setTime(new SimpleDateFormat(format).parse(s));
+ String year = String.valueOf(c.get(Calendar.YEAR));
+ return year;
+ } catch (ParseException e) {
+ }
+ }
+ return "";
+ }
}
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/NormalizeDate.java b/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/NormalizeDate.java
index def4fdfc7..9fb60e145 100644
--- a/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/NormalizeDate.java
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/NormalizeDate.java
@@ -1,8 +1,10 @@
+
package eu.dnetlib.dhp.utils.saxon;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
+
import net.sf.saxon.expr.XPathContext;
import net.sf.saxon.om.Sequence;
import net.sf.saxon.trans.XPathException;
@@ -11,57 +13,59 @@ import net.sf.saxon.value.StringValue;
public class NormalizeDate extends AbstractExtensionFunction {
- private static final String[] normalizeDateFormats = {
- "yyyy-MM-dd'T'hh:mm:ss", "yyyy-MM-dd", "yyyy/MM/dd", "yyyy"
- };
+ private static final String[] normalizeDateFormats = {
+ "yyyy-MM-dd'T'hh:mm:ss", "yyyy-MM-dd", "yyyy/MM/dd", "yyyy"
+ };
- private static final String normalizeOutFormat = new String("yyyy-MM-dd'T'hh:mm:ss'Z'");
+ private static final String normalizeOutFormat = "yyyy-MM-dd'T'hh:mm:ss'Z'";
- @Override
- public String getName() {
- return "normalizeDate";
- }
+ @Override
+ public String getName() {
+ return "normalizeDate";
+ }
- @Override
- public Sequence doCall(XPathContext context, Sequence[] arguments) throws XPathException {
- if (arguments == null | arguments.length == 0) {
- return new StringValue("");
- }
- String s = arguments[0].head().getStringValue();
- return new StringValue(_year(s));
- }
+ @Override
+ public Sequence doCall(XPathContext context, Sequence[] arguments) throws XPathException {
+ if (arguments == null | arguments.length == 0) {
+ return new StringValue("");
+ }
+ String s = arguments[0].head().getStringValue();
+ return new StringValue(_year(s));
+ }
- @Override
- public int getMinimumNumberOfArguments() {
- return 0;
- }
+ @Override
+ public int getMinimumNumberOfArguments() {
+ return 0;
+ }
- @Override
- public int getMaximumNumberOfArguments() {
- return 1;
- }
+ @Override
+ public int getMaximumNumberOfArguments() {
+ return 1;
+ }
- @Override
- public SequenceType[] getArgumentTypes() {
- return new SequenceType[] {SequenceType.OPTIONAL_ITEM};
- }
+ @Override
+ public SequenceType[] getArgumentTypes() {
+ return new SequenceType[] {
+ SequenceType.OPTIONAL_ITEM
+ };
+ }
- @Override
- public SequenceType getResultType(SequenceType[] suppliedArgumentTypes) {
- return SequenceType.SINGLE_STRING;
- }
+ @Override
+ public SequenceType getResultType(SequenceType[] suppliedArgumentTypes) {
+ return SequenceType.SINGLE_STRING;
+ }
- private String _year(String s) {
- final String date = s != null ? s.trim() : "";
+ private String _year(String s) {
+ final String date = s != null ? s.trim() : "";
- for (String format : normalizeDateFormats) {
- try {
- Date parse = new SimpleDateFormat(format).parse(date);
- String res = new SimpleDateFormat(normalizeOutFormat).format(parse);
- return res;
- } catch (ParseException e) {
- }
- }
- return "";
- }
+ for (String format : normalizeDateFormats) {
+ try {
+ Date parse = new SimpleDateFormat(format).parse(date);
+ String res = new SimpleDateFormat(normalizeOutFormat).format(parse);
+ return res;
+ } catch (ParseException e) {
+ }
+ }
+ return "";
+ }
}
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/PickFirst.java b/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/PickFirst.java
index 73159c617..46ecafd0a 100644
--- a/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/PickFirst.java
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/PickFirst.java
@@ -1,59 +1,63 @@
+
package eu.dnetlib.dhp.utils.saxon;
+import org.apache.commons.lang3.StringUtils;
+
import net.sf.saxon.expr.XPathContext;
import net.sf.saxon.om.Item;
import net.sf.saxon.om.Sequence;
import net.sf.saxon.trans.XPathException;
import net.sf.saxon.value.SequenceType;
import net.sf.saxon.value.StringValue;
-import org.apache.commons.lang3.StringUtils;
public class PickFirst extends AbstractExtensionFunction {
- @Override
- public String getName() {
- return "pickFirst";
- }
+ @Override
+ public String getName() {
+ return "pickFirst";
+ }
- @Override
- public Sequence doCall(XPathContext context, Sequence[] arguments) throws XPathException {
- if (arguments == null | arguments.length == 0) {
- return new StringValue("");
- }
+ @Override
+ public Sequence doCall(XPathContext context, Sequence[] arguments) throws XPathException {
+ if (arguments == null | arguments.length == 0) {
+ return new StringValue("");
+ }
- final String s1 = getValue(arguments[0]);
- final String s2 = getValue(arguments[1]);
+ final String s1 = getValue(arguments[0]);
+ final String s2 = getValue(arguments[1]);
- return new StringValue(StringUtils.isNotBlank(s1) ? s1 : StringUtils.isNotBlank(s2) ? s2 : "");
- }
+ return new StringValue(StringUtils.isNotBlank(s1) ? s1 : StringUtils.isNotBlank(s2) ? s2 : "");
+ }
- private String getValue(final Sequence arg) throws XPathException {
- if (arg != null) {
- final Item item = arg.head();
- if (item != null) {
- return item.getStringValue();
- }
- }
- return "";
- }
+ private String getValue(final Sequence arg) throws XPathException {
+ if (arg != null) {
+ final Item item = arg.head();
+ if (item != null) {
+ return item.getStringValue();
+ }
+ }
+ return "";
+ }
- @Override
- public int getMinimumNumberOfArguments() {
- return 0;
- }
+ @Override
+ public int getMinimumNumberOfArguments() {
+ return 0;
+ }
- @Override
- public int getMaximumNumberOfArguments() {
- return 2;
- }
+ @Override
+ public int getMaximumNumberOfArguments() {
+ return 2;
+ }
- @Override
- public SequenceType[] getArgumentTypes() {
- return new SequenceType[] {SequenceType.OPTIONAL_ITEM};
- }
+ @Override
+ public SequenceType[] getArgumentTypes() {
+ return new SequenceType[] {
+ SequenceType.OPTIONAL_ITEM
+ };
+ }
- @Override
- public SequenceType getResultType(SequenceType[] suppliedArgumentTypes) {
- return SequenceType.SINGLE_STRING;
- }
+ @Override
+ public SequenceType getResultType(SequenceType[] suppliedArgumentTypes) {
+ return SequenceType.SINGLE_STRING;
+ }
}
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/SaxonTransformerFactory.java b/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/SaxonTransformerFactory.java
index 18ce51887..b85d866f1 100644
--- a/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/SaxonTransformerFactory.java
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/SaxonTransformerFactory.java
@@ -1,29 +1,32 @@
+
package eu.dnetlib.dhp.utils.saxon;
import java.io.StringReader;
+
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.stream.StreamSource;
+
import net.sf.saxon.Configuration;
import net.sf.saxon.TransformerFactoryImpl;
public class SaxonTransformerFactory {
- /**
- * Creates the index record transformer from the given XSLT
- *
- * @param xslt
- * @return
- * @throws TransformerException
- */
- public static Transformer newInstance(final String xslt) throws TransformerException {
+ /**
+ * Creates the index record transformer from the given XSLT
+ *
+ * @param xslt the XSLT stylesheet source used to build the transformer
+ * @return a Transformer configured with the D-Net Saxon extension functions
+ * @throws TransformerException
+ */
+ public static Transformer newInstance(final String xslt) throws TransformerException {
- final TransformerFactoryImpl factory = new TransformerFactoryImpl();
- final Configuration conf = factory.getConfiguration();
- conf.registerExtensionFunction(new ExtractYear());
- conf.registerExtensionFunction(new NormalizeDate());
- conf.registerExtensionFunction(new PickFirst());
+ final TransformerFactoryImpl factory = new TransformerFactoryImpl();
+ final Configuration conf = factory.getConfiguration();
+ conf.registerExtensionFunction(new ExtractYear());
+ conf.registerExtensionFunction(new NormalizeDate());
+ conf.registerExtensionFunction(new PickFirst());
- return factory.newTransformer(new StreamSource(new StringReader(xslt)));
- }
+ return factory.newTransformer(new StreamSource(new StringReader(xslt)));
+ }
}
diff --git a/dhp-common/src/main/java/eu/dnetlib/message/Message.java b/dhp-common/src/main/java/eu/dnetlib/message/Message.java
index b62afb19a..fc1c38291 100644
--- a/dhp-common/src/main/java/eu/dnetlib/message/Message.java
+++ b/dhp-common/src/main/java/eu/dnetlib/message/Message.java
@@ -1,73 +1,76 @@
+
package eu.dnetlib.message;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import java.util.Map;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
public class Message {
- private String workflowId;
+ private String workflowId;
- private String jobName;
+ private String jobName;
- private MessageType type;
+ private MessageType type;
- private Map body;
+ private Map body;
- public static Message fromJson(final String json) throws IOException {
- final ObjectMapper jsonMapper = new ObjectMapper();
- return jsonMapper.readValue(json, Message.class);
- }
+ public static Message fromJson(final String json) throws IOException {
+ final ObjectMapper jsonMapper = new ObjectMapper();
+ return jsonMapper.readValue(json, Message.class);
+ }
- public Message() {}
+ public Message() {
+ }
- public Message(String workflowId, String jobName, MessageType type, Map body) {
- this.workflowId = workflowId;
- this.jobName = jobName;
- this.type = type;
- this.body = body;
- }
+ public Message(String workflowId, String jobName, MessageType type, Map body) {
+ this.workflowId = workflowId;
+ this.jobName = jobName;
+ this.type = type;
+ this.body = body;
+ }
- public String getWorkflowId() {
- return workflowId;
- }
+ public String getWorkflowId() {
+ return workflowId;
+ }
- public void setWorkflowId(String workflowId) {
- this.workflowId = workflowId;
- }
+ public void setWorkflowId(String workflowId) {
+ this.workflowId = workflowId;
+ }
- public String getJobName() {
- return jobName;
- }
+ public String getJobName() {
+ return jobName;
+ }
- public void setJobName(String jobName) {
- this.jobName = jobName;
- }
+ public void setJobName(String jobName) {
+ this.jobName = jobName;
+ }
- public MessageType getType() {
- return type;
- }
+ public MessageType getType() {
+ return type;
+ }
- public void setType(MessageType type) {
- this.type = type;
- }
+ public void setType(MessageType type) {
+ this.type = type;
+ }
- public Map getBody() {
- return body;
- }
+ public Map getBody() {
+ return body;
+ }
- public void setBody(Map body) {
- this.body = body;
- }
+ public void setBody(Map body) {
+ this.body = body;
+ }
- @Override
- public String toString() {
- final ObjectMapper jsonMapper = new ObjectMapper();
- try {
- return jsonMapper.writeValueAsString(this);
- } catch (JsonProcessingException e) {
- return null;
- }
- }
+ @Override
+ public String toString() {
+ final ObjectMapper jsonMapper = new ObjectMapper();
+ try {
+ return jsonMapper.writeValueAsString(this);
+ } catch (JsonProcessingException e) {
+ return null;
+ }
+ }
}
diff --git a/dhp-common/src/main/java/eu/dnetlib/message/MessageConsumer.java b/dhp-common/src/main/java/eu/dnetlib/message/MessageConsumer.java
index 3df712a62..fb3f0bd95 100644
--- a/dhp-common/src/main/java/eu/dnetlib/message/MessageConsumer.java
+++ b/dhp-common/src/main/java/eu/dnetlib/message/MessageConsumer.java
@@ -1,45 +1,47 @@
+
package eu.dnetlib.message;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.concurrent.LinkedBlockingQueue;
+
import com.rabbitmq.client.AMQP;
import com.rabbitmq.client.Channel;
import com.rabbitmq.client.DefaultConsumer;
import com.rabbitmq.client.Envelope;
-import java.io.IOException;
-import java.nio.charset.StandardCharsets;
-import java.util.concurrent.LinkedBlockingQueue;
public class MessageConsumer extends DefaultConsumer {
- final LinkedBlockingQueue queueMessages;
+ final LinkedBlockingQueue queueMessages;
- /**
- * Constructs a new instance and records its association to the passed-in channel.
- *
- * @param channel the channel to which this consumer is attached
- * @param queueMessages
- */
- public MessageConsumer(Channel channel, LinkedBlockingQueue queueMessages) {
- super(channel);
- this.queueMessages = queueMessages;
- }
+ /**
+ * Constructs a new instance and records its association to the passed-in channel.
+ *
+ * @param channel the channel to which this consumer is attached
+ * @param queueMessages the blocking queue into which received messages are placed
+ */
+ public MessageConsumer(Channel channel, LinkedBlockingQueue queueMessages) {
+ super(channel);
+ this.queueMessages = queueMessages;
+ }
- @Override
- public void handleDelivery(
- String consumerTag, Envelope envelope, AMQP.BasicProperties properties, byte[] body)
- throws IOException {
- final String json = new String(body, StandardCharsets.UTF_8);
- Message message = Message.fromJson(json);
- try {
- this.queueMessages.put(message);
- System.out.println("Receiving Message " + message);
- } catch (InterruptedException e) {
- if (message.getType() == MessageType.REPORT)
- throw new RuntimeException("Error on sending message");
- else {
- // TODO LOGGING EXCEPTION
- }
- } finally {
- getChannel().basicAck(envelope.getDeliveryTag(), false);
- }
- }
+ @Override
+ public void handleDelivery(
+ String consumerTag, Envelope envelope, AMQP.BasicProperties properties, byte[] body)
+ throws IOException {
+ final String json = new String(body, StandardCharsets.UTF_8);
+ Message message = Message.fromJson(json);
+ try {
+ this.queueMessages.put(message);
+ System.out.println("Receiving Message " + message);
+ } catch (InterruptedException e) {
+ if (message.getType() == MessageType.REPORT)
+ throw new RuntimeException("Error on sending message");
+ else {
+ // TODO log the InterruptedException instead of silently dropping it
+ }
+ } finally {
+ getChannel().basicAck(envelope.getDeliveryTag(), false);
+ }
+ }
}
diff --git a/dhp-common/src/main/java/eu/dnetlib/message/MessageManager.java b/dhp-common/src/main/java/eu/dnetlib/message/MessageManager.java
index 8370a6cc8..5ca79f3cc 100644
--- a/dhp-common/src/main/java/eu/dnetlib/message/MessageManager.java
+++ b/dhp-common/src/main/java/eu/dnetlib/message/MessageManager.java
@@ -1,134 +1,136 @@
+
package eu.dnetlib.message;
-import com.rabbitmq.client.Channel;
-import com.rabbitmq.client.Connection;
-import com.rabbitmq.client.ConnectionFactory;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeoutException;
+import com.rabbitmq.client.Channel;
+import com.rabbitmq.client.Connection;
+import com.rabbitmq.client.ConnectionFactory;
+
public class MessageManager {
- private final String messageHost;
+ private final String messageHost;
- private final String username;
+ private final String username;
- private final String password;
+ private final String password;
- private Connection connection;
+ private Connection connection;
- private Map channels = new HashMap<>();
+ private final Map channels = new HashMap<>();
- private boolean durable;
+ private boolean durable;
- private boolean autodelete;
+ private boolean autodelete;
- private final LinkedBlockingQueue queueMessages;
+ private final LinkedBlockingQueue queueMessages;
- public MessageManager(
- String messageHost,
- String username,
- String password,
- final LinkedBlockingQueue queueMessages) {
- this.queueMessages = queueMessages;
- this.messageHost = messageHost;
- this.username = username;
- this.password = password;
- }
+ public MessageManager(
+ String messageHost,
+ String username,
+ String password,
+ final LinkedBlockingQueue queueMessages) {
+ this.queueMessages = queueMessages;
+ this.messageHost = messageHost;
+ this.username = username;
+ this.password = password;
+ }
- public MessageManager(
- String messageHost,
- String username,
- String password,
- boolean durable,
- boolean autodelete,
- final LinkedBlockingQueue queueMessages) {
- this.queueMessages = queueMessages;
- this.messageHost = messageHost;
- this.username = username;
- this.password = password;
+ public MessageManager(
+ String messageHost,
+ String username,
+ String password,
+ boolean durable,
+ boolean autodelete,
+ final LinkedBlockingQueue queueMessages) {
+ this.queueMessages = queueMessages;
+ this.messageHost = messageHost;
+ this.username = username;
+ this.password = password;
- this.durable = durable;
- this.autodelete = autodelete;
- }
+ this.durable = durable;
+ this.autodelete = autodelete;
+ }
- private Connection createConnection() throws IOException, TimeoutException {
- ConnectionFactory factory = new ConnectionFactory();
- factory.setHost(this.messageHost);
- factory.setUsername(this.username);
- factory.setPassword(this.password);
- return factory.newConnection();
- }
+ private Connection createConnection() throws IOException, TimeoutException {
+ ConnectionFactory factory = new ConnectionFactory();
+ factory.setHost(this.messageHost);
+ factory.setUsername(this.username);
+ factory.setPassword(this.password);
+ return factory.newConnection();
+ }
- private Channel createChannel(
- final Connection connection,
- final String queueName,
- final boolean durable,
- final boolean autodelete)
- throws Exception {
- Map args = new HashMap<>();
- args.put("x-message-ttl", 10000);
- Channel channel = connection.createChannel();
- channel.queueDeclare(queueName, durable, false, this.autodelete, args);
- return channel;
- }
+ private Channel createChannel(
+ final Connection connection,
+ final String queueName,
+ final boolean durable,
+ final boolean autodelete)
+ throws Exception {
+ Map args = new HashMap<>();
+ args.put("x-message-ttl", 10000);
+ Channel channel = connection.createChannel();
+ channel.queueDeclare(queueName, durable, false, this.autodelete, args);
+ return channel;
+ }
- private Channel getOrCreateChannel(final String queueName, boolean durable, boolean autodelete)
- throws Exception {
- if (channels.containsKey(queueName)) {
- return channels.get(queueName);
- }
+ private Channel getOrCreateChannel(final String queueName, boolean durable, boolean autodelete)
+ throws Exception {
+ if (channels.containsKey(queueName)) {
+ return channels.get(queueName);
+ }
- if (this.connection == null) {
- this.connection = createConnection();
- }
- channels.put(queueName, createChannel(this.connection, queueName, durable, autodelete));
- return channels.get(queueName);
- }
+ if (this.connection == null) {
+ this.connection = createConnection();
+ }
+ channels.put(queueName, createChannel(this.connection, queueName, durable, autodelete));
+ return channels.get(queueName);
+ }
- public void close() throws IOException {
- channels
- .values()
- .forEach(
- ch -> {
- try {
- ch.close();
- } catch (Exception e) {
- // TODO LOG
- }
- });
+ public void close() throws IOException {
+ channels
+ .values()
+ .forEach(
+ ch -> {
+ try {
+ ch.close();
+ } catch (Exception e) {
+ // TODO log the failure to close this channel
+ }
+ });
- this.connection.close();
- }
+ this.connection.close();
+ }
- public boolean sendMessage(final Message message, String queueName) throws Exception {
- try {
- Channel channel = getOrCreateChannel(queueName, this.durable, this.autodelete);
- channel.basicPublish("", queueName, null, message.toString().getBytes());
- return true;
- } catch (Throwable e) {
- throw new RuntimeException(e);
- }
- }
+ public boolean sendMessage(final Message message, String queueName) throws Exception {
+ try {
+ Channel channel = getOrCreateChannel(queueName, this.durable, this.autodelete);
+ channel.basicPublish("", queueName, null, message.toString().getBytes());
+ return true;
+ } catch (Throwable e) {
+ throw new RuntimeException(e);
+ }
+ }
- public boolean sendMessage(
- final Message message, String queueName, boolean durable_var, boolean autodelete_var)
- throws Exception {
- try {
- Channel channel = getOrCreateChannel(queueName, durable_var, autodelete_var);
- channel.basicPublish("", queueName, null, message.toString().getBytes());
- return true;
- } catch (Throwable e) {
- throw new RuntimeException(e);
- }
- }
+ public boolean sendMessage(
+ final Message message, String queueName, boolean durable_var, boolean autodelete_var)
+ throws Exception {
+ try {
+ Channel channel = getOrCreateChannel(queueName, durable_var, autodelete_var);
+ channel.basicPublish("", queueName, null, message.toString().getBytes());
+ return true;
+ } catch (Throwable e) {
+ throw new RuntimeException(e);
+ }
+ }
- public void startConsumingMessage(
- final String queueName, final boolean durable, final boolean autodelete) throws Exception {
+ public void startConsumingMessage(
+ final String queueName, final boolean durable, final boolean autodelete) throws Exception {
- Channel channel = createChannel(createConnection(), queueName, durable, autodelete);
- channel.basicConsume(queueName, false, new MessageConsumer(channel, queueMessages));
- }
+ Channel channel = createChannel(createConnection(), queueName, durable, autodelete);
+ channel.basicConsume(queueName, false, new MessageConsumer(channel, queueMessages));
+ }
}
diff --git a/dhp-common/src/main/java/eu/dnetlib/message/MessageType.java b/dhp-common/src/main/java/eu/dnetlib/message/MessageType.java
index edca90061..72cbda252 100644
--- a/dhp-common/src/main/java/eu/dnetlib/message/MessageType.java
+++ b/dhp-common/src/main/java/eu/dnetlib/message/MessageType.java
@@ -1,6 +1,6 @@
+
package eu.dnetlib.message;
public enum MessageType {
- ONGOING,
- REPORT
+ ONGOING, REPORT
}
diff --git a/dhp-common/src/main/java/eu/dnetlib/scholexplorer/relation/RelInfo.java b/dhp-common/src/main/java/eu/dnetlib/scholexplorer/relation/RelInfo.java
index 1ae6e8ead..e07fcef66 100644
--- a/dhp-common/src/main/java/eu/dnetlib/scholexplorer/relation/RelInfo.java
+++ b/dhp-common/src/main/java/eu/dnetlib/scholexplorer/relation/RelInfo.java
@@ -1,24 +1,25 @@
+
package eu.dnetlib.scholexplorer.relation;
import java.io.Serializable;
public class RelInfo implements Serializable {
- private String original;
- private String inverse;
+ private String original;
+ private String inverse;
- public String getOriginal() {
- return original;
- }
+ public String getOriginal() {
+ return original;
+ }
- public void setOriginal(String original) {
- this.original = original;
- }
+ public void setOriginal(String original) {
+ this.original = original;
+ }
- public String getInverse() {
- return inverse;
- }
+ public String getInverse() {
+ return inverse;
+ }
- public void setInverse(String inverse) {
- this.inverse = inverse;
- }
+ public void setInverse(String inverse) {
+ this.inverse = inverse;
+ }
}
diff --git a/dhp-common/src/main/java/eu/dnetlib/scholexplorer/relation/RelationMapper.java b/dhp-common/src/main/java/eu/dnetlib/scholexplorer/relation/RelationMapper.java
index 9cc995821..eb708c390 100644
--- a/dhp-common/src/main/java/eu/dnetlib/scholexplorer/relation/RelationMapper.java
+++ b/dhp-common/src/main/java/eu/dnetlib/scholexplorer/relation/RelationMapper.java
@@ -1,18 +1,20 @@
+
package eu.dnetlib.scholexplorer.relation;
-import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.Serializable;
import java.util.HashMap;
+
import org.apache.commons.io.IOUtils;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
public class RelationMapper extends HashMap implements Serializable {
- public static RelationMapper load() throws Exception {
+ public static RelationMapper load() throws Exception {
- final String json =
- IOUtils.toString(RelationMapper.class.getResourceAsStream("relations.json"));
+ final String json = IOUtils.toString(RelationMapper.class.getResourceAsStream("relations.json"));
- ObjectMapper mapper = new ObjectMapper();
- return mapper.readValue(json, RelationMapper.class);
- }
+ ObjectMapper mapper = new ObjectMapper();
+ return mapper.readValue(json, RelationMapper.class);
+ }
}
diff --git a/dhp-common/src/test/java/eu/dnetlib/dhp/application/ArgumentApplicationParserTest.java b/dhp-common/src/test/java/eu/dnetlib/dhp/application/ArgumentApplicationParserTest.java
index bb7351745..e14020830 100644
--- a/dhp-common/src/test/java/eu/dnetlib/dhp/application/ArgumentApplicationParserTest.java
+++ b/dhp-common/src/test/java/eu/dnetlib/dhp/application/ArgumentApplicationParserTest.java
@@ -1,3 +1,4 @@
+
package eu.dnetlib.dhp.application;
import static org.junit.jupiter.api.Assertions.assertEquals;
@@ -8,58 +9,59 @@ import org.junit.jupiter.api.Test;
public class ArgumentApplicationParserTest {
- @Test
- public void testParseParameter() throws Exception {
- final String jsonConfiguration =
- IOUtils.toString(
- this.getClass().getResourceAsStream("/eu/dnetlib/application/parameters.json"));
- assertNotNull(jsonConfiguration);
- ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
- parser.parseArgument(
- new String[] {
- "-p",
- "value0",
- "-a",
- "value1",
- "-n",
- "value2",
- "-u",
- "value3",
- "-ru",
- "value4",
- "-rp",
- "value5",
- "-rh",
- "value6",
- "-ro",
- "value7",
- "-rr",
- "value8",
- "-w",
- "value9",
- "-cc",
- ArgumentApplicationParser.compressArgument(jsonConfiguration)
- });
- assertNotNull(parser.get("hdfsPath"));
- assertNotNull(parser.get("apidescriptor"));
- assertNotNull(parser.get("namenode"));
- assertNotNull(parser.get("userHDFS"));
- assertNotNull(parser.get("rabbitUser"));
- assertNotNull(parser.get("rabbitPassWord"));
- assertNotNull(parser.get("rabbitHost"));
- assertNotNull(parser.get("rabbitOngoingQueue"));
- assertNotNull(parser.get("rabbitReportQueue"));
- assertNotNull(parser.get("workflowId"));
- assertEquals("value0", parser.get("hdfsPath"));
- assertEquals("value1", parser.get("apidescriptor"));
- assertEquals("value2", parser.get("namenode"));
- assertEquals("value3", parser.get("userHDFS"));
- assertEquals("value4", parser.get("rabbitUser"));
- assertEquals("value5", parser.get("rabbitPassWord"));
- assertEquals("value6", parser.get("rabbitHost"));
- assertEquals("value7", parser.get("rabbitOngoingQueue"));
- assertEquals("value8", parser.get("rabbitReportQueue"));
- assertEquals("value9", parser.get("workflowId"));
- assertEquals(jsonConfiguration, parser.get("ccCoco"));
- }
+ @Test
+ public void testParseParameter() throws Exception {
+ final String jsonConfiguration = IOUtils
+ .toString(
+ this.getClass().getResourceAsStream("/eu/dnetlib/application/parameters.json"));
+ assertNotNull(jsonConfiguration);
+ ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
+ parser
+ .parseArgument(
+ new String[] {
+ "-p",
+ "value0",
+ "-a",
+ "value1",
+ "-n",
+ "value2",
+ "-u",
+ "value3",
+ "-ru",
+ "value4",
+ "-rp",
+ "value5",
+ "-rh",
+ "value6",
+ "-ro",
+ "value7",
+ "-rr",
+ "value8",
+ "-w",
+ "value9",
+ "-cc",
+ ArgumentApplicationParser.compressArgument(jsonConfiguration)
+ });
+ assertNotNull(parser.get("hdfsPath"));
+ assertNotNull(parser.get("apidescriptor"));
+ assertNotNull(parser.get("namenode"));
+ assertNotNull(parser.get("userHDFS"));
+ assertNotNull(parser.get("rabbitUser"));
+ assertNotNull(parser.get("rabbitPassWord"));
+ assertNotNull(parser.get("rabbitHost"));
+ assertNotNull(parser.get("rabbitOngoingQueue"));
+ assertNotNull(parser.get("rabbitReportQueue"));
+ assertNotNull(parser.get("workflowId"));
+ assertEquals("value0", parser.get("hdfsPath"));
+ assertEquals("value1", parser.get("apidescriptor"));
+ assertEquals("value2", parser.get("namenode"));
+ assertEquals("value3", parser.get("userHDFS"));
+ assertEquals("value4", parser.get("rabbitUser"));
+ assertEquals("value5", parser.get("rabbitPassWord"));
+ assertEquals("value6", parser.get("rabbitHost"));
+ assertEquals("value7", parser.get("rabbitOngoingQueue"));
+ assertEquals("value8", parser.get("rabbitReportQueue"));
+ assertEquals("value9", parser.get("workflowId"));
+ assertEquals(jsonConfiguration, parser.get("ccCoco"));
+ }
}
diff --git a/dhp-common/src/test/java/eu/dnetlib/dhp/common/HdfsSupportTest.java b/dhp-common/src/test/java/eu/dnetlib/dhp/common/HdfsSupportTest.java
index a8f0bbb0d..870943816 100644
--- a/dhp-common/src/test/java/eu/dnetlib/dhp/common/HdfsSupportTest.java
+++ b/dhp-common/src/test/java/eu/dnetlib/dhp/common/HdfsSupportTest.java
@@ -1,3 +1,4 @@
+
package eu.dnetlib.dhp.common;
import static org.junit.jupiter.api.Assertions.*;
@@ -8,6 +9,7 @@ import java.nio.file.Path;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
+
import org.apache.hadoop.conf.Configuration;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
@@ -15,63 +17,64 @@ import org.junit.jupiter.api.io.TempDir;
public class HdfsSupportTest {
- @Nested
- class Remove {
+ @Nested
+ class Remove {
- @Test
- public void shouldThrowARuntimeExceptionOnError() {
- // when
- assertThrows(RuntimeException.class, () -> HdfsSupport.remove(null, new Configuration()));
- }
+ @Test
+ public void shouldThrowARuntimeExceptionOnError() {
+ // when
+ assertThrows(RuntimeException.class, () -> HdfsSupport.remove(null, new Configuration()));
+ }
- @Test
- public void shouldRemoveADirFromHDFS(@TempDir Path tempDir) {
- // when
- HdfsSupport.remove(tempDir.toString(), new Configuration());
+ @Test
+ public void shouldRemoveADirFromHDFS(@TempDir Path tempDir) {
+ // when
+ HdfsSupport.remove(tempDir.toString(), new Configuration());
- // then
- assertFalse(Files.exists(tempDir));
- }
+ // then
+ assertFalse(Files.exists(tempDir));
+ }
- @Test
- public void shouldRemoveAFileFromHDFS(@TempDir Path tempDir) throws IOException {
- // given
- Path file = Files.createTempFile(tempDir, "p", "s");
+ @Test
+ public void shouldRemoveAFileFromHDFS(@TempDir Path tempDir) throws IOException {
+ // given
+ Path file = Files.createTempFile(tempDir, "p", "s");
- // when
- HdfsSupport.remove(file.toString(), new Configuration());
+ // when
+ HdfsSupport.remove(file.toString(), new Configuration());
- // then
- assertFalse(Files.exists(file));
- }
- }
+ // then
+ assertFalse(Files.exists(file));
+ }
+ }
- @Nested
- class ListFiles {
+ @Nested
+ class ListFiles {
- @Test
- public void shouldThrowARuntimeExceptionOnError() {
- // when
- assertThrows(RuntimeException.class, () -> HdfsSupport.listFiles(null, new Configuration()));
- }
+ @Test
+ public void shouldThrowARuntimeExceptionOnError() {
+ // when
+ assertThrows(RuntimeException.class, () -> HdfsSupport.listFiles(null, new Configuration()));
+ }
- @Test
- public void shouldListFilesLocatedInPath(@TempDir Path tempDir) throws IOException {
- Path subDir1 = Files.createTempDirectory(tempDir, "list_me");
- Path subDir2 = Files.createTempDirectory(tempDir, "list_me");
+ @Test
+ public void shouldListFilesLocatedInPath(@TempDir Path tempDir) throws IOException {
+ Path subDir1 = Files.createTempDirectory(tempDir, "list_me");
+ Path subDir2 = Files.createTempDirectory(tempDir, "list_me");
- // when
- List paths = HdfsSupport.listFiles(tempDir.toString(), new Configuration());
+ // when
+ List paths = HdfsSupport.listFiles(tempDir.toString(), new Configuration());
- // then
- assertEquals(2, paths.size());
- List expecteds =
- Arrays.stream(new String[] {subDir1.toString(), subDir2.toString()})
- .sorted()
- .collect(Collectors.toList());
- List actuals = paths.stream().sorted().collect(Collectors.toList());
- assertTrue(actuals.get(0).contains(expecteds.get(0)));
- assertTrue(actuals.get(1).contains(expecteds.get(1)));
- }
- }
+ // then
+ assertEquals(2, paths.size());
+ List expecteds = Arrays.stream(new String[] {
+ subDir1.toString(), subDir2.toString()
+ })
+ .sorted()
+ .collect(Collectors.toList());
+ List actuals = paths.stream().sorted().collect(Collectors.toList());
+ assertTrue(actuals.get(0).contains(expecteds.get(0)));
+ assertTrue(actuals.get(1).contains(expecteds.get(1)));
+ }
+ }
}
diff --git a/dhp-common/src/test/java/eu/dnetlib/dhp/common/SparkSessionSupportTest.java b/dhp-common/src/test/java/eu/dnetlib/dhp/common/SparkSessionSupportTest.java
index 698b9cea5..2f01c0863 100644
--- a/dhp-common/src/test/java/eu/dnetlib/dhp/common/SparkSessionSupportTest.java
+++ b/dhp-common/src/test/java/eu/dnetlib/dhp/common/SparkSessionSupportTest.java
@@ -1,55 +1,58 @@
+
package eu.dnetlib.dhp.common;
import static org.mockito.Mockito.*;
-import eu.dnetlib.dhp.common.FunctionalInterfaceSupport.ThrowingConsumer;
import java.util.function.Function;
+
import org.apache.spark.SparkConf;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
+import eu.dnetlib.dhp.common.FunctionalInterfaceSupport.ThrowingConsumer;
+
public class SparkSessionSupportTest {
- @Nested
- class RunWithSparkSession {
+ @Nested
+ class RunWithSparkSession {
- @Test
- public void shouldExecuteFunctionAndNotStopSparkSessionWhenSparkSessionIsNotManaged()
- throws Exception {
- // given
- SparkSession spark = mock(SparkSession.class);
- SparkConf conf = mock(SparkConf.class);
- Function sparkSessionBuilder = mock(Function.class);
- when(sparkSessionBuilder.apply(conf)).thenReturn(spark);
- ThrowingConsumer fn = mock(ThrowingConsumer.class);
+ @Test
+ public void shouldExecuteFunctionAndNotStopSparkSessionWhenSparkSessionIsNotManaged()
+ throws Exception {
+ // given
+ SparkSession spark = mock(SparkSession.class);
+ SparkConf conf = mock(SparkConf.class);
+ Function sparkSessionBuilder = mock(Function.class);
+ when(sparkSessionBuilder.apply(conf)).thenReturn(spark);
+ ThrowingConsumer fn = mock(ThrowingConsumer.class);
- // when
- SparkSessionSupport.runWithSparkSession(sparkSessionBuilder, conf, false, fn);
+ // when
+ SparkSessionSupport.runWithSparkSession(sparkSessionBuilder, conf, false, fn);
- // then
- verify(sparkSessionBuilder).apply(conf);
- verify(fn).accept(spark);
- verify(spark, never()).stop();
- }
+ // then
+ verify(sparkSessionBuilder).apply(conf);
+ verify(fn).accept(spark);
+ verify(spark, never()).stop();
+ }
- @Test
- public void shouldExecuteFunctionAndStopSparkSessionWhenSparkSessionIsManaged()
- throws Exception {
- // given
- SparkSession spark = mock(SparkSession.class);
- SparkConf conf = mock(SparkConf.class);
- Function sparkSessionBuilder = mock(Function.class);
- when(sparkSessionBuilder.apply(conf)).thenReturn(spark);
- ThrowingConsumer fn = mock(ThrowingConsumer.class);
+ @Test
+ public void shouldExecuteFunctionAndStopSparkSessionWhenSparkSessionIsManaged()
+ throws Exception {
+ // given
+ SparkSession spark = mock(SparkSession.class);
+ SparkConf conf = mock(SparkConf.class);
+ Function sparkSessionBuilder = mock(Function.class);
+ when(sparkSessionBuilder.apply(conf)).thenReturn(spark);
+ ThrowingConsumer fn = mock(ThrowingConsumer.class);
- // when
- SparkSessionSupport.runWithSparkSession(sparkSessionBuilder, conf, true, fn);
+ // when
+ SparkSessionSupport.runWithSparkSession(sparkSessionBuilder, conf, true, fn);
- // then
- verify(sparkSessionBuilder).apply(conf);
- verify(fn).accept(spark);
- verify(spark, times(1)).stop();
- }
- }
+ // then
+ verify(sparkSessionBuilder).apply(conf);
+ verify(fn).accept(spark);
+ verify(spark, times(1)).stop();
+ }
+ }
}
diff --git a/dhp-common/src/test/java/eu/dnetlib/dhp/model/mdstore/MetadataRecordTest.java b/dhp-common/src/test/java/eu/dnetlib/dhp/model/mdstore/MetadataRecordTest.java
index 84cb08d95..cb4d0ab50 100644
--- a/dhp-common/src/test/java/eu/dnetlib/dhp/model/mdstore/MetadataRecordTest.java
+++ b/dhp-common/src/test/java/eu/dnetlib/dhp/model/mdstore/MetadataRecordTest.java
@@ -1,3 +1,4 @@
+
package eu.dnetlib.dhp.model.mdstore;
import static org.junit.jupiter.api.Assertions.assertTrue;
@@ -6,10 +7,10 @@ import org.junit.jupiter.api.Test;
public class MetadataRecordTest {
- @Test
- public void getTimestamp() {
+ @Test
+ public void getTimestamp() {
- MetadataRecord r = new MetadataRecord();
- assertTrue(r.getDateOfCollection() > 0);
- }
+ MetadataRecord r = new MetadataRecord();
+ assertTrue(r.getDateOfCollection() > 0);
+ }
}
diff --git a/dhp-common/src/test/java/eu/dnetlib/message/MessageTest.java b/dhp-common/src/test/java/eu/dnetlib/message/MessageTest.java
index a514f8573..442f7b5c2 100644
--- a/dhp-common/src/test/java/eu/dnetlib/message/MessageTest.java
+++ b/dhp-common/src/test/java/eu/dnetlib/message/MessageTest.java
@@ -1,3 +1,4 @@
+
package eu.dnetlib.message;
import static org.junit.jupiter.api.Assertions.*;
@@ -5,46 +6,46 @@ import static org.junit.jupiter.api.Assertions.*;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
+
import org.junit.jupiter.api.Test;
public class MessageTest {
- @Test
- public void fromJsonTest() throws IOException {
- Message m = new Message();
- m.setWorkflowId("wId");
- m.setType(MessageType.ONGOING);
- m.setJobName("Collection");
- Map body = new HashMap<>();
- body.put("parsedItem", "300");
- body.put("ExecutionTime", "30s");
+ @Test
+ public void fromJsonTest() throws IOException {
+ Message m = new Message();
+ m.setWorkflowId("wId");
+ m.setType(MessageType.ONGOING);
+ m.setJobName("Collection");
+ Map body = new HashMap<>();
+ body.put("parsedItem", "300");
+ body.put("ExecutionTime", "30s");
- m.setBody(body);
- System.out.println("m = " + m);
- Message m1 = Message.fromJson(m.toString());
- assertEquals(m1.getWorkflowId(), m.getWorkflowId());
- assertEquals(m1.getType(), m.getType());
- assertEquals(m1.getJobName(), m.getJobName());
+ m.setBody(body);
+ System.out.println("m = " + m);
+ Message m1 = Message.fromJson(m.toString());
+ assertEquals(m1.getWorkflowId(), m.getWorkflowId());
+ assertEquals(m1.getType(), m.getType());
+ assertEquals(m1.getJobName(), m.getJobName());
- assertNotNull(m1.getBody());
- m1.getBody().keySet().forEach(it -> assertEquals(m1.getBody().get(it), m.getBody().get(it)));
- assertEquals(m1.getJobName(), m.getJobName());
- }
+ assertNotNull(m1.getBody());
+ m1.getBody().keySet().forEach(it -> assertEquals(m1.getBody().get(it), m.getBody().get(it)));
+ assertEquals(m1.getJobName(), m.getJobName());
+ }
- @Test
- public void toStringTest() {
- final String expectedJson =
- "{\"workflowId\":\"wId\",\"jobName\":\"Collection\",\"type\":\"ONGOING\",\"body\":{\"ExecutionTime\":\"30s\",\"parsedItem\":\"300\"}}";
- Message m = new Message();
- m.setWorkflowId("wId");
- m.setType(MessageType.ONGOING);
- m.setJobName("Collection");
- Map body = new HashMap<>();
- body.put("parsedItem", "300");
- body.put("ExecutionTime", "30s");
+ @Test
+ public void toStringTest() {
+ final String expectedJson = "{\"workflowId\":\"wId\",\"jobName\":\"Collection\",\"type\":\"ONGOING\",\"body\":{\"ExecutionTime\":\"30s\",\"parsedItem\":\"300\"}}";
+ Message m = new Message();
+ m.setWorkflowId("wId");
+ m.setType(MessageType.ONGOING);
+ m.setJobName("Collection");
+ Map body = new HashMap<>();
+ body.put("parsedItem", "300");
+ body.put("ExecutionTime", "30s");
- m.setBody(body);
+ m.setBody(body);
- assertEquals(expectedJson, m.toString());
- }
+ assertEquals(expectedJson, m.toString());
+ }
}
diff --git a/dhp-common/src/test/java/eu/dnetlib/scholexplorer/relation/RelationMapperTest.java b/dhp-common/src/test/java/eu/dnetlib/scholexplorer/relation/RelationMapperTest.java
index 9381cb01f..d1d1ada71 100644
--- a/dhp-common/src/test/java/eu/dnetlib/scholexplorer/relation/RelationMapperTest.java
+++ b/dhp-common/src/test/java/eu/dnetlib/scholexplorer/relation/RelationMapperTest.java
@@ -1,13 +1,14 @@
+
package eu.dnetlib.scholexplorer.relation;
import org.junit.jupiter.api.Test;
public class RelationMapperTest {
- @Test
- public void testLoadRels() throws Exception {
+ @Test
+ public void testLoadRels() throws Exception {
- RelationMapper relationMapper = RelationMapper.load();
- relationMapper.keySet().forEach(System.out::println);
- }
+ RelationMapper relationMapper = RelationMapper.load();
+ relationMapper.keySet().forEach(System.out::println);
+ }
}
diff --git a/dhp-schemas/pom.xml b/dhp-schemas/pom.xml
index 8deb2eab2..4a123ceda 100644
--- a/dhp-schemas/pom.xml
+++ b/dhp-schemas/pom.xml
@@ -12,7 +12,7 @@
dhp-schemas
jar
-
+ This module contains common schema classes meant to be used across the dnet-hadoop submodules
diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/action/AtomicAction.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/action/AtomicAction.java
index c803fab52..84b22c81c 100644
--- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/action/AtomicAction.java
+++ b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/action/AtomicAction.java
@@ -1,36 +1,40 @@
+
package eu.dnetlib.dhp.schema.action;
-import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
-import eu.dnetlib.dhp.schema.oaf.Oaf;
import java.io.Serializable;
+import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
+
+import eu.dnetlib.dhp.schema.oaf.Oaf;
+
@JsonDeserialize(using = AtomicActionDeserializer.class)
public class AtomicAction<T extends Oaf> implements Serializable {
- private Class clazz;
+ private Class<T> clazz;
- private T payload;
+ private T payload;
- public AtomicAction() {}
+ public AtomicAction() {
+ }
- public AtomicAction(Class clazz, T payload) {
- this.clazz = clazz;
- this.payload = payload;
- }
+ public AtomicAction(Class<T> clazz, T payload) {
+ this.clazz = clazz;
+ this.payload = payload;
+ }
- public Class getClazz() {
- return clazz;
- }
+ public Class<T> getClazz() {
+ return clazz;
+ }
- public void setClazz(Class clazz) {
- this.clazz = clazz;
- }
+ public void setClazz(Class<T> clazz) {
+ this.clazz = clazz;
+ }
- public T getPayload() {
- return payload;
- }
+ public T getPayload() {
+ return payload;
+ }
- public void setPayload(T payload) {
- this.payload = payload;
- }
+ public void setPayload(T payload) {
+ this.payload = payload;
+ }
}
diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/action/AtomicActionDeserializer.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/action/AtomicActionDeserializer.java
index 701833c42..7b88e9c7e 100644
--- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/action/AtomicActionDeserializer.java
+++ b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/action/AtomicActionDeserializer.java
@@ -1,29 +1,32 @@
+
package eu.dnetlib.dhp.schema.action;
+import java.io.IOException;
+
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
+
import eu.dnetlib.dhp.schema.oaf.Oaf;
-import java.io.IOException;
public class AtomicActionDeserializer extends JsonDeserializer {
- @Override
- public Object deserialize(JsonParser jp, DeserializationContext ctxt)
- throws IOException, JsonProcessingException {
- JsonNode node = jp.getCodec().readTree(jp);
- String classTag = node.get("clazz").asText();
- JsonNode payload = node.get("payload");
- ObjectMapper mapper = new ObjectMapper();
+ @Override
+ public Object deserialize(JsonParser jp, DeserializationContext ctxt)
+ throws IOException {
+ JsonNode node = jp.getCodec().readTree(jp);
+ String classTag = node.get("clazz").asText();
+ JsonNode payload = node.get("payload");
+ ObjectMapper mapper = new ObjectMapper();
- try {
- final Class> clazz = Class.forName(classTag);
- return new AtomicAction(clazz, (Oaf) mapper.readValue(payload.toString(), clazz));
- } catch (ClassNotFoundException e) {
- throw new IOException(e);
- }
- }
+ try {
+ final Class<?> clazz = Class.forName(classTag);
+ return new AtomicAction(clazz, (Oaf) mapper.readValue(payload.toString(), clazz));
+ } catch (ClassNotFoundException e) {
+ throw new IOException(e);
+ }
+ }
}
diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/common/EntityType.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/common/EntityType.java
index d597ecb53..54f30cf33 100644
--- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/common/EntityType.java
+++ b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/common/EntityType.java
@@ -1,26 +1,21 @@
+
package eu.dnetlib.dhp.schema.common;
import eu.dnetlib.dhp.schema.oaf.OafEntity;
/** Actual entity types in the Graph */
public enum EntityType {
- publication,
- dataset,
- otherresearchproduct,
- software,
- datasource,
- organization,
- project;
+ publication, dataset, otherresearchproduct, software, datasource, organization, project;
- /**
- * Resolves the EntityType, given the relative class name
- *
- * @param clazz the given class name
- * @param actual OafEntity subclass
- * @return the EntityType associated to the given class
- */
- public static EntityType fromClass(Class clazz) {
+ /**
+ * Resolves the EntityType, given the relative class name
+ *
+ * @param clazz the given class name
+ * @param <T> actual OafEntity subclass
+ * @return the EntityType associated to the given class
+ */
+ public static <T extends OafEntity> EntityType fromClass(Class<T> clazz) {
- return EntityType.valueOf(clazz.getSimpleName().toLowerCase());
- }
+ return EntityType.valueOf(clazz.getSimpleName().toLowerCase());
+ }
}
diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/common/MainEntityType.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/common/MainEntityType.java
index 466cdc9e9..cda8ba484 100644
--- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/common/MainEntityType.java
+++ b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/common/MainEntityType.java
@@ -1,9 +1,7 @@
+
package eu.dnetlib.dhp.schema.common;
/** Main entity types in the Graph */
public enum MainEntityType {
- result,
- datasource,
- organization,
- project
+ result, datasource, organization, project
}
diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/common/ModelConstants.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/common/ModelConstants.java
index 0dfdaad52..926b02110 100644
--- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/common/ModelConstants.java
+++ b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/common/ModelConstants.java
@@ -1,40 +1,88 @@
+
package eu.dnetlib.dhp.schema.common;
import eu.dnetlib.dhp.schema.oaf.Qualifier;
public class ModelConstants {
- public static final String DNET_RESULT_TYPOLOGIES = "dnet:result_typologies";
+ public static final String DNET_RESULT_TYPOLOGIES = "dnet:result_typologies";
+ public static final String DNET_PUBLICATION_RESOURCE = "dnet:publication_resource";
+ public static final String DNET_ACCESS_MODES = "dnet:access_modes";
+ public static final String DNET_LANGUAGES = "dnet:languages";
+ public static final String DNET_PID_TYPES = "dnet:pid_types";
+ public static final String DNET_DATA_CITE_DATE = "dnet:dataCite_date";
+ public static final String DNET_DATA_CITE_RESOURCE = "dnet:dataCite_resource";
+ public static final String DNET_PROVENANCE_ACTIONS = "dnet:provenanceActions";
- public static final String DATASET_RESULTTYPE_CLASSID = "dataset";
- public static final String PUBLICATION_RESULTTYPE_CLASSID = "publication";
- public static final String SOFTWARE_RESULTTYPE_CLASSID = "software";
- public static final String ORP_RESULTTYPE_CLASSID = "other";
+ public static final String SYSIMPORT_CROSSWALK_REPOSITORY = "sysimport:crosswalk:repository";
+ public static final String SYSIMPORT_CROSSWALK_ENTITYREGISTRY = "sysimport:crosswalk:entityregistry";
+ public static final String USER_CLAIM = "user:claim";
- public static Qualifier PUBLICATION_DEFAULT_RESULTTYPE = new Qualifier();
- public static Qualifier DATASET_DEFAULT_RESULTTYPE = new Qualifier();
- public static Qualifier SOFTWARE_DEFAULT_RESULTTYPE = new Qualifier();
- public static Qualifier ORP_DEFAULT_RESULTTYPE = new Qualifier();
+ public static final String DATASET_RESULTTYPE_CLASSID = "dataset";
+ public static final String PUBLICATION_RESULTTYPE_CLASSID = "publication";
+ public static final String SOFTWARE_RESULTTYPE_CLASSID = "software";
+ public static final String ORP_RESULTTYPE_CLASSID = "other";
- static {
- PUBLICATION_DEFAULT_RESULTTYPE.setClassid(PUBLICATION_RESULTTYPE_CLASSID);
- PUBLICATION_DEFAULT_RESULTTYPE.setClassname(PUBLICATION_RESULTTYPE_CLASSID);
- PUBLICATION_DEFAULT_RESULTTYPE.setSchemeid(DNET_RESULT_TYPOLOGIES);
- PUBLICATION_DEFAULT_RESULTTYPE.setSchemename(DNET_RESULT_TYPOLOGIES);
+ public static final String RESULT_RESULT = "resultResult";
+ public static final String PUBLICATION_DATASET = "publicationDataset";
+ public static final String IS_RELATED_TO = "isRelatedTo";
+ public static final String SUPPLEMENT = "supplement";
+ public static final String IS_SUPPLEMENT_TO = "isSupplementTo";
+ public static final String IS_SUPPLEMENTED_BY = "isSupplementedBy";
+ public static final String PART = "part";
+ public static final String IS_PART_OF = "IsPartOf";
+ public static final String HAS_PARTS = "HasParts";
+ public static final String RELATIONSHIP = "relationship";
- DATASET_DEFAULT_RESULTTYPE.setClassid(DATASET_RESULTTYPE_CLASSID);
- DATASET_DEFAULT_RESULTTYPE.setClassname(DATASET_RESULTTYPE_CLASSID);
- DATASET_DEFAULT_RESULTTYPE.setSchemeid(DNET_RESULT_TYPOLOGIES);
- DATASET_DEFAULT_RESULTTYPE.setSchemename(DNET_RESULT_TYPOLOGIES);
+ public static final String RESULT_PROJECT = "resultProject";
+ public static final String OUTCOME = "outcome";
+ public static final String IS_PRODUCED_BY = "isProducedBy";
+ public static final String PRODUCES = "produces";
- SOFTWARE_DEFAULT_RESULTTYPE.setClassid(SOFTWARE_RESULTTYPE_CLASSID);
- SOFTWARE_DEFAULT_RESULTTYPE.setClassname(SOFTWARE_RESULTTYPE_CLASSID);
- SOFTWARE_DEFAULT_RESULTTYPE.setSchemeid(DNET_RESULT_TYPOLOGIES);
- SOFTWARE_DEFAULT_RESULTTYPE.setSchemename(DNET_RESULT_TYPOLOGIES);
+ public static final String DATASOURCE_ORGANIZATION = "datasourceOrganization";
+ public static final String PROVISION = "provision";
+ public static final String IS_PROVIDED_BY = "isProvidedBy";
+ public static final String PROVIDES = "provides";
- ORP_DEFAULT_RESULTTYPE.setClassid(ORP_RESULTTYPE_CLASSID);
- ORP_DEFAULT_RESULTTYPE.setClassname(ORP_RESULTTYPE_CLASSID);
- ORP_DEFAULT_RESULTTYPE.setSchemeid(DNET_RESULT_TYPOLOGIES);
- ORP_DEFAULT_RESULTTYPE.setSchemename(DNET_RESULT_TYPOLOGIES);
- }
+ public static final String PROJECT_ORGANIZATION = "projectOrganization";
+ public static final String PARTICIPATION = "participation";
+ public static final String HAS_PARTICIPANT = "hasParticipant";
+ public static final String IS_PARTICIPANT = "isParticipant";
+
+ public static final Qualifier PUBLICATION_DEFAULT_RESULTTYPE = qualifier(
+ PUBLICATION_RESULTTYPE_CLASSID, PUBLICATION_RESULTTYPE_CLASSID,
+ DNET_RESULT_TYPOLOGIES, DNET_RESULT_TYPOLOGIES);
+
+ public static final Qualifier DATASET_DEFAULT_RESULTTYPE = qualifier(
+ DATASET_RESULTTYPE_CLASSID, DATASET_RESULTTYPE_CLASSID,
+ DNET_RESULT_TYPOLOGIES, DNET_RESULT_TYPOLOGIES);
+
+ public static final Qualifier SOFTWARE_DEFAULT_RESULTTYPE = qualifier(
+ SOFTWARE_RESULTTYPE_CLASSID, SOFTWARE_RESULTTYPE_CLASSID,
+ DNET_RESULT_TYPOLOGIES, DNET_RESULT_TYPOLOGIES);
+
+ public static final Qualifier ORP_DEFAULT_RESULTTYPE = qualifier(
+ ORP_RESULTTYPE_CLASSID, ORP_RESULTTYPE_CLASSID,
+ DNET_RESULT_TYPOLOGIES, DNET_RESULT_TYPOLOGIES);
+
+ public static final Qualifier REPOSITORY_PROVENANCE_ACTIONS = qualifier(
+ SYSIMPORT_CROSSWALK_REPOSITORY, SYSIMPORT_CROSSWALK_REPOSITORY,
+ DNET_PROVENANCE_ACTIONS, DNET_PROVENANCE_ACTIONS);
+
+ public static final Qualifier ENTITYREGISTRY_PROVENANCE_ACTION = qualifier(
+ SYSIMPORT_CROSSWALK_ENTITYREGISTRY, SYSIMPORT_CROSSWALK_ENTITYREGISTRY,
+ DNET_PROVENANCE_ACTIONS, DNET_PROVENANCE_ACTIONS);
+
+ private static Qualifier qualifier(
+ final String classid,
+ final String classname,
+ final String schemeid,
+ final String schemename) {
+ final Qualifier q = new Qualifier();
+ q.setClassid(classid);
+ q.setClassname(classname);
+ q.setSchemeid(schemeid);
+ q.setSchemename(schemename);
+ return q;
+ }
}
diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/common/ModelSupport.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/common/ModelSupport.java
index fc85b1ac1..1fd2ef2da 100644
--- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/common/ModelSupport.java
+++ b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/common/ModelSupport.java
@@ -13,7 +13,7 @@ import eu.dnetlib.dhp.schema.oaf.*;
public class ModelSupport {
/** Defines the mapping between the actual entity type and the main entity type */
- private static Map entityMapping = Maps.newHashMap();
+ private static final Map<EntityType, MainEntityType> entityMapping = Maps.newHashMap();
static {
entityMapping.put(EntityType.publication, MainEntityType.result);
@@ -53,232 +53,6 @@ public class ModelSupport {
oafTypes.put("relation", Relation.class);
}
- public static final Map entityIdPrefix = Maps.newHashMap();
-
- static {
- entityIdPrefix.put("datasource", "10");
- entityIdPrefix.put("organization", "20");
- entityIdPrefix.put("project", "40");
- entityIdPrefix.put("result", "50");
- }
-
- public static final Map relationInverseMap = Maps.newHashMap();
-
- static {
- relationInverseMap
- .put(
- "personResult_authorship_isAuthorOf", new RelationInverse()
- .setRelation("isAuthorOf")
- .setInverse("hasAuthor")
- .setRelType("personResult")
- .setSubReltype("authorship"));
- relationInverseMap
- .put(
- "personResult_authorship_hasAuthor", new RelationInverse()
- .setInverse("isAuthorOf")
- .setRelation("hasAuthor")
- .setRelType("personResult")
- .setSubReltype("authorship"));
- relationInverseMap
- .put(
- "projectOrganization_participation_isParticipant", new RelationInverse()
- .setRelation("isParticipant")
- .setInverse("hasParticipant")
- .setRelType("projectOrganization")
- .setSubReltype("participation"));
- relationInverseMap
- .put(
- "projectOrganization_participation_hasParticipant", new RelationInverse()
- .setInverse("isParticipant")
- .setRelation("hasParticipant")
- .setRelType("projectOrganization")
- .setSubReltype("participation"));
- relationInverseMap
- .put(
- "resultOrganization_affiliation_hasAuthorInstitution", new RelationInverse()
- .setRelation("hasAuthorInstitution")
- .setInverse("isAuthorInstitutionOf")
- .setRelType("resultOrganization")
- .setSubReltype("affiliation"));
- relationInverseMap
- .put(
- "resultOrganization_affiliation_isAuthorInstitutionOf", new RelationInverse()
- .setInverse("hasAuthorInstitution")
- .setRelation("isAuthorInstitutionOf")
- .setRelType("resultOrganization")
- .setSubReltype("affiliation"));
- relationInverseMap
- .put(
- "organizationOrganization_dedup_merges", new RelationInverse()
- .setRelation("merges")
- .setInverse("isMergedIn")
- .setRelType("organizationOrganization")
- .setSubReltype("dedup"));
- relationInverseMap
- .put(
- "organizationOrganization_dedup_isMergedIn", new RelationInverse()
- .setInverse("merges")
- .setRelation("isMergedIn")
- .setRelType("organizationOrganization")
- .setSubReltype("dedup"));
- relationInverseMap
- .put(
- "organizationOrganization_dedupSimilarity_isSimilarTo", new RelationInverse()
- .setInverse("isSimilarTo")
- .setRelation("isSimilarTo")
- .setRelType("organizationOrganization")
- .setSubReltype("dedupSimilarity"));
-
- relationInverseMap
- .put(
- "resultProject_outcome_isProducedBy", new RelationInverse()
- .setRelation("isProducedBy")
- .setInverse("produces")
- .setRelType("resultProject")
- .setSubReltype("outcome"));
- relationInverseMap
- .put(
- "resultProject_outcome_produces", new RelationInverse()
- .setInverse("isProducedBy")
- .setRelation("produces")
- .setRelType("resultProject")
- .setSubReltype("outcome"));
- relationInverseMap
- .put(
- "projectPerson_contactPerson_isContact", new RelationInverse()
- .setRelation("isContact")
- .setInverse("hasContact")
- .setRelType("projectPerson")
- .setSubReltype("contactPerson"));
- relationInverseMap
- .put(
- "projectPerson_contactPerson_hasContact", new RelationInverse()
- .setInverse("isContact")
- .setRelation("hasContact")
- .setRelType("personPerson")
- .setSubReltype("coAuthorship"));
- relationInverseMap
- .put(
- "personPerson_coAuthorship_isCoauthorOf", new RelationInverse()
- .setInverse("isCoAuthorOf")
- .setRelation("isCoAuthorOf")
- .setRelType("personPerson")
- .setSubReltype("coAuthorship"));
- relationInverseMap
- .put(
- "personPerson_dedup_merges", new RelationInverse()
- .setInverse("isMergedIn")
- .setRelation("merges")
- .setRelType("personPerson")
- .setSubReltype("dedup"));
- relationInverseMap
- .put(
- "personPerson_dedup_isMergedIn", new RelationInverse()
- .setInverse("merges")
- .setRelation("isMergedIn")
- .setRelType("personPerson")
- .setSubReltype("dedup"));
- relationInverseMap
- .put(
- "personPerson_dedupSimilarity_isSimilarTo", new RelationInverse()
- .setInverse("isSimilarTo")
- .setRelation("isSimilarTo")
- .setRelType("personPerson")
- .setSubReltype("dedupSimilarity"));
- relationInverseMap
- .put(
- "datasourceOrganization_provision_isProvidedBy", new RelationInverse()
- .setInverse("provides")
- .setRelation("isProvidedBy")
- .setRelType("datasourceOrganization")
- .setSubReltype("provision"));
- relationInverseMap
- .put(
- "datasourceOrganization_provision_provides", new RelationInverse()
- .setInverse("isProvidedBy")
- .setRelation("provides")
- .setRelType("datasourceOrganization")
- .setSubReltype("provision"));
- relationInverseMap
- .put(
- "resultResult_similarity_hasAmongTopNSimilarDocuments", new RelationInverse()
- .setInverse("isAmongTopNSimilarDocuments")
- .setRelation("hasAmongTopNSimilarDocuments")
- .setRelType("resultResult")
- .setSubReltype("similarity"));
- relationInverseMap
- .put(
- "resultResult_similarity_isAmongTopNSimilarDocuments", new RelationInverse()
- .setInverse("hasAmongTopNSimilarDocuments")
- .setRelation("isAmongTopNSimilarDocuments")
- .setRelType("resultResult")
- .setSubReltype("similarity"));
- relationInverseMap
- .put(
- "resultResult_relationship_isRelatedTo", new RelationInverse()
- .setInverse("isRelatedTo")
- .setRelation("isRelatedTo")
- .setRelType("resultResult")
- .setSubReltype("relationship"));
- relationInverseMap
- .put(
- "resultResult_similarity_isAmongTopNSimilarDocuments", new RelationInverse()
- .setInverse("hasAmongTopNSimilarDocuments")
- .setRelation("isAmongTopNSimilarDocuments")
- .setRelType("resultResult")
- .setSubReltype("similarity"));
- relationInverseMap
- .put(
- "resultResult_supplement_isSupplementTo", new RelationInverse()
- .setInverse("isSupplementedBy")
- .setRelation("isSupplementTo")
- .setRelType("resultResult")
- .setSubReltype("supplement"));
- relationInverseMap
- .put(
- "resultResult_supplement_isSupplementedBy", new RelationInverse()
- .setInverse("isSupplementTo")
- .setRelation("isSupplementedBy")
- .setRelType("resultResult")
- .setSubReltype("supplement"));
- relationInverseMap
- .put(
- "resultResult_part_isPartOf", new RelationInverse()
- .setInverse("hasPart")
- .setRelation("isPartOf")
- .setRelType("resultResult")
- .setSubReltype("part"));
- relationInverseMap
- .put(
- "resultResult_part_hasPart", new RelationInverse()
- .setInverse("isPartOf")
- .setRelation("hasPart")
- .setRelType("resultResult")
- .setSubReltype("part"));
- relationInverseMap
- .put(
- "resultResult_dedup_merges", new RelationInverse()
- .setInverse("isMergedIn")
- .setRelation("merges")
- .setRelType("resultResult")
- .setSubReltype("dedup"));
- relationInverseMap
- .put(
- "resultResult_dedup_isMergedIn", new RelationInverse()
- .setInverse("merges")
- .setRelation("isMergedIn")
- .setRelType("resultResult")
- .setSubReltype("dedup"));
- relationInverseMap
- .put(
- "resultResult_dedupSimilarity_isSimilarTo", new RelationInverse()
- .setInverse("isSimilarTo")
- .setRelation("isSimilarTo")
- .setRelType("resultResult")
- .setSubReltype("dedupSimilarity"));
-
- }
-
private static final String schemeTemplate = "dnet:%s_%s_relations";
private ModelSupport() {
@@ -428,4 +202,5 @@ public class ModelSupport {
private static <T extends OafEntity> String idFnForOafEntity(T t) {
return ((OafEntity) t).getId();
}
+
}
diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Author.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Author.java
index e004e5800..231fb1e60 100644
--- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Author.java
+++ b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Author.java
@@ -1,3 +1,4 @@
+
package eu.dnetlib.dhp.schema.oaf;
import java.io.Serializable;
@@ -5,92 +6,84 @@ import java.util.*;
public class Author implements Serializable {
- private String fullname;
+ private String fullname;
- private String name;
+ private String name;
- private String surname;
+ private String surname;
- private Integer rank;
+ private Integer rank;
- private List pid;
+ private List<StructuredProperty> pid;
- private List> affiliation;
+ private List<Field<String>> affiliation;
- public String getFullname() {
- return fullname;
- }
+ public String getFullname() {
+ return fullname;
+ }
- public void setFullname(String fullname) {
- this.fullname = fullname;
- }
+ public void setFullname(String fullname) {
+ this.fullname = fullname;
+ }
- public String getName() {
- return name;
- }
+ public String getName() {
+ return name;
+ }
- public void setName(String name) {
- this.name = name;
- }
+ public void setName(String name) {
+ this.name = name;
+ }
- public String getSurname() {
- return surname;
- }
+ public String getSurname() {
+ return surname;
+ }
- public void setSurname(String surname) {
- this.surname = surname;
- }
+ public void setSurname(String surname) {
+ this.surname = surname;
+ }
- public Integer getRank() {
- return rank;
- }
+ public Integer getRank() {
+ return rank;
+ }
- public void setRank(Integer rank) {
- this.rank = rank;
- }
+ public void setRank(Integer rank) {
+ this.rank = rank;
+ }
- public List getPid() {
- return pid;
- }
+ public List<StructuredProperty> getPid() {
+ return pid;
+ }
- public void setPid(List pid) {
- this.pid = pid;
- }
+ public void setPid(List<StructuredProperty> pid) {
+ this.pid = pid;
+ }
- public List> getAffiliation() {
- return affiliation;
- }
+ public List<Field<String>> getAffiliation() {
+ return affiliation;
+ }
- public void setAffiliation(List> affiliation) {
- this.affiliation = affiliation;
- }
+ public void setAffiliation(List<Field<String>> affiliation) {
+ this.affiliation = affiliation;
+ }
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- Author author = (Author) o;
- return Objects.equals(fullname, author.fullname)
- && Objects.equals(name, author.name)
- && Objects.equals(surname, author.surname)
- && Objects.equals(rank, author.rank)
- && Objects.equals(pid, author.pid)
- && Objects.equals(affiliation, author.affiliation);
- }
+ @Override
+ public boolean equals(Object o) {
+ if (this == o)
+ return true;
+ if (o == null || getClass() != o.getClass())
+ return false;
+ Author author = (Author) o;
+ return Objects.equals(fullname, author.fullname)
+ && Objects.equals(name, author.name)
+ && Objects.equals(surname, author.surname)
+ && Objects.equals(rank, author.rank)
+ && Objects.equals(pid, author.pid)
+ && Objects.equals(affiliation, author.affiliation);
+ }
- @Override
- public int hashCode() {
- return Objects.hash(fullname, name, surname, rank, pid, affiliation);
- }
+ @Override
+ public int hashCode() {
+ return Objects.hash(fullname, name, surname, rank, pid, affiliation);
+ }
- public void addPid(StructuredProperty pid) {
-
- if (pid == null) return;
-
- if (this.pid == null) {
- this.pid = Arrays.asList(pid);
- } else {
- this.pid.add(pid);
- }
- }
}
diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Context.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Context.java
index 7d930630d..57912c463 100644
--- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Context.java
+++ b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Context.java
@@ -1,42 +1,46 @@
+
package eu.dnetlib.dhp.schema.oaf;
import java.io.Serializable;
import java.util.List;
public class Context implements Serializable {
- private String id;
+ private String id;
- private List dataInfo;
+ private List<DataInfo> dataInfo;
- public String getId() {
- return id;
- }
+ public String getId() {
+ return id;
+ }
- public void setId(String id) {
- this.id = id;
- }
+ public void setId(String id) {
+ this.id = id;
+ }
- public List getDataInfo() {
- return dataInfo;
- }
+ public List<DataInfo> getDataInfo() {
+ return dataInfo;
+ }
- public void setDataInfo(List dataInfo) {
- this.dataInfo = dataInfo;
- }
+ public void setDataInfo(List<DataInfo> dataInfo) {
+ this.dataInfo = dataInfo;
+ }
- @Override
- public int hashCode() {
- return id == null ? 0 : id.hashCode();
- }
+ @Override
+ public int hashCode() {
+ return id == null ? 0 : id.hashCode();
+ }
- @Override
- public boolean equals(Object obj) {
- if (this == obj) return true;
- if (obj == null) return false;
- if (getClass() != obj.getClass()) return false;
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj)
+ return true;
+ if (obj == null)
+ return false;
+ if (getClass() != obj.getClass())
+ return false;
- Context other = (Context) obj;
+ Context other = (Context) obj;
- return id.equals(other.getId());
- }
+ return id.equals(other.getId());
+ }
}
diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Country.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Country.java
index 388b9aab6..e25fdcade 100644
--- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Country.java
+++ b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Country.java
@@ -1,30 +1,34 @@
+
package eu.dnetlib.dhp.schema.oaf;
import java.util.Objects;
public class Country extends Qualifier {
- private DataInfo dataInfo;
+ private DataInfo dataInfo;
- public DataInfo getDataInfo() {
- return dataInfo;
- }
+ public DataInfo getDataInfo() {
+ return dataInfo;
+ }
- public void setDataInfo(DataInfo dataInfo) {
- this.dataInfo = dataInfo;
- }
+ public void setDataInfo(DataInfo dataInfo) {
+ this.dataInfo = dataInfo;
+ }
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- if (!super.equals(o)) return false;
- Country country = (Country) o;
- return Objects.equals(dataInfo, country.dataInfo);
- }
+ @Override
+ public boolean equals(Object o) {
+ if (this == o)
+ return true;
+ if (o == null || getClass() != o.getClass())
+ return false;
+ if (!super.equals(o))
+ return false;
+ Country country = (Country) o;
+ return Objects.equals(dataInfo, country.dataInfo);
+ }
- @Override
- public int hashCode() {
- return Objects.hash(super.hashCode(), dataInfo);
- }
+ @Override
+ public int hashCode() {
+ return Objects.hash(super.hashCode(), dataInfo);
+ }
}
diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/DataInfo.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/DataInfo.java
index f65518a1f..cc77e1ea0 100644
--- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/DataInfo.java
+++ b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/DataInfo.java
@@ -1,3 +1,4 @@
+
package eu.dnetlib.dhp.schema.oaf;
import java.io.Serializable;
@@ -5,77 +6,80 @@ import java.util.Objects;
public class DataInfo implements Serializable {
- private Boolean invisible = false;
- private Boolean inferred;
- private Boolean deletedbyinference;
- private String trust;
- private String inferenceprovenance;
- private Qualifier provenanceaction;
+ private Boolean invisible = false;
+ private Boolean inferred;
+ private Boolean deletedbyinference;
+ private String trust;
+ private String inferenceprovenance;
+ private Qualifier provenanceaction;
- public Boolean getInvisible() {
- return invisible;
- }
+ public Boolean getInvisible() {
+ return invisible;
+ }
- public void setInvisible(Boolean invisible) {
- this.invisible = invisible;
- }
+ public void setInvisible(Boolean invisible) {
+ this.invisible = invisible;
+ }
- public Boolean getInferred() {
- return inferred;
- }
+ public Boolean getInferred() {
+ return inferred;
+ }
- public void setInferred(Boolean inferred) {
- this.inferred = inferred;
- }
+ public void setInferred(Boolean inferred) {
+ this.inferred = inferred;
+ }
- public Boolean getDeletedbyinference() {
- return deletedbyinference;
- }
+ public Boolean getDeletedbyinference() {
+ return deletedbyinference;
+ }
- public void setDeletedbyinference(Boolean deletedbyinference) {
- this.deletedbyinference = deletedbyinference;
- }
+ public void setDeletedbyinference(Boolean deletedbyinference) {
+ this.deletedbyinference = deletedbyinference;
+ }
- public String getTrust() {
- return trust;
- }
+ public String getTrust() {
+ return trust;
+ }
- public void setTrust(String trust) {
- this.trust = trust;
- }
+ public void setTrust(String trust) {
+ this.trust = trust;
+ }
- public String getInferenceprovenance() {
- return inferenceprovenance;
- }
+ public String getInferenceprovenance() {
+ return inferenceprovenance;
+ }
- public void setInferenceprovenance(String inferenceprovenance) {
- this.inferenceprovenance = inferenceprovenance;
- }
+ public void setInferenceprovenance(String inferenceprovenance) {
+ this.inferenceprovenance = inferenceprovenance;
+ }
- public Qualifier getProvenanceaction() {
- return provenanceaction;
- }
+ public Qualifier getProvenanceaction() {
+ return provenanceaction;
+ }
- public void setProvenanceaction(Qualifier provenanceaction) {
- this.provenanceaction = provenanceaction;
- }
+ public void setProvenanceaction(Qualifier provenanceaction) {
+ this.provenanceaction = provenanceaction;
+ }
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- DataInfo dataInfo = (DataInfo) o;
- return Objects.equals(invisible, dataInfo.invisible)
- && Objects.equals(inferred, dataInfo.inferred)
- && Objects.equals(deletedbyinference, dataInfo.deletedbyinference)
- && Objects.equals(trust, dataInfo.trust)
- && Objects.equals(inferenceprovenance, dataInfo.inferenceprovenance)
- && Objects.equals(provenanceaction, dataInfo.provenanceaction);
- }
+ @Override
+ public boolean equals(Object o) {
+ if (this == o)
+ return true;
+ if (o == null || getClass() != o.getClass())
+ return false;
+ DataInfo dataInfo = (DataInfo) o;
+ return Objects.equals(invisible, dataInfo.invisible)
+ && Objects.equals(inferred, dataInfo.inferred)
+ && Objects.equals(deletedbyinference, dataInfo.deletedbyinference)
+ && Objects.equals(trust, dataInfo.trust)
+ && Objects.equals(inferenceprovenance, dataInfo.inferenceprovenance)
+ && Objects.equals(provenanceaction, dataInfo.provenanceaction);
+ }
- @Override
- public int hashCode() {
- return Objects.hash(
- invisible, inferred, deletedbyinference, trust, inferenceprovenance, provenanceaction);
- }
+ @Override
+ public int hashCode() {
+ return Objects
+ .hash(
+ invisible, inferred, deletedbyinference, trust, inferenceprovenance, provenanceaction);
+ }
}
diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Dataset.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Dataset.java
index 93b51f352..07ddbb00e 100644
--- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Dataset.java
+++ b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Dataset.java
@@ -1,116 +1,115 @@
+
package eu.dnetlib.dhp.schema.oaf;
-import eu.dnetlib.dhp.schema.common.ModelConstants;
import java.io.Serializable;
import java.util.List;
+import eu.dnetlib.dhp.schema.common.ModelConstants;
+
public class Dataset extends Result implements Serializable {
- private Field storagedate;
+ private Field<String> storagedate;
- private Field device;
+ private Field<String> device;
- private Field size;
+ private Field<String> size;
- private Field version;
+ private Field<String> version;
- private Field lastmetadataupdate;
+ private Field<String> lastmetadataupdate;
- private Field metadataversionnumber;
+ private Field<String> metadataversionnumber;
- private List geolocation;
+ private List<GeoLocation> geolocation;
- public Dataset() {
- setResulttype(ModelConstants.DATASET_DEFAULT_RESULTTYPE);
- }
+ public Dataset() {
+ setResulttype(ModelConstants.DATASET_DEFAULT_RESULTTYPE);
+ }
- public Field getStoragedate() {
- return storagedate;
- }
+ public Field<String> getStoragedate() {
+ return storagedate;
+ }
- public void setStoragedate(Field storagedate) {
- this.storagedate = storagedate;
- }
+ public void setStoragedate(Field<String> storagedate) {
+ this.storagedate = storagedate;
+ }
- public Field getDevice() {
- return device;
- }
+ public Field<String> getDevice() {
+ return device;
+ }
- public void setDevice(Field device) {
- this.device = device;
- }
+ public void setDevice(Field<String> device) {
+ this.device = device;
+ }
- public Field getSize() {
- return size;
- }
+ public Field<String> getSize() {
+ return size;
+ }
- public void setSize(Field size) {
- this.size = size;
- }
+ public void setSize(Field<String> size) {
+ this.size = size;
+ }
- public Field getVersion() {
- return version;
- }
+ public Field<String> getVersion() {
+ return version;
+ }
- public void setVersion(Field version) {
- this.version = version;
- }
+ public void setVersion(Field