This commit is contained in:
Miriam Baglioni 2023-02-13 12:40:14 +01:00
parent 508648e1d8
commit 125657ed4c
11 changed files with 148 additions and 133 deletions

View File

@@ -5,7 +5,8 @@
<groupId>eu.dnetlib.dhp</groupId>
<artifactId>dhp-code-style</artifactId>
<version>1.2.5-SNAPSHOT</version>
<!-- <version>1.2.5-SNAPSHOT</version>-->
<version>2.0.0-SNAPSHOT</version>
<packaging>jar</packaging>

View File

@@ -7,7 +7,8 @@ import eu.dnetlib.dhp.schema.oaf.Qualifier;
public class ModelConstants {
private ModelConstants() {}
private ModelConstants() {
}
public static final String ORCID = "orcid";
public static final String ORCID_PENDING = "orcid_pending";
@@ -80,7 +81,6 @@ public class ModelConstants {
public static final String PROVENANCE_DEDUP = "sysimport:dedup";
public static final String PROVENANCE_ENRICH = "sysimport:enrich";
public static final Qualifier PROVENANCE_ACTION_SET_QUALIFIER = qualifier(
SYSIMPORT_ACTIONSET, SYSIMPORT_ACTIONSET, DNET_PROVENANCE_ACTIONS);
@@ -127,8 +127,6 @@ public class ModelConstants {
public static final String IS_REQUIRED_BY = "IsRequiredBy";
public static final String REQUIRES = "Requires";
public static final String CITATION = "citation"; // subreltype
public static final String CITES = "Cites";
public static final String IS_CITED_BY = "IsCitedBy";

View File

@@ -209,7 +209,8 @@ public class ModelSupport {
return idPrefixMap.get(clazz);
}
public static <X extends Oaf, Y extends Oaf, Z extends Oaf> Boolean sameClass(X left, Y right, Class<Z> superClazz) {
public static <X extends Oaf, Y extends Oaf, Z extends Oaf> Boolean sameClass(X left, Y right,
Class<Z> superClazz) {
return isSubClass(left, superClazz) && isSubClass(right, superClazz);
}

View File

@@ -363,7 +363,8 @@ public class OafMapperUtils {
final Entity entity,
final String validationDate) {
final List<Provenance> provenance = getProvenance(entity.getCollectedfrom(), fromEntityDataInfo(entity.getDataInfo()));
final List<Provenance> provenance = getProvenance(
entity.getCollectedfrom(), fromEntityDataInfo(entity.getDataInfo()));
return getRelation(
source, target, relType, subRelType, relClass, provenance, validationDate, null);
}

View File

@@ -1,8 +1,13 @@
package eu.dnetlib.dhp.common.vocabulary;
import eu.dnetlib.dhp.schema.oaf.Qualifier;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
import static org.mockito.Mockito.lenient;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import org.apache.commons.io.IOUtils;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
@@ -12,18 +17,13 @@ import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import static org.mockito.Mockito.lenient;
import eu.dnetlib.dhp.schema.oaf.Qualifier;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
@ExtendWith(MockitoExtension.class)
public class VocabularyTest {
@Mock
protected ISLookUpService isLookUpService;
@@ -56,7 +56,6 @@ public class VocabularyTest {
VocabularyTest.class.getResourceAsStream("/eu/dnetlib/dhp/transform/synonyms.txt")));
}
@Test
void testVocabularyMatch() throws Exception {
final String s = IOUtils.toString(this.getClass().getResourceAsStream("terms"));
@@ -67,18 +66,14 @@ public class VocabularyTest {
if (t1 == null) {
System.err.println(s1 + " Missing");
}
else {
} else {
System.out.println("syn=" + s1 + " term = " + t1.getClassid());
System.out.println(vocabularies.getSynonymAsQualifier("dnet:result_typologies", t1.getClassid()).getClassname());
System.out
.println(
vocabularies.getSynonymAsQualifier("dnet:result_typologies", t1.getClassid()).getClassname());
}
}
}
}

View File

@@ -13,8 +13,8 @@ import org.json4s.jackson.JsonMethods.parse
import scala.collection.JavaConverters._
case class CrossrefDT(doi: String, json: String, timestamp: Long) {}
object CrossrefUtility {
val DOI_PREFIX_REGEX = "(^10\\.|\\/10.)"
val DOI_PREFIX = "10."
@@ -37,7 +37,6 @@ object CrossrefUtility {
ret
}
def extractDate(dt: String, datePart: List[List[Int]]): String = {
if (StringUtils.isNotBlank(dt))
return GraphCleaningFunctions.cleanDate(dt)
@@ -72,11 +71,11 @@ object CrossrefUtility {
null
}
private def generateItemFromType(objectType: String, vocabularies: VocabularyGroup): (Result, String) = {
val term = vocabularies.getSynonymAsQualifier(ModelConstants.DNET_PUBLICATION_RESOURCE, objectType)
if (term != null) {
val resourceType = vocabularies.getSynonymAsQualifier(ModelConstants.DNET_RESULT_TYPOLOGIES, term.getClassid).getClassname
val resourceType =
vocabularies.getSynonymAsQualifier(ModelConstants.DNET_RESULT_TYPOLOGIES, term.getClassid).getClassname
resourceType match {
case "publication" => (new Publication, resourceType)
@@ -88,7 +87,6 @@ object CrossrefUtility {
null
}
def convert(input: String, vocabularies: VocabularyGroup): List[Oaf] = {
implicit lazy val formats: DefaultFormats.type = org.json4s.DefaultFormats
lazy val json: json4s.JValue = parse(input)
@@ -131,7 +129,6 @@ object CrossrefUtility {
resultList
}
def mappingResult(result: Result, json: JValue, cobjCategory: String): Result = {
implicit lazy val formats: DefaultFormats.type = org.json4s.DefaultFormats
@@ -141,7 +138,8 @@ object CrossrefUtility {
result.setPid(
List(
structuredProperty(doi, PidType.doi.toString, PidType.doi.toString, ModelConstants.DNET_PID_TYPES)
).asJava)
).asJava
)
//MAPPING Crossref DOI into OriginalId
//and Other Original Identifier of dataset like clinical-trial-number
@@ -149,11 +147,10 @@ object CrossrefUtility {
val alternativeIds: List[String] = for (JString(ids) <- json \ "alternative-id") yield ids
val tmp = clinicalTrialNumbers ::: alternativeIds ::: List(doi)
result.setOriginalId(tmp.filter(id => id != null).asJava)
// Add DataInfo
result.setDataInfo(dataInfo(false, false,0.9F,null, false,ModelConstants.REPOSITORY_PROVENANCE_ACTIONS))
result.setDataInfo(dataInfo(false, false, 0.9f, null, false, ModelConstants.REPOSITORY_PROVENANCE_ACTIONS))
result.setLastupdatetimestamp((json \ "indexed" \ "timestamp").extract[Long])
result.setDateofcollection((json \ "indexed" \ "date-time").extract[String])
@@ -167,9 +164,10 @@ object CrossrefUtility {
// TITLE
val mainTitles =
for {JString(title) <- json \ "title" if title.nonEmpty}
yield
structuredProperty(title, ModelConstants.MAIN_TITLE_QUALIFIER)
for { JString(title) <- json \ "title" if title.nonEmpty } yield structuredProperty(
title,
ModelConstants.MAIN_TITLE_QUALIFIER
)
val originalTitles = for {
JString(title) <- json \ "original-title" if title.nonEmpty
} yield structuredProperty(title, ModelConstants.ALTERNATIVE_TITLE_QUALIFIER)
@@ -177,8 +175,10 @@ object CrossrefUtility {
JString(title) <- json \ "short-title" if title.nonEmpty
} yield structuredProperty(title, ModelConstants.ALTERNATIVE_TITLE_QUALIFIER)
val subtitles =
for {JString(title) <- json \ "subtitle" if title.nonEmpty}
yield structuredProperty(title, ModelConstants.SUBTITLE_QUALIFIER)
for { JString(title) <- json \ "subtitle" if title.nonEmpty } yield structuredProperty(
title,
ModelConstants.SUBTITLE_QUALIFIER
)
result.setTitle((mainTitles ::: originalTitles ::: shortTitles ::: subtitles).asJava)
// DESCRIPTION
@@ -242,8 +242,6 @@ object CrossrefUtility {
//Mapping Subject
val subjectList: List[String] = (json \ "subject").extractOrElse[List[String]](List())
if (subjectList.nonEmpty) {
result.setSubject(
subjectList.map(s => createSubject(s, "keyword", ModelConstants.DNET_SUBJECT_TYPOLOGIES)).asJava

View File

@@ -5,13 +5,13 @@ import org.slf4j.{Logger, LoggerFactory}
class GenerateCrossrefDataset(propertyPath: String, args: Array[String], log: Logger)
extends AbstractScalaApplication(propertyPath, args, log: Logger) {
/** Here all the spark applications runs this method
* where the whole logic of the spark node is defined
*/
override def run(): Unit = ???
}
object GenerateCrossrefDataset {
val log: Logger = LoggerFactory.getLogger(getClass)
val propertyPath = "/eu/dnetlib/dhp/doiboost/crossref_dump_reader/generate_dataset_params.json"

View File

@@ -360,10 +360,13 @@ object BioDBToOAF {
val rel = new Relation
val provenance = OafMapperUtils.getProvenance(Lists.newArrayList(
val provenance = OafMapperUtils.getProvenance(
Lists.newArrayList(
collectedFrom,
collectedFromMap("pdb")
), REL_DATA_INFO)
),
REL_DATA_INFO
)
rel.setProvenance(provenance)

View File

@@ -0,0 +1,18 @@
[INFO] Scanning for projects...
[INFO]
[INFO] -------------------< eu.dnetlib.dhp:dhp-enrichment >--------------------
[INFO] Building dhp-enrichment 2.0.0-SNAPSHOT
[INFO] --------------------------------[ jar ]---------------------------------
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 1.737 s
[INFO] Finished at: 2023-02-10T17:53:31+01:00
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal on project dhp-enrichment: Could not resolve dependencies for project eu.dnetlib.dhp:dhp-enrichment:jar:2.0.0-SNAPSHOT: Failed to collect dependencies at eu.dnetlib.dhp:dhp-common:jar:2.0.0-SNAPSHOT: Failed to read artifact descriptor for eu.dnetlib.dhp:dhp-common:jar:2.0.0-SNAPSHOT: Failure to find eu.dnetlib.dhp:dhp:pom:2.0.0-SNAPSHOT in https://maven.d4science.org/nexus/content/repositories/dnet45-bootstrap-snapshot/ was cached in the local repository, resolution will not be reattempted until the update interval of dnet45-bootstrap-snapshot has elapsed or updates are forced -> [Help 1]
[ERROR]
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR]
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/DependencyResolutionException