
Merge branch 'master' of code-repo.d4science.org:D-Net/dnet-hadoop

Sandro La Bruzzo 2020-08-06 10:28:31 +02:00
commit 4fb1821fab
15 changed files with 528 additions and 13 deletions

View File

@@ -24,7 +24,9 @@ public abstract class AbstractMdRecordToOafMapper {
 	private final boolean invisible;

 	protected static final String DATACITE_SCHEMA_KERNEL_4 = "http://datacite.org/schema/kernel-4";
+	protected static final String DATACITE_SCHEMA_KERNEL_4_SLASH = "http://datacite.org/schema/kernel-4/";
 	protected static final String DATACITE_SCHEMA_KERNEL_3 = "http://datacite.org/schema/kernel-3";
+	protected static final String DATACITE_SCHEMA_KERNEL_3_SLASH = "http://datacite.org/schema/kernel-3/";

 	protected static final Qualifier ORCID_PID_TYPE = qualifier(
 		"ORCID", "Open Researcher and Contributor ID", DNET_PID_TYPES, DNET_PID_TYPES);
 	protected static final Qualifier MAG_PID_TYPE = qualifier(

@@ -55,7 +57,11 @@ public abstract class AbstractMdRecordToOafMapper {
 		DocumentFactory.getInstance().setXPathNamespaceURIs(nsContext);

 		final Document doc = DocumentHelper
-			.parseText(xml.replaceAll(DATACITE_SCHEMA_KERNEL_4, DATACITE_SCHEMA_KERNEL_3));
+			.parseText(
+				xml
+					.replaceAll(DATACITE_SCHEMA_KERNEL_4, DATACITE_SCHEMA_KERNEL_3)
+					.replaceAll(DATACITE_SCHEMA_KERNEL_4_SLASH, DATACITE_SCHEMA_KERNEL_3)
+					.replaceAll(DATACITE_SCHEMA_KERNEL_3_SLASH, DATACITE_SCHEMA_KERNEL_3));

 		final KeyValue collectedFrom = getProvenanceDatasource(
 			doc, "//oaf:collectedFrom/@id", "//oaf:collectedFrom/@name");

View File

@@ -44,6 +44,7 @@ import java.util.Date;
 import java.util.List;
 import java.util.function.Consumer;
 import java.util.function.Function;
+import java.util.function.Predicate;

 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;

@@ -53,6 +54,7 @@ import org.slf4j.LoggerFactory;
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.common.DbClient;
 import eu.dnetlib.dhp.oa.graph.raw.common.AbstractMigrationApplication;
+import eu.dnetlib.dhp.oa.graph.raw.common.VerifyNsPrefixPredicate;
 import eu.dnetlib.dhp.oa.graph.raw.common.VocabularyGroup;
 import eu.dnetlib.dhp.schema.oaf.Context;
 import eu.dnetlib.dhp.schema.oaf.DataInfo;

@@ -113,6 +115,11 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication i
 		final String hdfsPath = parser.get("hdfsPath");
 		log.info("hdfsPath: {}", hdfsPath);

+		final String nsPrefixBlacklist = parser.get("nsPrefixBlacklist");
+		log.info("nsPrefixBlacklist: {}", nsPrefixBlacklist);
+
+		final Predicate<Oaf> verifyNamespacePrefix = new VerifyNsPrefixPredicate(nsPrefixBlacklist);
+
 		final boolean processClaims = parser.get("action") != null && parser.get("action").equalsIgnoreCase("claims");
 		log.info("processClaims: {}", processClaims);

@@ -123,23 +130,25 @@
 				smdbe.execute("queryClaims.sql", smdbe::processClaims);
 			} else {
 				log.info("Processing datasources...");
-				smdbe.execute("queryDatasources.sql", smdbe::processDatasource);
+				smdbe.execute("queryDatasources.sql", smdbe::processDatasource, verifyNamespacePrefix);

 				log.info("Processing projects...");
 				if (dbSchema.equalsIgnoreCase("beta")) {
-					smdbe.execute("queryProjects.sql", smdbe::processProject);
+					smdbe.execute("queryProjects.sql", smdbe::processProject, verifyNamespacePrefix);
 				} else {
-					smdbe.execute("queryProjects_production.sql", smdbe::processProject);
+					smdbe.execute("queryProjects_production.sql", smdbe::processProject, verifyNamespacePrefix);
 				}

 				log.info("Processing orgs...");
-				smdbe.execute("queryOrganizations.sql", smdbe::processOrganization);
+				smdbe.execute("queryOrganizations.sql", smdbe::processOrganization, verifyNamespacePrefix);

 				log.info("Processing relationsNoRemoval ds <-> orgs ...");
-				smdbe.execute("queryDatasourceOrganization.sql", smdbe::processDatasourceOrganization);
+				smdbe
+					.execute(
+						"queryDatasourceOrganization.sql", smdbe::processDatasourceOrganization, verifyNamespacePrefix);

 				log.info("Processing projects <-> orgs ...");
-				smdbe.execute("queryProjectOrganization.sql", smdbe::processProjectOrganization);
+				smdbe.execute("queryProjectOrganization.sql", smdbe::processProjectOrganization, verifyNamespacePrefix);
 			}
 			log.info("All done.");
 		}

@@ -163,10 +172,20 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication i
 	}

 	public void execute(final String sqlFile, final Function<ResultSet, List<Oaf>> producer)
+		throws Exception {
+		execute(sqlFile, producer, oaf -> true);
+	}
+
+	public void execute(final String sqlFile, final Function<ResultSet, List<Oaf>> producer,
+		final Predicate<Oaf> predicate)
 		throws Exception {
 		final String sql = IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/oa/graph/sql/" + sqlFile));

-		final Consumer<ResultSet> consumer = rs -> producer.apply(rs).forEach(oaf -> emitOaf(oaf));
+		final Consumer<ResultSet> consumer = rs -> producer.apply(rs).forEach(oaf -> {
+			if (predicate.test(oaf)) {
+				emitOaf(oaf);
+			}
+		});

 		dbClient.processResults(sql, consumer);
 	}
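
For reference, the new execute(...) overload above gates emission with a Predicate<Oaf>: the original single-producer signature now delegates with an always-true predicate, so existing callers keep their behavior while the new callers pass the namespace-prefix check. A simplified, generic sketch of the same pattern (types and names are illustrative, not the project's API):

	import java.util.Arrays;
	import java.util.List;
	import java.util.function.Consumer;
	import java.util.function.Predicate;

	// Illustrative only: a producer/consumer pair where a Predicate decides which
	// records reach the emitter, mirroring the predicate-gated emitOaf(...) above.
	public class PredicateGatedEmitter<T> {

		private final Consumer<T> emitter;

		public PredicateGatedEmitter(final Consumer<T> emitter) {
			this.emitter = emitter;
		}

		// Backward-compatible entry point: accept everything.
		public void emitAll(final List<T> records) {
			emitAll(records, r -> true);
		}

		// Filtered entry point: only records passing the predicate are emitted.
		public void emitAll(final List<T> records, final Predicate<T> predicate) {
			records.stream().filter(predicate).forEach(emitter);
		}

		public static void main(final String[] args) {
			final PredicateGatedEmitter<String> out = new PredicateGatedEmitter<>(System.out::println);
			out.emitAll(Arrays.asList("keep_me", "corda_drop_me"), s -> !s.startsWith("corda"));
			// prints only "keep_me"
		}
	}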

View File

@@ -16,6 +16,8 @@ import org.apache.commons.lang3.StringUtils;
 import org.dom4j.Document;
 import org.dom4j.Node;

+import com.google.common.collect.Lists;
+
 import eu.dnetlib.dhp.common.PacePerson;
 import eu.dnetlib.dhp.oa.graph.raw.common.VocabularyGroup;
 import eu.dnetlib.dhp.schema.oaf.Author;

@@ -366,7 +368,7 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
 	@Override
 	protected List<StructuredProperty> prepareResultPids(final Document doc, final DataInfo info) {
-		final List<StructuredProperty> res = new ArrayList<>();
+		final Set<StructuredProperty> res = new HashSet();
 		res
 			.addAll(
 				prepareListStructPropsWithValidQualifier(

@@ -382,7 +384,7 @@
 					doc,
 					"//datacite:alternateIdentifier[@alternateIdentifierType != 'URL' and @alternateIdentifierType != 'landingPage']",
 					"@alternateIdentifierType", DNET_PID_TYPES, info));

-		return res;
+		return Lists.newArrayList(res);
 	}
 }

View File

@@ -0,0 +1,62 @@
package eu.dnetlib.dhp.oa.graph.raw.common;

import java.util.HashSet;
import java.util.Set;
import java.util.function.Predicate;
import java.util.regex.Pattern;

import org.apache.commons.lang3.StringUtils;

import com.google.common.base.Splitter;

import eu.dnetlib.dhp.schema.oaf.Datasource;
import eu.dnetlib.dhp.schema.oaf.Oaf;
import eu.dnetlib.dhp.schema.oaf.OafEntity;
import eu.dnetlib.dhp.schema.oaf.Relation;

/**
 * This predicate should be used to skip oaf objects using a blacklist of nsprefixes.
 *
 * @author michele
 */
public class VerifyNsPrefixPredicate implements Predicate<Oaf> {

	final Set<String> invalids = new HashSet<>();

	public VerifyNsPrefixPredicate(final String blacklist) {
		if (StringUtils.isNotBlank(blacklist)) {
			Splitter
				.on(",")
				.trimResults()
				.omitEmptyStrings()
				.split(blacklist)
				.forEach(invalids::add);
		}
	}

	@Override
	public boolean test(final Oaf oaf) {
		if (oaf instanceof Datasource) {
			return testValue(((Datasource) oaf).getNamespaceprefix().getValue());
		} else if (oaf instanceof OafEntity) {
			return testValue(((OafEntity) oaf).getId());
		} else if (oaf instanceof Relation) {
			return testValue(((Relation) oaf).getSource()) && testValue(((Relation) oaf).getTarget());
		} else {
			return true;
		}
	}

	protected boolean testValue(final String s) {
		if (StringUtils.isNotBlank(s)) {
			for (final String invalid : invalids) {
				if (Pattern.matches("^(\\d\\d\\|)?" + invalid + ".*$", s)) {
					return false;
				}
			}
		}
		return true;
	}

}
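
A brief usage sketch of the predicate added above (the ids below are made up; Relation, Project and their setters are the dhp schema classes also used by the tests further down):

	import java.util.function.Predicate;

	import eu.dnetlib.dhp.oa.graph.raw.common.VerifyNsPrefixPredicate;
	import eu.dnetlib.dhp.schema.oaf.Oaf;
	import eu.dnetlib.dhp.schema.oaf.Project;
	import eu.dnetlib.dhp.schema.oaf.Relation;

	public class VerifyNsPrefixPredicateExample {

		public static void main(final String[] args) {
			// Blacklist the "corda" and "nsf" namespace prefixes.
			final Predicate<Oaf> predicate = new VerifyNsPrefixPredicate("corda,nsf");

			// An entity whose id does not start with a blacklisted prefix passes.
			final Project keep = new Project();
			keep.setId("10|xxxxxx______::0000000000000000");
			System.out.println(predicate.test(keep)); // true

			// A relation is dropped if either endpoint uses a blacklisted prefix.
			final Relation drop = new Relation();
			drop.setSource("10|corda_______::0000000000000000");
			drop.setTarget("10|xxxxxx______::0000000000000000");
			System.out.println(predicate.test(drop)); // false
		}
	}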

View File

@@ -40,5 +40,11 @@
 		"paramLongName": "dbschema",
 		"paramDescription": "the database schema according to the D-Net infrastructure (beta or production)",
 		"paramRequired": true
+	},
+	{
+		"paramName": "nsbl",
+		"paramLongName": "nsPrefixBlacklist",
+		"paramDescription": "a blacklist of nsprefixes (comma separeted)",
+		"paramRequired": false
 	}
 ]

View File

@@ -43,7 +43,11 @@
 			<name>isLookupUrl</name>
 			<description>the address of the lookUp service</description>
 		</property>
+		<property>
+			<name>nsPrefixBlacklist</name>
+			<value></value>
+			<description>a blacklist of nsprefixes (comma separeted)</description>
+		</property>
 		<property>
 			<name>sparkDriverMemory</name>
 			<description>memory for driver process</description>

@@ -131,6 +135,7 @@
 			<arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
 			<arg>--action</arg><arg>claims</arg>
 			<arg>--dbschema</arg><arg>${dbSchema}</arg>
+			<arg>--nsPrefixBlacklist</arg><arg>${nsPrefixBlacklist}</arg>
 		</java>
 		<ok to="ImportODF_claims"/>
 		<error to="Kill"/>

@@ -182,6 +187,7 @@
 			<arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
 			<arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
 			<arg>--dbschema</arg><arg>${dbSchema}</arg>
+			<arg>--nsPrefixBlacklist</arg><arg>${nsPrefixBlacklist}</arg>
 		</java>
 		<ok to="ImportODF"/>
 		<error to="Kill"/>

View File

@@ -38,7 +38,11 @@
 			<name>isLookupUrl</name>
 			<description>the address of the lookUp service</description>
 		</property>
+		<property>
+			<name>nsPrefixBlacklist</name>
+			<value></value>
+			<description>a blacklist of nsprefixes (comma separeted)</description>
+		</property>
 		<property>
 			<name>sparkDriverMemory</name>
 			<description>memory for driver process</description>

@@ -113,6 +117,7 @@
 			<arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
 			<arg>--action</arg><arg>claims</arg>
 			<arg>--dbschema</arg><arg>${dbSchema}</arg>
+			<arg>--nsPrefixBlacklist</arg><arg>${nsPrefixBlacklist}</arg>
 		</java>
 		<ok to="ImportODF_claims"/>
 		<error to="Kill"/>

View File

@@ -25,7 +25,11 @@
 			<name>isLookupUrl</name>
 			<description>the address of the lookUp service</description>
 		</property>
+		<property>
+			<name>nsPrefixBlacklist</name>
+			<value></value>
+			<description>a blacklist of nsprefixes (comma separeted)</description>
+		</property>
 		<property>
 			<name>sparkDriverMemory</name>
 			<description>memory for driver process</description>

@@ -99,6 +103,7 @@
 			<arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
 			<arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
 			<arg>--dbschema</arg><arg>${dbSchema}</arg>
+			<arg>--nsPrefixBlacklist</arg><arg>${nsPrefixBlacklist}</arg>
 		</java>
 		<ok to="ImportDB_claims"/>
 		<error to="Kill"/>

@@ -117,6 +122,7 @@
 			<arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
 			<arg>--dbschema</arg><arg>${dbSchema}</arg>
 			<arg>--action</arg><arg>claims</arg>
+			<arg>--nsPrefixBlacklist</arg><arg>${nsPrefixBlacklist}</arg>
 		</java>
 		<ok to="End"/>
 		<error to="Kill"/>

View File

@@ -28,6 +28,11 @@
 			<name>isLookupUrl</name>
 			<description>the address of the lookUp service</description>
 		</property>
+		<property>
+			<name>nsPrefixBlacklist</name>
+			<value></value>
+			<description>a blacklist of nsprefixes (comma separeted)</description>
+		</property>
 		<property>
 			<name>sparkDriverMemory</name>
 			<description>memory for driver process</description>

@@ -67,6 +72,7 @@
 			<arg>-pguser</arg><arg>${postgresUser}</arg>
 			<arg>-pgpasswd</arg><arg>${postgresPassword}</arg>
 			<arg>-islookup</arg><arg>${isLookupUrl}</arg>
+			<arg>--nsPrefixBlacklist</arg><arg>${nsPrefixBlacklist}</arg>
 		</java>
 		<ok to="ImportODF"/>
 		<error to="Kill"/>

View File

@@ -276,6 +276,39 @@ public class MappersTest {
 		System.out.println("***************");
 	}

+	@Test
+	void testClaimDedup() throws IOException {
+		final String xml = IOUtils.toString(getClass().getResourceAsStream("oaf_claim_dedup.xml"));
+		final List<Oaf> list = new OafToOafMapper(vocs, false).processMdRecord(xml);
+
+		System.out.println("***************");
+		System.out.println(new ObjectMapper().writeValueAsString(list));
+		System.out.println("***************");
+	}
+
+	@Test
+	void testNakala() throws IOException {
+		final String xml = IOUtils.toString(getClass().getResourceAsStream("odf_nakala.xml"));
+		final List<Oaf> list = new OdfToOafMapper(vocs, false).processMdRecord(xml);
+
+		System.out.println("***************");
+		System.out.println(new ObjectMapper().writeValueAsString(list));
+		System.out.println("***************");
+
+		assertEquals(1, list.size());
+		assertTrue(list.get(0) instanceof Dataset);
+
+		final Dataset d = (Dataset) list.get(0);
+
+		assertValidId(d.getId());
+		assertValidId(d.getCollectedfrom().get(0).getKey());
+		assertTrue(StringUtils.isNotBlank(d.getTitle().get(0).getValue()));
+		assertEquals(1, d.getAuthor().size());
+		assertEquals(0, d.getSubject().size());
+		assertEquals(1, d.getInstance().size());
+		assertEquals(1, d.getPid().size());
+	}
+
 	private void assertValidId(final String id) {
 		assertEquals(49, id.length());
 		assertEquals('|', id.charAt(2));

View File

@@ -0,0 +1,92 @@
package eu.dnetlib.dhp.oa.graph.raw.common;

import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import eu.dnetlib.dhp.schema.oaf.Datasource;
import eu.dnetlib.dhp.schema.oaf.Field;
import eu.dnetlib.dhp.schema.oaf.Project;
import eu.dnetlib.dhp.schema.oaf.Relation;

class VerifyNsPrefixPredicateTest {

	private VerifyNsPrefixPredicate predicate;

	@BeforeEach
	void setUp() throws Exception {
		predicate = new VerifyNsPrefixPredicate("corda,nsf,wt");
	}

	@Test
	void testTestValue() {
		assertFalse(predicate.testValue("corda__2020"));
		assertFalse(predicate.testValue("nsf________"));
		assertFalse(predicate.testValue("nsf"));
		assertFalse(predicate.testValue("corda"));
		assertFalse(predicate.testValue("10|corda_______::fjkdsfjksdhfksj"));
		assertFalse(predicate.testValue("20|corda_______::fjkdsfjksdhfksj"));

		assertTrue(predicate.testValue("xxxxxx_____"));
		assertTrue(predicate.testValue("10|xxxxxx_____::sdasdasaddasad"));

		assertTrue(predicate.testValue(null));
		assertTrue(predicate.testValue(""));
	}

	@Test
	void testTest_ds_true() {
		final Field<String> prefix = new Field<>();
		prefix.setValue("xxxxxx______");

		final Datasource ds = new Datasource();
		ds.setNamespaceprefix(prefix);

		assertTrue(predicate.test(ds));
	}

	@Test
	void testTest_ds_false() {
		final Field<String> prefix = new Field<>();
		prefix.setValue("corda__2020");

		final Datasource ds = new Datasource();
		ds.setNamespaceprefix(prefix);

		assertFalse(predicate.test(ds));
	}

	@Test
	void testTest_rel_true() {
		final Relation rel = new Relation();
		rel.setSource("10|yyyyyy______:sdfsfsffsdfs");
		rel.setTarget("10|xxxxxx______:sdfsfsffsdfs");
		assertTrue(predicate.test(rel));
	}

	@Test
	void testTest_rel_false() {
		final Relation rel = new Relation();
		rel.setSource("10|corda_______:sdfsfsffsdfs");
		rel.setTarget("10|xxxxxx______:sdfsfsffsdfs");
		assertFalse(predicate.test(rel));
	}

	@Test
	void testTest_proj_true() {
		final Project p = new Project();
		p.setId("10|xxxxxx______:sdfsfsffsdfs");
		assertTrue(predicate.test(p));
	}

	@Test
	void testTest_proj_false() {
		final Project p = new Project();
		p.setId("10|corda_____:sdfsfsffsdfs");
		assertFalse(predicate.test(p));
	}

}

View File

@@ -951,6 +951,7 @@ dnet:countries @=@ ZW @=@ ABW
 dnet:protocols @=@ oai @=@ OAI-PMH
 dnet:protocols @=@ oai @=@ OAI_PMH
 dnet:pid_types @=@ orcid @=@ ORCID12
+dnet:pid_types @=@ handle @=@ hdl
 dnet:review_levels @=@ 0000 @=@ UNKNOWN
 dnet:review_levels @=@ 0002 @=@ 80 大阪経大学会「Working Paper」
 dnet:review_levels @=@ 0002 @=@ AO

View File

@@ -1045,6 +1045,7 @@ dnet:pid_types @=@ dnet:pid_types @=@ pmid @=@ pmid
 dnet:pid_types @=@ dnet:pid_types @=@ urn @=@ urn
 dnet:pid_types @=@ dnet:pid_types @=@ who @=@ WHO Identifier
 dnet:pid_types @=@ dnet:pid_types @=@ drks @=@ DRKS Identifier
+dnet:pid_types @=@ dnet:pid_types @=@ handle @=@ Handle
 dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MISSING/SUBJECT/ACM @=@ An ACM classification term that can be associated to your publications
 dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MISSING/SUBJECT/ARXIV @=@ An ARXIV classification term that can be associated to your publications
 dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MISSING/SUBJECT/DDC @=@ A Dewey Decimal classification term (DDC) that can be associated to your publications

View File

@@ -0,0 +1,182 @@
<oai:record xmlns:oaf="http://namespace.openaire.eu/oaf" xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:oai_dc="http://www.openarchives.org/OAI/2.0/oai_dc/"
xmlns:oai="http://www.openarchives.org/OAI/2.0/"
xmlns:dri="http://www.driver-repository.eu/namespace/dri"
xmlns:dr="http://www.driver-repository.eu/namespace/dr">
<oai:header>
<dri:objIdentifier>dedup_wf_001::534276867e917fe9efe0cca10e363457</dri:objIdentifier>
<dri:dateOfCollection>2020-08-02T22:55:40.866Z</dri:dateOfCollection>
<oaf:datasourceprefix>openaire____</oaf:datasourceprefix>
<dr:dateOfTransformation>2020-08-02T23:53:04.582Z</dr:dateOfTransformation>
</oai:header>
<oai:metadata>
<dc:contributor>ATLAS (IHEF, IoP, FNWI)</dc:contributor>
<dc:contributor>Doğuş Üniversitesi, Fen Edebiyat Fakültesi, Fizik Bölümü</dc:contributor>
<dc:contributor>TR3959</dc:contributor>
<dc:contributor>Doğuş Üniversitesi, Fen Edebiyat Fakültesi, Fizik Bölümü</dc:contributor>
<dc:contributor>TR3959</dc:contributor>
<dc:source>urn:issn:1748-0221</dc:source>
<dc:source>VOLUME=7;ISSUE=1;ISSN=1748-0221;TITLE=Journal of Instrumentation</dc:source>
<dc:source>ATLAS Collaboration Mitsou, Vasiliki Fiorini, Luca Ros Martínez, Eduardo Castillo
Giménez, María Victoria Fuster Verdú, Juan A. García García, Carmen Cabrera Urbán,
Susana Martí García, Salvador Salt Cairols, José Lacasta Llácer, Carlos Valls Ferrer,
Juan Antonio Higón Rodríguez, Emilio Ferrer Soria, Antonio González de la Hoz, Santiago
Kaci, Mohammed Hernández Jiménez, Yesenia Villaplana Pérez, Miguel 2012 A study of the
material in the ATLAS inner detector using secondary hadronic interactions Journal Of
Instrumentation 7 P01013 1 41</dc:source>
<dc:source>Journal of Instrumentation, 7(1)</dc:source>
<dc:source>Aad, G; Abbott, B; Abdallah, J; Abdelalim, AA; Abdesselam, A; Abdinov, O;  et
al.(2012). A study of the material in the ATLAS inner detector using secondary hadronic
interactions. Journal of Instrumentation, 7(1). doi: 10.1088/1748-0221/7/01/P01013. UC
Santa Cruz: Retrieved from: http://www.escholarship.org/uc/item/05j2j2br</dc:source>
<dc:source>Journal of Instrumentation, 7</dc:source>
<dc:source>VOLUME=7;ISSN=1748-0221;TITLE=Journal of Instrumentation</dc:source>
<dc:source>1748-0221</dc:source>
<dc:source>Journal of Instrumentation 7, P01013 (2012).
doi:10.1088/1748-0221/7/01/P01013</dc:source>
<dc:title>A measurement of the material in the ATLAS inner detector using secondary hadronic
interactions</dc:title>
<dc:language>und</dc:language>
<dc:subject classid="keyword" classname="keyword">Detector modelling and simulations I
(interaction of radiation with matter, interaction of photons with matter, interaction
of hadrons with matter, etc); Particle tracking detectors (Solid-state detectors); Si
microstrip and pad detectors; Large detector systems for particle and astroparticle
physics</dc:subject>
<dc:subject classid="keyword" classname="keyword">of photons with matter, interaction of
hadrons with matter, etc)</dc:subject>
<dc:subject classid="keyword" classname="keyword">Particle Physics - Experiment</dc:subject>
<dc:subject classid="keyword" classname="keyword">Detector Modelling and
Simulations</dc:subject>
<dc:subject classid="keyword" classname="keyword">Detector modelling and simulations I
(interaction of radiation with matter, interaction of photons with matter, interaction
of hadrons with matter, etc)</dc:subject>
<dc:subject classid="keyword" classname="keyword">Large detector systems for particle and
astroparticle physics</dc:subject>
<dc:subject classid="keyword" classname="keyword">Detector modelling and simulations I
(interaction of radiation with matter, interaction</dc:subject>
<dc:subject classid="keyword" classname="keyword">Large Detector Systems</dc:subject>
<dc:subject classid="keyword" classname="keyword">530</dc:subject>
<dc:subject classid="keyword" classname="keyword">Science &amp; Technology</dc:subject>
<dc:subject classid="keyword" classname="keyword">:Ciências Físicas [Ciências
Naturais]</dc:subject>
<dc:subject classid="keyword" classname="keyword">High Energy Physics -
Experiment</dc:subject>
<dc:subject classid="keyword" classname="keyword">Detectors de radiació</dc:subject>
<dc:subject classid="keyword" classname="keyword">Física nuclear</dc:subject>
<dc:subject classid="ddc" classname="ddc">ddc:610</dc:subject>
<dc:subject classid="keyword" classname="keyword">Si microstrip and pad
detectors</dc:subject>
<dc:subject classid="keyword" classname="keyword">Particle tracking detectors (Solid-state
detectors)</dc:subject>
<dc:subject classid="keyword" classname="keyword">Col·lisions (Física nuclear)</dc:subject>
<dc:subject classid="keyword" classname="keyword">Particle Tracking Detectors</dc:subject>
<dc:publisher>IOP Publishing</dc:publisher>
<dc:format>application/pdf</dc:format>
<dc:format>application/pdf</dc:format>
<dc:format>application/pdf</dc:format>
<dc:format>application/pdf</dc:format>
<dc:format>application/pdf</dc:format>
<dc:format>application/pdf</dc:format>
<dc:date>2016-05-02</dc:date>
<dc:description>The ATLAS inner detector is used to reconstruct secondary vertices due to
hadronic interactions of primary collision products, so probing the location and amount
of material in the inner region of ATLAS. Data collected in 7 TeV pp collisions at the
LHC, with a minimum bias trigger, are used for comparisons with simulated events. The
reconstructed secondary vertices have spatial resolutions ranging from ~ 200μm to 1 mm.
The overall material description in the simulation is validated to within an
experimental uncertainty of about 7%. This will lead to a better understanding of the
reconstruction of various objects such as tracks, leptons, jets, and missing transverse
momentum. We acknowledge the support of ANPCyT, Argentina; YerPhI, Armenia; ARC,
Australia; BMWF, Austria; ANAS, Azerbaijan; SSTC, Belarus; CNPq and FAPESP, Brazil;
NSERC, NRC and CFI, Canada; CERN; CONICYT, Chile; CAS, MOST and NSFC, China;
COLCIENCIAS, Colombia; MSMT CR, MPO CR and VSC CR, Czech Republic; DNRF, DNSRC and
Lundbeck Foundation, Denmark; ARTEMIS, European Union; IN2P3-CNRS, CEA-DSM/IRFU, France;
GNAS, Georgia; BMBF, DFG, HGF, MPG and AvH Foundation, Germany; GSRT, Greece; ISF,
MINERVA, GIF, DIP and Benoziyo Center, Israel; INFN, Italy; MEXT and JSPS, Japan; CNRST,
Morocco; FOM and NWO, Netherlands; RCN, Norway; MNiSW, Poland; GRICES and FCT, Portugal;
MERYS (MECTS), Romania; MES of Russia and ROSATOM, Russian Federation; JINR; MSTD,
Serbia; MSSR, Slovakia; ARRS and MVZT, Slovenia; DST/NRF, South Africa; MICINN, Spain;
SRC and Wallenberg Foundation, Sweden; SER, SNSF and Cantons of Bern and Geneva,
Switzerland; NSC, Taiwan; TAEK, Turkey; STFC, the Royal Society and Leverhulme Trust,
United Kingdom; DOE and NSF, United States of America.
info:eu-repo/semantics/publishedVersion</dc:description>
<dc:source>NARCIS</dc:source>
<dc:source>DSpace@Dogus</dc:source>
<dc:source>Lancaster EPrints</dc:source>
<dc:source>CERN Document Server</dc:source>
<dc:source>DESY Publication Database</dc:source>
<dc:source>OpenAIRE</dc:source>
<dc:source>Publikationenserver der Georg-August-Universität Göttingen</dc:source>
<dc:source>arXiv.org e-Print Archive</dc:source>
<dc:source>CORE (RIOXX-UK Aggregator)</dc:source>
<dc:source>eScholarship - University of California</dc:source>
<dc:source>Universidade do Minho: RepositoriUM</dc:source>
<dc:source>Dokuz Eylul University Open Archive System</dc:source>
<dc:source>Repositori d'Objectes Digitals per a l'Ensenyament la Recerca i la
Cultura</dc:source>
<dc:relation>info:eu-repo/semantics/altIdentifier/doi/10.1088/1748-0221/7/01/P01013</dc:relation>
<dc:relation>info:eu-repo/semantics/altIdentifier/doi/10.1088/1748-0221/7/01/P01013.</dc:relation>
<dc:type>Article</dc:type>
<dc:identifier>http://hdl.handle.net/11376/1605</dc:identifier>
<dc:type>Article</dc:type>
<dc:identifier>http://www.escholarship.org/uc/item/05j2j2br</dc:identifier>
<dc:type>Unknown</dc:type>
<dc:identifier>http://cds.cern.ch/record/1394292</dc:identifier>
<dc:type>Article</dc:type>
<dc:identifier>http://eprints.lancs.ac.uk/68235/</dc:identifier>
<dc:type>Article</dc:type>
<dc:identifier>http://hdl.handle.net/10550/36188</dc:identifier>
<dc:type>Article</dc:type>
<dc:identifier>http://eprints.gla.ac.uk/65933/1/65933.pdf</dc:identifier>
<dc:type>Preprint</dc:type>
<dc:identifier>http://arxiv.org/abs/1110.6191</dc:identifier>
<dc:type>Article</dc:type>
<dc:identifier>http://dare.uva.nl/personal/pure/en/publications/a-study-of-the-material-in-the-atlas-inner-detector-using-secondary-hadronic-interactions(6b7667e2-04e2-4a66-92a8-ff4edbf61a17).html</dc:identifier>
<dc:type>Article</dc:type>
<dc:identifier>http://hdl.handle.net/1822/48768</dc:identifier>
<dc:type>Article</dc:type>
<dc:identifier>http://resolver.sub.uni-goettingen.de/purl?gs-1/12231</dc:identifier>
<dc:type>Article</dc:type>
<dc:identifier>http://bib-pubdb1.desy.de/search?p=id:%22PHPPUBDB-21212%22</dc:identifier>
<dc:identifier>http://bib-pubdb1.desy.de/record/96807/files/CERN-PH-EP-2011-147_1110.6191v2.pdf</dc:identifier>
<dc:identifier>http://bib-pubdb1.desy.de//record/96807/files/CERN-PH-EP-2011-147_1110.6191v2.pdf</dc:identifier>
<dc:identifier>http://bib-pubdb1.desy.de/record/96807</dc:identifier>
<dc:type>Article</dc:type>
<dc:identifier>http://arxiv.org/abs/1110.6191</dc:identifier>
<dc:type>Article</dc:type>
<dc:identifier>http://hdl.handle.net/11376/1605</dc:identifier>
<dc:identifier>http://dx.doi.org/10.1088/1748-0221/7/01/P01013</dc:identifier>
<dc:type>Article</dc:type>
<dc:identifier>http://hdl.handle.net/2066/93736</dc:identifier>
<dc:creator>ATLAS Collaboration</dc:creator>
<dr:CobjCategory type="publication">0001</dr:CobjCategory>
<oaf:refereed>0002</oaf:refereed>
<oaf:dateAccepted>2016-05-02</oaf:dateAccepted>
<oaf:accessrights>OPEN</oaf:accessrights>
<oaf:language>und</oaf:language>
<oaf:embargoenddate/>
<oaf:hostedBy id="infrastruct_::openaire" name="OpenAIRE"/>
<oaf:collectedFrom id="infrastruct_::openaire" name="OpenAIRE"/>
<oaf:journal eissn="" ep="" iss="1748-0221" issn="1748-0221" lissn="" sp="" vol=""/>
</oai:metadata>
<about>
<provenance xmlns="http://www.openarchives.org/OAI/2.0/provenance"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.openarchives.org/OAI/2.0/provenance http://www.openarchives.org/OAI/2.0/provenance.xsd">
<originDescription harvestDate="2020-08-02T22:55:40.866Z" altered="true">
<baseURL>file%3A%2F%2F%2Fsrv%2Fclaims%2Frecords%2Fpublication%2Fopenaire</baseURL>
<identifier/>
<datestamp/>
<metadataNamespace/>
</originDescription>
</provenance>
<oaf:datainfo>
<oaf:inferred>false</oaf:inferred>
<oaf:deletedbyinference>false</oaf:deletedbyinference>
<oaf:trust>0.9</oaf:trust>
<oaf:inferenceprovenance/>
<oaf:provenanceaction schemename="dnet:provenanceActions"
schemeid="dnet:provenanceActions" classname="user:claim" classid="user:claim"/>
</oaf:datainfo>
</about>
</oai:record>

View File

@@ -0,0 +1,88 @@
<?xml version="1.0" encoding="UTF-8"?>
<record xmlns:dr="http://www.driver-repository.eu/namespace/dr"
xmlns:oaf="http://namespace.openaire.eu/oaf" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<oai:header xmlns="http://namespace.openaire.eu/"
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:dri="http://www.driver-repository.eu/namespace/dri"
xmlns:oai="http://www.openarchives.org/OAI/2.0/" xmlns:prov="http://www.openarchives.org/OAI/2.0/provenance">
<dri:objIdentifier>r3f5b9831893::cca7367159bc3ff90cd2f75bf9dc21c4</dri:objIdentifier>
<dri:recordIdentifier>oai:nakala.fr:hdl_11280_847e01df</dri:recordIdentifier>
<dri:dateOfCollection>2020-08-01T00:16:24.742Z</dri:dateOfCollection>
<oaf:datasourceprefix>r3f5b9831893</oaf:datasourceprefix>
<identifier xmlns="http://www.openarchives.org/OAI/2.0/">oai:nakala.fr:hdl_11280_847e01df</identifier>
<datestamp xmlns="http://www.openarchives.org/OAI/2.0/">2020-06-08T01:01:38Z</datestamp>
<setSpec xmlns="http://www.openarchives.org/OAI/2.0/">hdl_11280_2b09fc10</setSpec>
<setSpec xmlns="http://www.openarchives.org/OAI/2.0/">hdl_11280_c1bc48d0</setSpec>
<setSpec xmlns="http://www.openarchives.org/OAI/2.0/">hdl_11280_57c8db3a</setSpec>
<dr:dateOfTransformation>2020-08-01T00:31:35.625Z</dr:dateOfTransformation>
</oai:header>
<metadata>
<datacite:resource xmlns="http://www.openarchives.org/OAI/2.0/"
xmlns:datacite="http://datacite.org/schema/kernel-4"
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:dri="http://www.driver-repository.eu/namespace/dri"
xmlns:oai="http://www.openarchives.org/OAI/2.0/"
xmlns:prov="http://www.openarchives.org/OAI/2.0/provenance" xsi:schemaLocation="http://datacite.org/schema/kernel-4 https://schema.datacite.org/meta/kernel-4/metadata.xsd">
<datacite:alternateIdentifier identifierType="URL" xmlns:datacite="http://datacite.org/schema/kernel-4/">277</datacite:alternateIdentifier>
<datacite:identifier identifierType="Handle" xmlns:datacite="http://datacite.org/schema/kernel-4/">http://hdl.handle.net/11280/847e01df</datacite:identifier>
<alternateIdentifiers>
<alternateIdentifier alternateIdentifierType="URL">http://hdl.handle.net/http://hdl.handle.net/11280/847e01df</alternateIdentifier>
</alternateIdentifiers>
<datacite:alternateIdentifier identifierType="URL" xmlns:datacite="http://datacite.org/schema/kernel-4/">http://nakala.fr/data/11280/847e01df</datacite:alternateIdentifier>
<datacite:creators xmlns:datacite="http://datacite.org/schema/kernel-4/">
<datacite:creator>
<datacite:creatorName>DHAAP</datacite:creatorName>
</datacite:creator>
</datacite:creators>
<datacite:titles xmlns:datacite="http://datacite.org/schema/kernel-4/">
<datacite:title>CVP_Notice277-1 place du Docteur Antoine Béclère _PHO02.jpg</datacite:title>
</datacite:titles>
<datacite:descriptions xmlns:datacite="http://datacite.org/schema/kernel-4/">
<datacite:description descriptionType="Abstract">Hôpital Saint-Antoine. Fragment de dalle funéraire trouvée en décembre 1932. Paris (XIIème arr.). Photographie d'Albert Citerne (1876-1970). Plaque de verre, 1932. Département Histoire de l'Architecture et Archéologie de Paris.</datacite:description>
<datacite:description descriptionType="Abstract">Nfa_1146</datacite:description>
<datacite:description descriptionType="Abstract">Hôpital Saint-Antoine. Fragment de dalle funéraire trouvée en décembre 1932. Paris (XIIème arr.). Photographie d'Albert Citerne (1876-1970). Plaque de verre, 1932. Département Histoire de l'Architecture et Archéologie de Paris.</datacite:description>
</datacite:descriptions>
<datacite:publisher xmlns:datacite="http://datacite.org/schema/kernel-4/">Nakala by Huma-Num</datacite:publisher>
<datacite:contributors xmlns:datacite="http://datacite.org/schema/kernel-4/">
<datacite:contributor contributorType="Other">
<datacite:contributorName>DHAAP, Pôle Archéologique</datacite:contributorName>
</datacite:contributor>
</datacite:contributors>
<datacite:dates xmlns:datacite="http://datacite.org/schema/kernel-4/">
<datacite:date dateType="Created">1932</datacite:date>
</datacite:dates>
<datacite:resourceType resourceTypeGeneral="Image" xmlns:datacite="http://datacite.org/schema/kernel-4/">StillImage</datacite:resourceType>
<datacite:rightsList xmlns:datacite="http://datacite.org/schema/kernel-4/">
<datacite:rights rightsURI="info:eu-repo/semantics/openAccess"/>
</datacite:rightsList>
</datacite:resource>
<oaf:identifier identifierType="handle">http://hdl.handle.net/11280/847e01df</oaf:identifier>
<dr:CobjCategory type="dataset">0025</dr:CobjCategory>
<oaf:dateAccepted/>
<oaf:accessrights>OPEN</oaf:accessrights>
<oaf:language>und</oaf:language>
<oaf:hostedBy id="re3data_____::r3d100012102" name="NAKALA"/>
<oaf:collectedFrom id="re3data_____::r3d100012102" name="NAKALA"/>
</metadata>
<about xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:dri="http://www.driver-repository.eu/namespace/dri"
xmlns:oai="http://www.openarchives.org/OAI/2.0/" xmlns:prov="http://www.openarchives.org/OAI/2.0/provenance">
<provenance xmlns="http://www.openarchives.org/OAI/2.0/provenance" xsi:schemaLocation="http://www.openarchives.org/OAI/2.0/provenance http://www.openarchives.org/OAI/2.0/provenance.xsd">
<originDescription altered="true" harvestDate="2020-08-01T00:16:24.742Z">
<baseURL>https%3A%2F%2Fwww.nakala.fr%2Foai_oa%2F11280%2F8892ab4b</baseURL>
<identifier>oai:nakala.fr:hdl_11280_847e01df</identifier>
<datestamp>2020-06-08T01:01:38Z</datestamp>
<metadataNamespace/>
</originDescription>
</provenance>
<oaf:datainfo>
<oaf:inferred>false</oaf:inferred>
<oaf:deletedbyinference>false</oaf:deletedbyinference>
<oaf:trust>0.9</oaf:trust>
<oaf:inferenceprovenance/>
<oaf:provenanceaction classid="sysimport:crosswalk:datasetarchive"
classname="sysimport:crosswalk:datasetarchive"
schemeid="dnet:provenanceActions" schemename="dnet:provenanceActions"/>
</oaf:datainfo>
</about>
</record>