From f7b9a7a9af1edff432147b6cced9a6c1f1c42c9d Mon Sep 17 00:00:00 2001
From: Michele Artini
Date: Fri, 10 Jan 2020 15:55:23 +0100
Subject: [PATCH 01/17] entity migration (partial implementation)

---
 dhp-workflows/dhp-aggregation/pom.xml | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/dhp-workflows/dhp-aggregation/pom.xml b/dhp-workflows/dhp-aggregation/pom.xml
index 328e783c4..d031c0308 100644
--- a/dhp-workflows/dhp-aggregation/pom.xml
+++ b/dhp-workflows/dhp-aggregation/pom.xml
@@ -25,6 +25,12 @@
 			<artifactId>dhp-common</artifactId>
 			<version>${project.version}</version>
 		</dependency>
+
+		<dependency>
+			<groupId>eu.dnetlib.dhp</groupId>
+			<artifactId>dhp-schemas</artifactId>
+			<version>${project.version}</version>
+		</dependency>
 
 		<dependency>
 			<groupId>net.sf.saxon</groupId>
@@ -44,6 +50,11 @@
 			<groupId>jaxen</groupId>
 			<artifactId>jaxen</artifactId>
 		</dependency>
+
+		<dependency>
+			<groupId>org.mongodb</groupId>
+			<artifactId>mongo-java-driver</artifactId>
+		</dependency>
 
 		<dependency>
 			<groupId>org.mockito</groupId>

From 81f82b5d34d67d61017d546fe761daf0b717ca27 Mon Sep 17 00:00:00 2001
From: Michele Artini
Date: Fri, 17 Jan 2020 15:26:21 +0100
Subject: [PATCH 02/17] partial implementation of applications to migrate
 entities

---
 .../migration/AbstractMigrateApplication.java |  61 +++
 .../eu/dnetlib/dhp/migration/DbClient.java    |  58 +++
 .../dnetlib/dhp/migration/MdstoreClient.java  |  87 ++++
 .../MigrateDbEntitiesApplication.java         | 390 ++++++++++++++++++
 .../MigrateMongoMdstoresApplication.java      | 190 +++++++++
 .../dnetlib/dhp/migration/MigrationUtils.java | 164 ++++++++
 .../migrate_db_entities_parameters.json       |  38 ++
 .../migrate_mongo_mstores_parameters.json     |  50 +++
 .../sql/queryDatasourceOrganization.sql       |  16 +
 .../dhp/migration/sql/queryDatasources.sql    | 147 +++++++
 .../dhp/migration/sql/queryOrganizations.sql  |  36 ++
 .../sql/queryOrganizationsFromOpenOrgsDB.sql  |  53 +++
 .../sql/queryProjectOrganization.sql          |  16 +
 .../dhp/migration/sql/queryProjects.sql       |  87 ++++
 .../sql/querySimilarityFromOpenOrgsDB.sql     |  17 +
 15 files changed, 1410 insertions(+)
 create mode 100644 dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMigrateApplication.java
 create mode 100644 dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/DbClient.java
 create mode 100644 dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MdstoreClient.java
 create mode 100644 dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateDbEntitiesApplication.java
 create mode 100644 dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateMongoMdstoresApplication.java
 create mode 100644 dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrationUtils.java
 create mode 100644 dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/migrate_db_entities_parameters.json
 create mode 100644 dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/migrate_mongo_mstores_parameters.json
 create mode 100644 dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryDatasourceOrganization.sql
 create mode 100644 dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryDatasources.sql
 create mode 100644 dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryOrganizations.sql
 create mode 100644 dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryOrganizationsFromOpenOrgsDB.sql
 create mode 100644 dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryProjectOrganization.sql
 create mode 100644 dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryProjects.sql
 create mode 100644 dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/querySimilarityFromOpenOrgsDB.sql
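For orientation, the two entry points added by this patch are plain Java main() classes driven by the JSON parameter specifications included below. A minimal invocation sketch follows — the argument names come from migrate_db_entities_parameters.json and migrate_mongo_mstores_parameters.json, while every value is a placeholder and the exact flag syntax depends on ArgumentApplicationParser:

public class MigrationLauncherSketch {

	public static void main(final String[] args) throws Exception {
		// 1) dump the entities registered in the PostgreSQL database to a HDFS sequence file
		MigrateDbEntitiesApplication.main(new String[] {
				"-p", "/tmp/migration/db_entities.seq",   // hdfsPath
				"-n", "hdfs://nn1.example.org:8020",      // namenode
				"-u", "dnet",                             // hdfsUser
				"-dburl", "jdbc:postgresql://localhost:5432/dnet_openaireplus",
				"-dbuser", "dnet",
				"-dbpasswd", "***" });

		// 2) dump the metadata records stored in the MongoDB mdstores
		MigrateMongoMdstoresApplication.main(new String[] {
				"-p", "/tmp/migration/mdstore_records.seq",
				"-n", "hdfs://nn1.example.org:8020",
				"-u", "dnet",
				"-mongourl", "mongodb://localhost:27017",
				"-db", "mdstore",
				"-f", "DMF",       // mdFormat (placeholder)
				"-l", "store",     // mdLayout (placeholder)
				"-i", "cleaned" }); // mdInterpretation (placeholder)
	}
}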
diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMigrateApplication.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMigrateApplication.java
new file mode 100644
index 000000000..a5c8b2775
--- /dev/null
+++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMigrateApplication.java
@@ -0,0 +1,61 @@
+package eu.dnetlib.dhp.migration;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.net.URI;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.Text;
+import org.codehaus.jackson.map.ObjectMapper;
+
+import eu.dnetlib.dhp.schema.oaf.Oaf;
+
+public class AbstractMigrateApplication implements Closeable {
+
+	private final AtomicInteger counter = new AtomicInteger(0);
+
+	private final IntWritable key = new IntWritable(counter.get());
+
+	private final Text value = new Text();
+
+	private final ObjectMapper objectMapper = new ObjectMapper();
+
+	private final SequenceFile.Writer writer;
+
+	public AbstractMigrateApplication(final String hdfsPath, final String hdfsNameNode, final String hdfsUser) throws Exception {
+		this.writer = SequenceFile.createWriter(getConf(hdfsNameNode, hdfsUser), SequenceFile.Writer.file(new Path(hdfsPath)), SequenceFile.Writer
+				.keyClass(IntWritable.class), SequenceFile.Writer.valueClass(Text.class));
+	}
+
+	private Configuration getConf(final String hdfsNameNode, final String hdfsUser) throws IOException {
+		final Configuration conf = new Configuration();
+		conf.set("fs.defaultFS", hdfsNameNode);
+		conf.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
+		conf.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
+		System.setProperty("HADOOP_USER_NAME", hdfsUser);
+		System.setProperty("hadoop.home.dir", "/");
+		FileSystem.get(URI.create(hdfsNameNode), conf);
+		return conf;
+	}
+
+	protected void emitOaf(final Oaf oaf) {
+		try {
+			key.set(counter.getAndIncrement());
+			value.set(objectMapper.writeValueAsString(oaf));
+			writer.append(key, value);
+		} catch (final Exception e) {
+			e.printStackTrace();
+		}
+	}
+
+	@Override
+	public void close() throws IOException {
+		writer.close();
+	}
+
+}
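Every migration run therefore produces one sequence file of JSON-serialized Oaf objects. A minimal sketch of a consumer for that file — assuming the IntWritable/Text layout used by emitOaf() above; the namenode URI and the path are placeholders:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.codehaus.jackson.map.ObjectMapper;

public class SequenceFileReaderSketch {

	public static void main(final String[] args) throws IOException {
		final Configuration conf = new Configuration();
		conf.set("fs.defaultFS", "hdfs://nn1.example.org:8020"); // placeholder namenode

		try (final SequenceFile.Reader reader =
				new SequenceFile.Reader(conf, SequenceFile.Reader.file(new Path("/tmp/migration/db_entities.seq")))) {
			final IntWritable key = new IntWritable();
			final Text value = new Text();
			final ObjectMapper mapper = new ObjectMapper();
			while (reader.next(key, value)) {
				// each value holds one Oaf entity serialized as JSON
				System.out.println(key.get() + " -> " + mapper.readTree(value.toString()).toString());
			}
		}
	}
}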
diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/DbClient.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/DbClient.java
new file mode 100644
index 000000000..e9fee63b9
--- /dev/null
+++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/DbClient.java
@@ -0,0 +1,58 @@
+package eu.dnetlib.dhp.migration;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.function.Consumer;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+public class DbClient implements Closeable {
+
+	private static final Log log = LogFactory.getLog(DbClient.class);
+
+	private Connection connection;
+
+	public DbClient(final String address, final String login, final String password) {
+
+		try {
+			Class.forName("org.postgresql.Driver");
+			this.connection = DriverManager.getConnection(address, login, password);
+			this.connection.setAutoCommit(false);
+		} catch (final Exception e) {
+			log.error(e.getClass().getName() + ": " + e.getMessage());
+			throw new RuntimeException(e);
+		}
+		log.info("Opened database successfully");
+	}
+
+	public void processResults(final String sql, final Consumer<ResultSet> consumer) {
+
+		try (final Statement stmt = connection.createStatement()) {
+			try (final ResultSet rs = stmt.executeQuery(sql)) {
+				while (rs.next()) {
+					consumer.accept(rs);
+				}
+			} catch (final SQLException e) {
+				throw new RuntimeException(e);
+			}
+		} catch (final SQLException e1) {
+			throw new RuntimeException(e1);
+		}
+	}
+
+	@Override
+	public void close() throws IOException {
+		try {
+			connection.close();
+		} catch (final SQLException e) {
+			throw new RuntimeException(e);
+		}
+	}
+
+}
diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MdstoreClient.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MdstoreClient.java
new file mode 100644
index 000000000..971d7f165
--- /dev/null
+++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MdstoreClient.java
@@ -0,0 +1,87 @@
+package eu.dnetlib.dhp.migration;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.stream.StreamSupport;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.bson.Document;
+
+import com.google.common.collect.Iterables;
+import com.mongodb.MongoClient;
+import com.mongodb.MongoClientURI;
+import com.mongodb.client.MongoCollection;
+import com.mongodb.client.MongoDatabase;
+
+public class MdstoreClient implements Closeable {
+
+	private final MongoClient client;
+	private final MongoDatabase db;
+
+	private static final String COLL_METADATA = "metadata";
+	private static final String COLL_METADATA_MANAGER = "metadataManager";
+
+	private static final Log log = LogFactory.getLog(MdstoreClient.class);
+
+	public MdstoreClient(final String baseUrl, final String dbName) {
+		this.client = new MongoClient(new MongoClientURI(baseUrl));
+		this.db = getDb(client, dbName);
+	}
+
+	public Map<String, String> validCollections(final String mdFormat, final String mdLayout, final String mdInterpretation) {
+
+		final Map<String, String> transactions = new HashMap<>();
+		for (final Document entry : getColl(db, COLL_METADATA_MANAGER).find()) {
+			final String mdId = entry.getString("mdId");
+			final String currentId = entry.getString("currentId");
+			if (StringUtils.isNoneBlank(mdId, currentId)) {
+				transactions.put(mdId, currentId);
+			}
+		}
+
+		final Map<String, String> res = new HashMap<>();
+		for (final Document entry : getColl(db, COLL_METADATA).find()) {
+			if (entry.getString("format").equals(mdFormat) && entry.getString("layout").equals(mdLayout)
+					&& entry.getString("interpretation").equals(mdInterpretation) && transactions.containsKey(entry.getString("mdId"))) {
+				res.put(entry.getString("mdId"), transactions.get(entry.getString("mdId")));
+			}
+		}
+
+		return res;
+	}
+
+	private MongoDatabase getDb(final MongoClient client, final String dbName) {
+		if (!Iterables.contains(client.listDatabaseNames(), dbName)) {
+			final String err = String.format("Database '%s' not found in %s", dbName, client.getAddress());
+			log.warn(err);
+			throw new RuntimeException(err);
+		}
+		return client.getDatabase(dbName);
+	}
+
+	private MongoCollection<Document> getColl(final MongoDatabase db, final String collName) {
+		if (!Iterables.contains(db.listCollectionNames(), collName)) {
+			final String err = String.format("Missing collection '%s' in database '%s'", collName, db.getName());
+			log.warn(err);
+			throw new RuntimeException(err);
+		}
+		return db.getCollection(collName);
+	}
+
+	public Iterable<String> listRecords(final String coll) {
+		return () -> StreamSupport.stream(getColl(db, coll).find().spliterator(), false)
+				.filter(e -> e.containsKey("body"))
+				.map(e -> e.getString("body"))
+				.iterator();
+	}
+
+	@Override
+	public void close() throws IOException {
+		client.close();
+	}
+
+}
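A short usage sketch for the two clients above — the connection strings and the format/layout/interpretation triple are placeholders, while the dsm_datasources table and its columns come from the SQL queries later in this patch:

import java.sql.SQLException;
import java.util.Map;

public class MigrationClientsSketch {

	public static void main(final String[] args) throws Exception {
		// DbClient streams each row of an arbitrary query to a consumer
		try (final DbClient db = new DbClient("jdbc:postgresql://localhost:5432/dnet_openaireplus", "dnet", "***")) {
			db.processResults("SELECT id, officialname FROM dsm_datasources", rs -> {
				try {
					System.out.println(rs.getString("id") + " - " + rs.getString("officialname"));
				} catch (final SQLException e) {
					throw new RuntimeException(e);
				}
			});
		}

		// MdstoreClient iterates the record bodies of every valid mdstore
		try (final MdstoreClient mongo = new MdstoreClient("mongodb://localhost:27017", "mdstore")) {
			for (final Map.Entry<String, String> e : mongo.validCollections("DMF", "store", "cleaned").entrySet()) {
				for (final String xml : mongo.listRecords(e.getValue())) {
					System.out.println(e.getKey() + " -> " + xml.length() + " chars of metadata XML");
				}
			}
		}
	}
}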
diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateDbEntitiesApplication.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateDbEntitiesApplication.java
new file mode 100644
index 000000000..60a7c24f7
--- /dev/null
+++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateDbEntitiesApplication.java
@@ -0,0 +1,390 @@
+package eu.dnetlib.dhp.migration;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.sql.ResultSet;
+import java.util.Arrays;
+import java.util.function.Consumer;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import eu.dnetlib.dhp.schema.oaf.DataInfo;
+import eu.dnetlib.dhp.schema.oaf.Datasource;
+import eu.dnetlib.dhp.schema.oaf.Organization;
+import eu.dnetlib.dhp.schema.oaf.Project;
+import eu.dnetlib.dhp.schema.oaf.Relation;
+
+public class MigrateDbEntitiesApplication extends AbstractMigrateApplication implements Closeable {
+
+	private static final Log log = LogFactory.getLog(MigrateDbEntitiesApplication.class);
+
+	private final DbClient dbClient;
+
+	public static void main(final String[] args) throws Exception {
+		final ArgumentApplicationParser parser = new ArgumentApplicationParser(
+				IOUtils.toString(MigrateDbEntitiesApplication.class.getResourceAsStream("/eu/dnetlib/dhp/migration/migrate_db_entities_parameters.json")));
+
+		parser.parseArgument(args);
+
+		final String dbUrl = parser.get("postgresUrl");
+		final String dbUser = parser.get("postgresUser");
+		final String dbPassword = parser.get("postgresPassword");
+
+		final String hdfsPath = parser.get("hdfsPath");
+		final String hdfsNameNode = parser.get("namenode");
+		final String hdfsUser = parser.get("hdfsUser");
+
+		try (final MigrateDbEntitiesApplication smdbe = new MigrateDbEntitiesApplication(hdfsPath, hdfsNameNode, hdfsUser, dbUrl, dbUser, dbPassword)) {
+			smdbe.execute("queryDatasources.sql", smdbe::processDatasource);
+			smdbe.execute("queryProjects.sql", smdbe::processProject);
+			smdbe.execute("queryOrganizations.sql", smdbe::processOrganization);
+			smdbe.execute("queryDatasourceOrganization.sql", smdbe::processDatasourceOrganization);
+			smdbe.execute("queryProjectOrganization.sql", smdbe::processProjectOrganization);
+		}
+
+	}
+
+	public MigrateDbEntitiesApplication(final String hdfsPath, final String hdfsNameNode, final String hdfsUser, final String dbUrl, final String dbUser,
+			final String dbPassword) throws Exception {
+		super(hdfsPath, hdfsNameNode, hdfsUser);
+		this.dbClient = new DbClient(dbUrl, dbUser, dbPassword);
+	}
+
+	public void execute(final String sqlFile, final Consumer<ResultSet> consumer) throws Exception {
+		final String sql =
IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/migration/sql/" + sqlFile)); + dbClient.processResults(sql, consumer); + } + + public void processDatasource(final ResultSet rs) { + try { + + final DataInfo info = MigrationUtils.dataInfo(null, null, null, null, null, null); // TODO + + final Datasource ds = new Datasource(); + + ds.setId(MigrationUtils.createOpenaireId("10", rs.getString("datasourceid"))); + ds.setOriginalId(Arrays.asList(rs.getString("datasourceid"))); + ds.setCollectedfrom(MigrationUtils.listKeyValues(rs.getString("collectedfromid"), rs.getString("collectedfromname"))); + ds.setPid(null); // List // TODO + ds.setDateofcollection(rs.getDate("dateofcollection").toString()); + ds.setDateoftransformation(null); // TODO + ds.setExtraInfo(null); // TODO + ds.setOaiprovenance(null); // TODO + + ds.setDatasourcetype(null); // Qualifier datasourcetype) { + ds.setOpenairecompatibility(null); // Qualifier openairecompatibility) { + ds.setOfficialname(MigrationUtils.field(rs.getString("officialname"), info)); + ds.setEnglishname(MigrationUtils.field(rs.getString("englishname"), info)); + ds.setWebsiteurl(MigrationUtils.field(rs.getString("websiteurl"), info)); + ds.setLogourl(MigrationUtils.field(rs.getString("logourl"), info)); + ds.setContactemail(MigrationUtils.field(rs.getString("contactemail"), info)); + ds.setNamespaceprefix(MigrationUtils.field(rs.getString("namespaceprefix"), info)); + ds.setLatitude(MigrationUtils.field(Double.toString(rs.getDouble("latitude")), info)); + ds.setLongitude(MigrationUtils.field(Double.toString(rs.getDouble("longitude")), info)); + ds.setDateofvalidation(MigrationUtils.field(rs.getDate("dateofvalidation").toString(), info)); + ds.setDescription(MigrationUtils.field(rs.getString("description"), info)); + ds.setSubjects(null); // List subjects) { + ds.setOdnumberofitems(MigrationUtils.field(Double.toString(rs.getInt("odnumberofitems")), info)); + ds.setOdnumberofitemsdate(MigrationUtils.field(rs.getDate("odnumberofitemsdate").toString(), info)); + ds.setOdpolicies(MigrationUtils.field(rs.getString("odpolicies"), info)); + ds.setOdlanguages(MigrationUtils.listFields(info, rs.getArray("odlanguages"))); + ds.setOdcontenttypes(MigrationUtils.listFields(info, rs.getArray("odcontenttypes"))); + ds.setAccessinfopackage(MigrationUtils.listFields(info, rs.getArray("accessinfopackage"))); + ds.setReleasestartdate(MigrationUtils.field(rs.getDate("releasestartdate").toString(), info)); + ds.setReleaseenddate(MigrationUtils.field(rs.getDate("releaseenddate").toString(), info)); + ds.setMissionstatementurl(MigrationUtils.field(rs.getString("missionstatementurl"), info)); + ds.setDataprovider(MigrationUtils.field(rs.getBoolean("dataprovider"), info)); + ds.setServiceprovider(MigrationUtils.field(rs.getBoolean("serviceprovider"), info)); + ds.setDatabaseaccesstype(MigrationUtils.field(rs.getString("databaseaccesstype"), info)); + ds.setDatauploadtype(MigrationUtils.field(rs.getString("datauploadtype"), info)); + ds.setDatabaseaccessrestriction(MigrationUtils.field(rs.getString("databaseaccessrestriction"), info)); + ds.setDatauploadrestriction(MigrationUtils.field(rs.getString("datauploadrestriction"), info)); + ds.setVersioning(MigrationUtils.field(rs.getBoolean("versioning"), info)); + ds.setCitationguidelineurl(MigrationUtils.field(rs.getString("citationguidelineurl"), info)); + ds.setQualitymanagementkind(MigrationUtils.field(rs.getString("qualitymanagementkind"), info)); + ds.setPidsystems(MigrationUtils.field(rs.getString("pidsystems"), 
info)); + ds.setCertificates(MigrationUtils.field(rs.getString("certificates"), info)); + ds.setPolicies(null); // List // TODO + ds.setJournal(null); // Journal // TODO + + // rs.getString("datasourceid"); + rs.getArray("identities"); + // rs.getString("officialname"); + // rs.getString("englishname"); + // rs.getString("contactemail"); + rs.getString("openairecompatibility"); // COMPLEX ...@@@... + // rs.getString("websiteurl"); + // rs.getString("logourl"); + // rs.getArray("accessinfopackage"); + // rs.getDouble("latitude"); + // rs.getDouble("longitude"); + // rs.getString("namespaceprefix"); + // rs.getInt("odnumberofitems"); // NULL + // rs.getDate("odnumberofitemsdate"); // NULL + rs.getArray("subjects"); + // rs.getString("description"); + // rs.getString("odpolicies"); // NULL + // rs.getArray("odlanguages"); + // rs.getArray("odcontenttypes"); + rs.getBoolean("inferred"); // false + rs.getBoolean("deletedbyinference");// false + rs.getDouble("trust"); // 0.9 + rs.getString("inferenceprovenance"); // NULL + // rs.getDate("dateofcollection"); + // rs.getDate("dateofvalidation"); + // rs.getDate("releasestartdate"); + // rs.getDate("releaseenddate"); + // rs.getString("missionstatementurl"); + // rs.getBoolean("dataprovider"); + // rs.getBoolean("serviceprovider"); + // rs.getString("databaseaccesstype"); + // rs.getString("datauploadtype"); + // rs.getString("databaseaccessrestriction"); + // rs.getString("datauploadrestriction"); + // rs.getBoolean("versioning"); + // rs.getString("citationguidelineurl"); + // rs.getString("qualitymanagementkind"); + // rs.getString("pidsystems"); + // rs.getString("certificates"); + rs.getArray("policies"); + // rs.getString("collectedfromid"); + // rs.getString("collectedfromname"); + rs.getString("datasourcetype"); // COMPLEX XXX@@@@.... 
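			// NOTE: the columns marked COMPLEX pack a Qualifier as
			// 'classid@@@classname@@@schemeid@@@schemename'. A possible way to unpack
			// them with the helper already defined in MigrationUtils (a sketch only,
			// not part of this patch):
			//   final String[] dst = rs.getString("datasourcetype").split("@@@");
			//   ds.setDatasourcetype(MigrationUtils.qualifier(dst[0], dst[1], dst[2], dst[3]));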
+ rs.getString("provenanceaction"); // 'sysimport:crosswalk:entityregistry@@@sysimport:crosswalk:entityregistry@@@dnet:provenance_actions@@@dnet:provenance_actions' + // AS provenanceaction, + rs.getString("journal"); // CONCAT(d.issn, '@@@', d.eissn, '@@@', d.lissn) AS journal + + emitOaf(ds); + } catch (final Exception e) { + // TODO: handle exception + } + } + + public void processProject(final ResultSet rs) { + try { + + final DataInfo info = MigrationUtils.dataInfo(null, null, null, null, null, null); // TODO + + final Project p = new Project(); + + p.setId(MigrationUtils.createOpenaireId("40", rs.getString("projectid"))); + p.setOriginalId(Arrays.asList(rs.getString("projectid"))); + p.setCollectedfrom(MigrationUtils.listKeyValues(rs.getString("collectedfromid"), rs.getString("collectedfromname"))); + p.setPid(null); // List // TODO + + p.setDateofcollection(rs.getDate("dateofcollection").toString()); + p.setDateoftransformation(rs.getDate("dateoftransformation").toString()); + p.setExtraInfo(null); // List //TODO + p.setOaiprovenance(null); // OAIProvenance /TODO + + p.setWebsiteurl(MigrationUtils.field(rs.getString("websiteurl"), info)); + p.setCode(MigrationUtils.field(rs.getString("code"), info)); + p.setAcronym(MigrationUtils.field(rs.getString("acronym"), info)); + p.setTitle(MigrationUtils.field(rs.getString("title"), info)); + p.setStartdate(MigrationUtils.field(rs.getDate("startdate").toString(), info)); + p.setEnddate(MigrationUtils.field(rs.getDate("enddate").toString(), info)); + p.setCallidentifier(MigrationUtils.field(rs.getString("callidentifier"), info)); + p.setKeywords(MigrationUtils.field(rs.getString("keywords"), info)); + p.setDuration(MigrationUtils.field(Integer.toString(rs.getInt("duration")), info)); + p.setEcsc39(MigrationUtils.field(Boolean.toString(rs.getBoolean("ecsc39")), info)); + p.setOamandatepublications(MigrationUtils.field(Boolean.toString(rs.getBoolean("oamandatepublications")), info)); + p.setEcarticle29_3(MigrationUtils.field(Boolean.toString(rs.getBoolean("ecarticle29_3")), info)); + p.setSubjects(null); // List //TODO + p.setFundingtree(null); // List> //TODO + p.setContracttype(null); // Qualifier //TODO + p.setOptional1(MigrationUtils.field(rs.getString("optional1"), info)); + p.setOptional2(MigrationUtils.field(rs.getString("optional2"), info)); + p.setJsonextrainfo(MigrationUtils.field(rs.getString("jsonextrainfo"), info)); + p.setContactfullname(MigrationUtils.field(rs.getString("contactfullname"), info)); + p.setContactfax(MigrationUtils.field(rs.getString("contactfax"), info)); + p.setContactphone(MigrationUtils.field(rs.getString("contactphone"), info)); + p.setContactemail(MigrationUtils.field(rs.getString("contactemail"), info)); + p.setSummary(MigrationUtils.field(rs.getString("summary"), info)); + p.setCurrency(MigrationUtils.field(rs.getString("currency"), info)); + p.setTotalcost(new Float(rs.getDouble("totalcost"))); + p.setFundedamount(new Float(rs.getDouble("fundedamount"))); + + // rs.getString("projectid"); + // rs.getString("code"); + // rs.getString("websiteurl"); + // rs.getString("acronym"); + // rs.getString("title"); + // rs.getDate("startdate"); + // rs.getDate("enddate"); + // rs.getString("callidentifier"); + // rs.getString("keywords"); + // rs.getInt("duration"); + // rs.getBoolean("ecsc39"); + // rs.getBoolean("oamandatepublications"); + // rs.getBoolean("ecarticle29_3"); + // rs.getDate("dateofcollection"); + // rs.getDate("dateoftransformation"); + rs.getBoolean("inferred"); + 
rs.getBoolean("deletedbyinference"); + rs.getDouble("trust"); + rs.getString("inferenceprovenance"); + // rs.getString("optional1"); + // rs.getString("optional2"); + rs.getString("jsonextrainfo"); + // rs.getString("contactfullname"); + // rs.getString("contactfax"); + // rs.getString("contactphone"); + // rs.getString("contactemail"); + // rs.getString("summary"); + // rs.getString("currency"); + // rs.getDouble("totalcost"); + // rs.getDouble("fundedamount"); + // rs.getString("collectedfromid"); + // rs.getString("collectedfromname"); + rs.getString("contracttype"); // COMPLEX + rs.getString("provenanceaction"); // COMPLEX + rs.getArray("pid"); + rs.getArray("subjects"); + rs.getArray("fundingtree"); + + emitOaf(p); + + } catch (final Exception e) { + // TODO: handle exception + } + } + + public void processOrganization(final ResultSet rs) { + try { + + final DataInfo info = MigrationUtils.dataInfo(null, null, null, null, null, null); // TODO + + final Organization o = new Organization(); + + o.setId(MigrationUtils.createOpenaireId("20", rs.getString("organizationid"))); // String id) { + o.setOriginalId(Arrays.asList(rs.getString("organizationid"))); + o.setCollectedfrom(MigrationUtils.listKeyValues(rs.getString("collectedfromid"), rs.getString("collectedfromname"))); + o.setPid(null); // List // TODO + o.setDateofcollection(rs.getDate("dateofcollection").toString()); + o.setDateoftransformation(rs.getDate("dateoftransformation").toString()); + o.setExtraInfo(null); // List // TODO + o.setOaiprovenance(null); // OAIProvenance // TODO + o.setLegalshortname(MigrationUtils.field("legalshortname", info)); + o.setLegalname(MigrationUtils.field("legalname", info)); + o.setAlternativeNames(null); // List> //TODO + o.setWebsiteurl(MigrationUtils.field("websiteurl", info)); + o.setLogourl(MigrationUtils.field("logourl", info)); + o.setEclegalbody(MigrationUtils.field(Boolean.toString(rs.getBoolean("eclegalbody")), info)); + o.setEclegalperson(MigrationUtils.field(Boolean.toString(rs.getBoolean("eclegalperson")), info)); + o.setEcnonprofit(MigrationUtils.field(Boolean.toString(rs.getBoolean("ecnonprofit")), info)); + o.setEcresearchorganization(MigrationUtils.field(Boolean.toString(rs.getBoolean("ecresearchorganization")), info)); + o.setEchighereducation(MigrationUtils.field(Boolean.toString(rs.getBoolean("echighereducation")), info)); + o.setEcinternationalorganizationeurinterests(MigrationUtils + .field(Boolean.toString(rs.getBoolean("ecinternationalorganizationeurinterests")), info)); + o.setEcinternationalorganization(MigrationUtils.field(Boolean.toString(rs.getBoolean("ecinternationalorganization")), info)); + o.setEcenterprise(MigrationUtils.field(Boolean.toString(rs.getBoolean("ecenterprise")), info)); + o.setEcsmevalidated(MigrationUtils.field(Boolean.toString(rs.getBoolean("ecsmevalidated")), info)); + o.setEcnutscode(MigrationUtils.field(Boolean.toString(rs.getBoolean("ecnutscode")), info)); + o.setCountry(null); // Qualifier country) { + + // rs.getString("organizationid"); + // rs.getString("legalshortname"); + // rs.getString("legalname"); + // rs.getString("websiteurl"); + // rs.getString("logourl"); + // rs.getBoolean("eclegalbody"); + // rs.getBoolean("eclegalperson"); + // rs.getBoolean("ecnonprofit"); + // rs.getBoolean("ecresearchorganization"); + // rs.getBoolean("echighereducation"); + // rs.getBoolean("ecinternationalorganizationeurinterests"); + // rs.getBoolean("ecinternationalorganization"); + // rs.getBoolean("ecenterprise"); + // rs.getBoolean("ecsmevalidated"); + // 
rs.getBoolean("ecnutscode"); + rs.getDate("dateofcollection"); + rs.getDate("dateoftransformation"); + rs.getBoolean("inferred"); + rs.getBoolean("deletedbyinference"); + rs.getDouble("trust"); + rs.getString("inferenceprovenance"); + // rs.getString("collectedfromid"); + // rs.getString("collectedfromname"); + rs.getString("country"); + rs.getString("provenanceaction"); + rs.getArray("pid"); + + emitOaf(o); + } catch (final Exception e) { + // TODO: handle exception + } + } + + public void processDatasourceOrganization(final ResultSet rs) { + + try { + final Relation r = new Relation(); + + r.setRelType(null); // TODO + r.setSubRelType(null); // TODO + r.setRelClass(null); // TODO + r.setSource(null); // TODO + r.setTarget(null); // TODO + r.setCollectedFrom(MigrationUtils.listKeyValues("", "")); + + rs.getString("datasource"); + rs.getString("organization"); + rs.getDate("startdate"); // NULL + rs.getDate("enddate"); // NULL + rs.getBoolean("inferred"); // false + rs.getBoolean("deletedbyinference"); // false + rs.getDouble("trust"); // 0.9 + rs.getString("inferenceprovenance"); // NULL + rs.getString("semantics"); // 'providedBy@@@provided + // by@@@dnet:datasources_organizations_typologies@@@dnet:datasources_organizations_typologies' AS + // semantics, + rs.getString("provenanceaction"); // d.provenanceaction || '@@@' || d.provenanceaction || + // '@@@dnet:provenanceActions@@@dnet:provenanceActions' AS provenanceaction + + emitOaf(r); + } catch (final Exception e) { + // TODO: handle exception + } + } + + public void processProjectOrganization(final ResultSet rs) { + try { + final Relation r = new Relation(); + + r.setRelType(null); // TODO + r.setSubRelType(null); // TODO + r.setRelClass(null); // TODO + r.setSource(null); // TODO + r.setTarget(null); // TODO + r.setCollectedFrom(null); + + rs.getString("project"); + rs.getString("resporganization"); + rs.getInt("participantnumber"); + rs.getDouble("contribution"); + rs.getDate("startdate");// null + rs.getDate("enddate");// null + rs.getBoolean("inferred");// false + rs.getBoolean("deletedbyinference"); // false + rs.getDouble("trust"); + rs.getString("inferenceprovenance"); // NULL + rs.getString("semantics"); // po.semanticclass || '@@@' || po.semanticclass || + // '@@@dnet:project_organization_relations@@@dnet:project_organization_relations' AS semantics, + rs.getString("provenanceaction"); // 'sysimport:crosswalk:entityregistry@@@sysimport:crosswalk:entityregistry@@@dnet:provenance_actions@@@dnet:provenance_actions' + // AS provenanceaction + emitOaf(r); + } catch (final Exception e) { + // TODO: handle exception + } + } + + @Override + public void close() throws IOException { + super.close(); + dbClient.close(); + } + +} diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateMongoMdstoresApplication.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateMongoMdstoresApplication.java new file mode 100644 index 000000000..cead2366b --- /dev/null +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateMongoMdstoresApplication.java @@ -0,0 +1,190 @@ +package eu.dnetlib.dhp.migration; + +import java.io.Closeable; +import java.io.IOException; +import java.io.StringReader; +import java.util.ArrayList; +import java.util.List; +import java.util.Map.Entry; + +import org.apache.commons.io.IOUtils; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.dom4j.Document; +import org.dom4j.DocumentException; 
+import org.dom4j.Node; +import org.dom4j.io.SAXReader; + +import eu.dnetlib.dhp.application.ArgumentApplicationParser; +import eu.dnetlib.dhp.schema.oaf.Dataset; +import eu.dnetlib.dhp.schema.oaf.Oaf; +import eu.dnetlib.dhp.schema.oaf.OtherResearchProduct; +import eu.dnetlib.dhp.schema.oaf.Publication; +import eu.dnetlib.dhp.schema.oaf.Relation; +import eu.dnetlib.dhp.schema.oaf.Result; +import eu.dnetlib.dhp.schema.oaf.Software; + +public class MigrateMongoMdstoresApplication extends AbstractMigrateApplication implements Closeable { + + private static final Log log = LogFactory.getLog(MigrateMongoMdstoresApplication.class); + + private final MdstoreClient mdstoreClient; + + public static void main(final String[] args) throws Exception { + final ArgumentApplicationParser parser = new ArgumentApplicationParser( + IOUtils.toString(MigrateMongoMdstoresApplication.class.getResourceAsStream("/eu/dnetlib/dhp/migration/migrate_mongo_mstores_parameters.json"))); + parser.parseArgument(args); + + final String mongoBaseUrl = parser.get("mongoBaseUrl"); + final String mongoDb = parser.get("mongoDb"); + + final String mdFormat = parser.get("mdFormat"); + final String mdLayout = parser.get("mdLayout"); + final String mdInterpretation = parser.get("mdInterpretation"); + + final String hdfsPath = parser.get("hdfsPath"); + final String hdfsNameNode = parser.get("namenode"); + final String hdfsUser = parser.get("hdfsUser"); + + try (final MigrateMongoMdstoresApplication mig = new MigrateMongoMdstoresApplication(hdfsPath, hdfsNameNode, hdfsUser, mongoBaseUrl, mongoDb)) { + mig.processMdRecords(mdFormat, mdLayout, mdInterpretation); + } + + } + + public MigrateMongoMdstoresApplication(final String hdfsPath, final String hdfsNameNode, final String hdfsUser, final String mongoBaseUrl, + final String mongoDb) throws Exception { + super(hdfsPath, hdfsNameNode, hdfsUser); + this.mdstoreClient = new MdstoreClient(mongoBaseUrl, mongoDb); + + } + + public void processMdRecords(final String mdFormat, final String mdLayout, final String mdInterpretation) throws DocumentException { + + for (final Entry entry : mdstoreClient.validCollections(mdFormat, mdLayout, mdInterpretation).entrySet()) { + // final String mdId = entry.getKey(); + final String currentColl = entry.getValue(); + + for (final String xml : mdstoreClient.listRecords(currentColl)) { + for (final Oaf oaf : createOafs(xml)) { + emitOaf(oaf); + } + } + } + } + + private List createOafs(final String xml) throws DocumentException { + final SAXReader reader = new SAXReader(); + final Document doc = reader.read(new StringReader(xml)); + + final String type = doc.valueOf(""); // TODO + + final List oafs = new ArrayList<>(); + + switch (type.toLowerCase()) { + case "publication": + final Publication p = new Publication(); + populateResultFields(p, doc); + p.setJournal(null); // TODO + oafs.add(p); + break; + case "dataset": + final Dataset d = new Dataset(); + populateResultFields(d, doc); + d.setStoragedate(null); // TODO + d.setDevice(null); // TODO + d.setSize(null); // TODO + d.setVersion(null); // TODO + d.setLastmetadataupdate(null); // TODO + d.setMetadataversionnumber(null); // TODO + d.setGeolocation(null); // TODO + oafs.add(d); + break; + case "otherresearchproducts": + final OtherResearchProduct o = new OtherResearchProduct(); + populateResultFields(o, doc); + o.setContactperson(null); // TODO + o.setContactgroup(null); // TODO + o.setTool(null); // TODO + oafs.add(o); + break; + case "software": + final Software s = new Software(); + 
populateResultFields(s, doc);
+			s.setDocumentationUrl(null); // TODO
+			s.setLicense(null); // TODO
+			s.setCodeRepositoryUrl(null); // TODO
+			s.setProgrammingLanguage(null); // TODO
+			oafs.add(s);
+			break;
+		default:
+			log.error("Invalid type: " + type);
+			break;
+		}
+
+		if (!oafs.isEmpty()) {
+			addRelations(oafs, doc, "//*", "TYPE");
+			addRelations(oafs, doc, "//*", "TYPE");
+			addRelations(oafs, doc, "//*", "TYPE");
+		}
+
+		return oafs;
+	}
+
+	private void addRelations(final List<Oaf> oafs, final Document doc, final String xpath, final String type) {
+		for (final Object o : doc.selectNodes(xpath)) {
+			final Node n = (Node) o;
+			final Relation r = new Relation();
+			r.setRelType(null); // TODO
+			r.setSubRelType(null); // TODO
+			r.setRelClass(null); // TODO
+			r.setSource(null); // TODO
+			r.setTarget(null); // TODO
+			r.setCollectedFrom(null); // TODO
+			oafs.add(r);
+		}
+
+	}
+
+	private void populateResultFields(final Result r, final Document doc) {
+		r.setDataInfo(null); // TODO
+		r.setLastupdatetimestamp(null); // TODO
+		r.setId(null); // TODO
+		r.setOriginalId(null); // TODO
+		r.setCollectedfrom(null); // TODO
+		r.setPid(null); // TODO
+		r.setDateofcollection(null); // TODO
+		r.setDateoftransformation(null); // TODO
+		r.setExtraInfo(null); // TODO
+		r.setOaiprovenance(null); // TODO
+		r.setAuthor(null); // TODO
+		r.setResulttype(null); // TODO
+		r.setLanguage(null); // TODO
+		r.setCountry(null); // TODO
+		r.setSubject(null); // TODO
+		r.setTitle(null); // TODO
+		r.setRelevantdate(null); // TODO
+		r.setDescription(null); // TODO
+		r.setDateofacceptance(null); // TODO
+		r.setPublisher(null); // TODO
+		r.setEmbargoenddate(null); // TODO
+		r.setSource(null); // TODO
+		r.setFulltext(null); // TODO
+		r.setFormat(null); // TODO
+		r.setContributor(null); // TODO
+		r.setResourcetype(null); // TODO
+		r.setCoverage(null); // TODO
+		r.setRefereed(null); // TODO
+		r.setContext(null); // TODO
+		r.setExternalReference(null); // TODO
+		r.setInstance(null); // TODO
+		r.setProcessingchargeamount(null); // TODO
+		r.setProcessingchargecurrency(null); // TODO
+	}
+
+	@Override
+	public void close() throws IOException {
+		super.close();
+		mdstoreClient.close();
+	}
+}
diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrationUtils.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrationUtils.java
new file mode 100644
index 000000000..8346a8041
--- /dev/null
+++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrationUtils.java
@@ -0,0 +1,164 @@
+package eu.dnetlib.dhp.migration;
+
+import java.sql.Array;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import org.apache.commons.lang3.StringUtils;
+
+import eu.dnetlib.dhp.schema.oaf.DataInfo;
+import eu.dnetlib.dhp.schema.oaf.ExtraInfo;
+import eu.dnetlib.dhp.schema.oaf.Field;
+import eu.dnetlib.dhp.schema.oaf.Journal;
+import eu.dnetlib.dhp.schema.oaf.KeyValue;
+import eu.dnetlib.dhp.schema.oaf.OAIProvenance;
+import eu.dnetlib.dhp.schema.oaf.OriginDescription;
+import eu.dnetlib.dhp.schema.oaf.Qualifier;
+import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
+import eu.dnetlib.dhp.utils.DHPUtils;
+
+public class MigrationUtils {
+
+	public static KeyValue keyValue(final String k, final String v) {
+		final KeyValue kv = new KeyValue();
+		kv.setKey(k);
+		kv.setValue(v);
+		return kv;
+	}
+
+	public static List<KeyValue> listKeyValues(final String...
s) { + if (s.length % 2 > 0) { throw new RuntimeException("Invalid number of parameters (k,v,k,v,....)"); } + + final List list = new ArrayList<>(); + for (int i = 0; i < s.length; i += 2) { + list.add(keyValue(s[i], s[i + 1])); + } + return list; + } + + public static Field field(final T value, final DataInfo info) { + final Field field = new Field<>(); + field.setValue(value); + field.setDataInfo(info); + return field; + } + + public static List> listFields(final DataInfo info, final String... values) { + return Arrays.stream(values).map(v -> field(v, info)).collect(Collectors.toList()); + } + + public static List> listFields(final DataInfo info, final Array array) { + try { + return listFields(info, (String[]) array.getArray()); + } catch (final SQLException e) { + throw new RuntimeException("Invalid SQL array", e); + } + } + + public static Qualifier qualifier(final String classid, final String classname, final String schemeid, final String schemename) { + final Qualifier q = new Qualifier(); + q.setClassid(classid); + q.setClassname(classname); + q.setSchemeid(schemeid); + q.setSchemename(schemename); + return q; + } + + public static StructuredProperty structuredProperty(final String value, + final String classid, + final String classname, + final String schemeid, + final String schemename, + final DataInfo dataInfo) { + final StructuredProperty sp = new StructuredProperty(); + sp.setValue(value); + sp.setQualifier(qualifier(classid, classname, schemeid, schemename)); + sp.setDataInfo(dataInfo); + return sp; + } + + public static ExtraInfo extraInfo(final String name, final String value, final String typology, final String provenance, final String trust) { + final ExtraInfo info = new ExtraInfo(); + info.setName(name); + info.setValue(value); + info.setTypology(typology); + info.setProvenance(provenance); + info.setTrust(trust); + return info; + } + + public static OAIProvenance oaiIProvenance(final String identifier, + final String baseURL, + final String metadataNamespace, + final Boolean altered, + final String datestamp, + final String harvestDate) { + + final OriginDescription desc = new OriginDescription(); + desc.setIdentifier(identifier); + desc.setBaseURL(baseURL); + desc.setMetadataNamespace(metadataNamespace); + desc.setAltered(altered); + desc.setDatestamp(datestamp); + desc.setHarvestDate(harvestDate); + + final OAIProvenance p = new OAIProvenance(); + p.setOriginDescription(desc); + + return p; + } + + public static Journal journal(final String name, + final String issnPrinted, + final String issnOnline, + final String issnLinking, + final String ep, + final String iss, + final String sp, + final String vol, + final String edition, + final String conferenceplace, + final String conferencedate, + final DataInfo dataInfo) { + final Journal j = new Journal(); + j.setName(name); + j.setIssnPrinted(issnPrinted); + j.setIssnOnline(issnOnline); + j.setIssnLinking(issnLinking); + j.setEp(ep); + j.setIss(iss); + j.setSp(sp); + j.setVol(vol); + j.setEdition(edition); + j.setConferenceplace(conferenceplace); + j.setConferencedate(conferencedate); + j.setDataInfo(dataInfo); + return j; + } + + public static DataInfo dataInfo(final Boolean deletedbyinference, + final String inferenceprovenance, + final Boolean inferred, + final Boolean invisible, + final Qualifier provenanceaction, + final String trust) { + final DataInfo d = new DataInfo(); + d.setDeletedbyinference(deletedbyinference); + d.setInferenceprovenance(inferenceprovenance); + d.setInferred(inferred); + 
d.setInvisible(invisible);
+		d.setProvenanceaction(provenanceaction);
+		d.setTrust(trust);
+		return d;
+	}
+
+	public static String createOpenaireId(final String prefix, final String originalId) {
+		final String nsPrefix = StringUtils.substringBefore(originalId, "::");
+		final String rest = StringUtils.substringAfter(originalId, "::");
+		return String.format("%s|%s::%s", prefix, nsPrefix, DHPUtils.md5(rest));
+	}
+
+}
diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/migrate_db_entities_parameters.json b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/migrate_db_entities_parameters.json
new file mode 100644
index 000000000..861d297ba
--- /dev/null
+++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/migrate_db_entities_parameters.json
@@ -0,0 +1,38 @@
+[
+  {
+    "paramName": "p",
+    "paramLongName": "hdfsPath",
+    "paramDescription": "the path where the sequence file will be stored",
+    "paramRequired": true
+  },
+  {
+    "paramName": "n",
+    "paramLongName": "namenode",
+    "paramDescription": "the Name Node URI",
+    "paramRequired": true
+  },
+  {
+    "paramName": "u",
+    "paramLongName": "hdfsUser",
+    "paramDescription": "the user which creates the hdfs seq file",
+    "paramRequired": true
+  },
+  {
+    "paramName": "dburl",
+    "paramLongName": "postgresUrl",
+    "paramDescription": "postgres url, example: jdbc:postgresql://localhost:5432/testdb",
+    "paramRequired": true
+  },
+  {
+    "paramName": "dbuser",
+    "paramLongName": "postgresUser",
+    "paramDescription": "postgres user",
+    "paramRequired": true
+  },
+  {
+    "paramName": "dbpasswd",
+    "paramLongName": "postgresPassword",
+    "paramDescription": "postgres password",
+    "paramRequired": true
+  }
+]
\ No newline at end of file
diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/migrate_mongo_mstores_parameters.json b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/migrate_mongo_mstores_parameters.json
new file mode 100644
index 000000000..fb5736dc0
--- /dev/null
+++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/migrate_mongo_mstores_parameters.json
@@ -0,0 +1,50 @@
+[
+  {
+    "paramName": "p",
+    "paramLongName": "hdfsPath",
+    "paramDescription": "the path where the sequence file will be stored",
+    "paramRequired": true
+  },
+  {
+    "paramName": "n",
+    "paramLongName": "namenode",
+    "paramDescription": "the Name Node URI",
+    "paramRequired": true
+  },
+  {
+    "paramName": "u",
+    "paramLongName": "hdfsUser",
+    "paramDescription": "the user which creates the hdfs seq file",
+    "paramRequired": true
+  },
+  {
+    "paramName": "mongourl",
+    "paramLongName": "mongoBaseUrl",
+    "paramDescription": "mongoDB url, example: mongodb://[username:password@]host[:port]",
+    "paramRequired": true
+  },
+  {
+    "paramName": "db",
+    "paramLongName": "mongoDb",
+    "paramDescription": "mongo database",
+    "paramRequired": true
+  },
+  {
+    "paramName": "f",
+    "paramLongName": "mdFormat",
+    "paramDescription": "metadata format",
+    "paramRequired": true
+  },
+  {
+    "paramName": "l",
+    "paramLongName": "mdLayout",
+    "paramDescription": "metadata layout",
+    "paramRequired": true
+  },
+  {
+    "paramName": "i",
+    "paramLongName": "mdInterpretation",
+    "paramDescription": "metadata interpretation",
+    "paramRequired": true
+  }
+]
\ No newline at end of file
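The MigrationUtils helpers above are mostly mechanical builders; a small sketch of the two used most often by the entity mappers — the identifier and the datasource name below are made up for illustration, while the "10" prefix for datasources matches its use in processDatasource:

import java.util.List;

import eu.dnetlib.dhp.schema.oaf.KeyValue;

public class MigrationUtilsSketch {

	public static void main(final String[] args) {
		// createOpenaireId keeps the namespace prefix of the original identifier
		// and hashes only the local part: here "10|opendoar____::" + md5("300")
		final String dsId = MigrationUtils.createOpenaireId("10", "opendoar____::300");

		// listKeyValues expects an even number of alternating key/value arguments
		final List<KeyValue> collectedFrom = MigrationUtils.listKeyValues("openaire____::opendoar", "OpenDOAR");

		System.out.println(dsId + " collected from " + collectedFrom.get(0).getValue());
	}
}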
diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryDatasourceOrganization.sql b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryDatasourceOrganization.sql
new file mode 100644
index 000000000..885b6ae09
--- /dev/null
+++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryDatasourceOrganization.sql
@@ -0,0 +1,16 @@
+SELECT
+	dor.datasource AS datasource,
+	dor.organization AS organization,
+	NULL AS startdate,
+	NULL AS enddate,
+	false AS inferred,
+	false AS deletedbyinference,
+	0.9 AS trust,
+	NULL AS inferenceprovenance,
+
+	'providedBy@@@provided by@@@dnet:datasources_organizations_typologies@@@dnet:datasources_organizations_typologies' AS semantics,
+	d.provenanceaction || '@@@' || d.provenanceaction || '@@@dnet:provenanceActions@@@dnet:provenanceActions' AS provenanceaction
+
+FROM dsm_datasource_organization dor
+	LEFT OUTER JOIN dsm_datasources d ON (dor.datasource = d.id)
+
diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryDatasources.sql b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryDatasources.sql
new file mode 100644
index 000000000..8c587f34e
--- /dev/null
+++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryDatasources.sql
@@ -0,0 +1,147 @@
+SELECT
+	d.id AS datasourceid,
+	d.id || array_agg(distinct di.pid) AS identities,
+	d.officialname AS officialname,
+	d.englishname AS englishname,
+	d.contactemail AS contactemail,
+	CASE
+		WHEN (array_agg(DISTINCT COALESCE (a.compatibility_override, a.compatibility):: TEXT) @> ARRAY ['openaire-cris_1.1'])
+			THEN
+				'openaire-cris_1.1@@@OpenAIRE CRIS v1.1@@@dnet:datasourceCompatibilityLevel@@@dnet:datasourceCompatibilityLevel'
+		WHEN (array_agg(DISTINCT COALESCE (a.compatibility_override, a.compatibility):: TEXT) @> ARRAY ['driver', 'openaire2.0'])
+			THEN
+				'driver-openaire2.0@@@OpenAIRE 2.0+ (DRIVER OA, EC funding)@@@dnet:datasourceCompatibilityLevel@@@dnet:datasourceCompatibilityLevel'
+		WHEN (array_agg(DISTINCT COALESCE (a.compatibility_override, a.compatibility) :: TEXT) @> ARRAY ['driver'])
+			THEN
+				'driver@@@OpenAIRE Basic (DRIVER OA)@@@dnet:datasourceCompatibilityLevel@@@dnet:datasourceCompatibilityLevel'
+		WHEN (array_agg(DISTINCT COALESCE (a.compatibility_override, a.compatibility) :: TEXT) @> ARRAY ['openaire2.0'])
+			THEN
+				'openaire2.0@@@OpenAIRE 2.0 (EC funding)@@@dnet:datasourceCompatibilityLevel@@@dnet:datasourceCompatibilityLevel'
+		WHEN (array_agg(DISTINCT COALESCE (a.compatibility_override, a.compatibility) :: TEXT) @> ARRAY ['openaire3.0'])
+			THEN
+				'openaire3.0@@@OpenAIRE 3.0 (OA, funding)@@@dnet:datasourceCompatibilityLevel@@@dnet:datasourceCompatibilityLevel'
+		WHEN (array_agg(DISTINCT COALESCE (a.compatibility_override, a.compatibility) :: TEXT) @> ARRAY ['openaire2.0_data'])
+			THEN
+				'openaire2.0_data@@@OpenAIRE Data (funded, referenced datasets)@@@dnet:datasourceCompatibilityLevel@@@dnet:datasourceCompatibilityLevel'
+		WHEN (array_agg(DISTINCT COALESCE (a.compatibility_override, a.compatibility) :: TEXT) @> ARRAY ['native'])
+			THEN
+				'native@@@proprietary@@@dnet:datasourceCompatibilityLevel@@@dnet:datasourceCompatibilityLevel'
+		WHEN (array_agg(DISTINCT COALESCE (a.compatibility_override, a.compatibility) :: TEXT) @> ARRAY ['hostedBy'])
+			THEN
+				'hostedBy@@@collected from a compatible aggregator@@@dnet:datasourceCompatibilityLevel@@@dnet:datasourceCompatibilityLevel'
+		WHEN (array_agg(DISTINCT COALESCE (a.compatibility_override, a.compatibility) :: TEXT) @> ARRAY ['notCompatible'])
+			THEN
'notCompatible@@@under validation@@@dnet:datasourceCompatibilityLevel@@@dnet:datasourceCompatibilityLevel' + ELSE + 'UNKNOWN@@@not available@@@dnet:datasourceCompatibilityLevel@@@dnet:datasourceCompatibilityLevel' + END AS openairecompatibility, + d.websiteurl AS websiteurl, + d.logourl AS logourl, + array_agg(DISTINCT CASE WHEN a.protocol = 'oai' and last_aggregation_date is not null THEN a.baseurl ELSE NULL END) AS accessinfopackage, + d.latitude AS latitude, + d.longitude AS longitude, + d.namespaceprefix AS namespaceprefix, + NULL AS odnumberofitems, + NULL AS odnumberofitemsdate, + + (SELECT array_agg(s|| '###keywords@@@keywords@@@dnet:subject_classification_typologies@@@dnet:subject_classification_typologies') + FROM UNNEST( + ARRAY( + SELECT trim(s) + FROM unnest(string_to_array(d.subjects, '@@')) AS s)) AS s) AS subjects, + + d.description AS description, + NULL AS odpolicies, + ARRAY(SELECT trim(s) + FROM unnest(string_to_array(d.languages, ',')) AS s) AS odlanguages, + ARRAY(SELECT trim(s) + FROM unnest(string_to_array(d.od_contenttypes, '-')) AS s) AS odcontenttypes, + false AS inferred, + false AS deletedbyinference, + 0.9 AS trust, + NULL AS inferenceprovenance, + d.dateofcollection AS dateofcollection, + d.dateofvalidation AS dateofvalidation, + -- re3data fields + d.releasestartdate AS releasestartdate, + d.releaseenddate AS releaseenddate, + d.missionstatementurl AS missionstatementurl, + d.dataprovider AS dataprovider, + d.serviceprovider AS serviceprovider, + d.databaseaccesstype AS databaseaccesstype, + d.datauploadtype AS datauploadtype, + d.databaseaccessrestriction AS databaseaccessrestriction, + d.datauploadrestriction AS datauploadrestriction, + d.versioning AS versioning, + d.citationguidelineurl AS citationguidelineurl, + d.qualitymanagementkind AS qualitymanagementkind, + d.pidsystems AS pidsystems, + d.certificates AS certificates, + ARRAY[]::text[] AS policies, + dc.id AS collectedfromid, + dc.officialname AS collectedfromname, + d.typology || '@@@' || CASE + WHEN (d.typology = 'crissystem') THEN 'CRIS System' + WHEN (d.typology = 'datarepository::unknown') THEN 'Data Repository' + WHEN (d.typology = 'aggregator::datarepository') THEN 'Data Repository Aggregator' + WHEN (d.typology = 'infospace') THEN 'Information Space' + WHEN (d.typology = 'pubsrepository::institutional') THEN 'Institutional Repository' + WHEN (d.typology = 'aggregator::pubsrepository::institutional') THEN 'Institutional Repository Aggregator' + WHEN (d.typology = 'pubsrepository::journal') THEN 'Journal' + WHEN (d.typology = 'aggregator::pubsrepository::journals') THEN 'Journal Aggregator/Publisher' + WHEN (d.typology = 'pubsrepository::mock') THEN 'Other' + WHEN (d.typology = 'pubscatalogue::unknown') THEN 'Publication Catalogue' + WHEN (d.typology = 'pubsrepository::unknown') THEN 'Publication Repository' + WHEN (d.typology = 'aggregator::pubsrepository::unknown') THEN 'Publication Repository Aggregator' + WHEN (d.typology = 'entityregistry') THEN 'Registry' + WHEN (d.typology = 'scholarcomminfra') THEN 'Scholarly Comm. 
Infrastructure' + WHEN (d.typology = 'pubsrepository::thematic') THEN 'Thematic Repository' + WHEN (d.typology = 'websource') THEN 'Web Source' + WHEN (d.typology = 'entityregistry::projects') THEN 'Funder database' + WHEN (d.typology = 'entityregistry::repositories') THEN 'Registry of repositories' + WHEN (d.typology = 'softwarerepository') THEN 'Software Repository' + WHEN (d.typology = 'aggregator::softwarerepository') THEN 'Software Repository Aggregator' + WHEN (d.typology = 'orprepository') THEN 'Repository' + ELSE 'Other' + END || '@@@dnet:datasource_typologies@@@dnet:datasource_typologies' AS datasourcetype, + 'sysimport:crosswalk:entityregistry@@@sysimport:crosswalk:entityregistry@@@dnet:provenance_actions@@@dnet:provenance_actions' AS provenanceaction, + CONCAT(d.issn, '@@@', d.eissn, '@@@', d.lissn) AS journal + +FROM dsm_datasources d + +LEFT OUTER JOIN dsm_datasources dc on (d.collectedfrom = dc.id) +LEFT OUTER JOIN dsm_api a ON (d.id = a.datasource) +LEFT OUTER JOIN dsm_datasourcepids di ON (d.id = di.datasource) + +GROUP BY + d.id, + d.officialname, + d.englishname, + d.websiteurl, + d.logourl, + d.contactemail, + d.namespaceprefix, + d.description, + d.latitude, + d.longitude, + d.dateofcollection, + d.dateofvalidation, + d.releasestartdate, + d.releaseenddate, + d.missionstatementurl, + d.dataprovider, + d.serviceprovider, + d.databaseaccesstype, + d.datauploadtype, + d.databaseaccessrestriction, + d.datauploadrestriction, + d.versioning, + d.citationguidelineurl, + d.qualitymanagementkind, + d.pidsystems, + d.certificates, + dc.id, + dc.officialname, + d.issn, + d.eissn, + d.lissn diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryOrganizations.sql b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryOrganizations.sql new file mode 100644 index 000000000..682ca3596 --- /dev/null +++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryOrganizations.sql @@ -0,0 +1,36 @@ +SELECT + o.id AS organizationid, + o.legalshortname AS legalshortname, + o.legalname AS legalname, + o.websiteurl AS websiteurl, + o.logourl AS logourl, + o.ec_legalbody AS eclegalbody, + o.ec_legalperson AS eclegalperson, + o.ec_nonprofit AS ecnonprofit, + o.ec_researchorganization AS ecresearchorganization, + o.ec_highereducation AS echighereducation, + o.ec_internationalorganizationeurinterests AS ecinternationalorganizationeurinterests, + o.ec_internationalorganization AS ecinternationalorganization, + o.ec_enterprise AS ecenterprise, + o.ec_smevalidated AS ecsmevalidated, + o.ec_nutscode AS ecnutscode, + o.dateofcollection AS dateofcollection, + o.lastupdate AS dateoftransformation, + false AS inferred, + false AS deletedbyinference, + o.trust AS trust, + '' AS inferenceprovenance, + d.id AS collectedfromid, + d.officialname AS collectedfromname, + + o.country || '@@@dnet:countries' AS country, + 'sysimport:crosswalk:entityregistry@@@sysimport:crosswalk:entityregistry@@@dnet:provenance_actions@@@dnet:provenance_actions' AS provenanceaction, + + ARRAY[]::text[] AS pid +FROM dsm_organizations o + LEFT OUTER JOIN dsm_datasources d ON (d.id = o.collectedfrom) + + + + + diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryOrganizationsFromOpenOrgsDB.sql b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryOrganizationsFromOpenOrgsDB.sql new file mode 100644 index 000000000..dc9550883 --- /dev/null +++ 
b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryOrganizationsFromOpenOrgsDB.sql @@ -0,0 +1,53 @@ +SELECT + o.id AS organizationid, + coalesce((array_agg(a.acronym))[1], o.name) AS legalshortname, + o.name AS legalname, + array_agg(DISTINCT n.name) AS "alternativeNames", + (array_agg(u.url))[1] AS websiteurl, + o.modification_date AS dateoftransformation, + false AS inferred, + false AS deletedbyinference, + 0.95 AS trust, + '' AS inferenceprovenance, + 'openaire____::openorgs' AS collectedfromid, + 'OpenOrgs Database' AS collectedfromname, + o.country || '@@@dnet:countries' AS country, + 'sysimport:crosswalk:entityregistry@@@sysimport:crosswalk:entityregistry@@@dnet:provenance_actions@@@dnet:provenance_actions' AS provenanceaction, + array_agg(DISTINCT i.otherid || '###' || i.type || '@@@dnet:pid_types') AS pid +FROM organizations o + LEFT OUTER JOIN acronyms a ON (a.id = o.id) + LEFT OUTER JOIN urls u ON (u.id = o.id) + LEFT OUTER JOIN other_ids i ON (i.id = o.id) + LEFT OUTER JOIN other_names n ON (n.id = o.id) +GROUP BY + o.id, + o.name, + o.modification_date, + o.country + +UNION ALL + +SELECT + 'openorgsmesh'||substring(o.id, 13)||'-'||md5(n.name) AS organizationid, + n.name AS legalshortname, + n.name AS legalname, + ARRAY[]::text[] AS "alternativeNames", + (array_agg(u.url))[1] AS websiteurl, + o.modification_date AS dateoftransformation, + false AS inferred, + false AS deletedbyinference, + 0.88 AS trust, + '' AS inferenceprovenance, + 'openaire____::openorgs' AS collectedfromid, + 'OpenOrgs Database' AS collectedfromname, + o.country || '@@@dnet:countries' AS country, + 'sysimport:crosswalk:entityregistry@@@sysimport:crosswalk:entityregistry@@@dnet:provenance_actions@@@dnet:provenance_actions' AS provenanceaction, + array_agg(DISTINCT i.otherid || '###' || i.type || '@@@dnet:pid_types') AS pid +FROM other_names n + LEFT OUTER JOIN organizations o ON (n.id = o.id) + LEFT OUTER JOIN urls u ON (u.id = o.id) + LEFT OUTER JOIN other_ids i ON (i.id = o.id) +GROUP BY + o.id, o.modification_date, o.country, n.name + + diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryProjectOrganization.sql b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryProjectOrganization.sql new file mode 100644 index 000000000..4483d6145 --- /dev/null +++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryProjectOrganization.sql @@ -0,0 +1,16 @@ +SELECT + po.project AS project, + po.resporganization AS resporganization, + po.participantnumber AS participantnumber, + po.contribution AS contribution, + NULL AS startdate, + NULL AS enddate, + false AS inferred, + false AS deletedbyinference, + po.trust AS trust, + NULL AS inferenceprovenance, + + po.semanticclass || '@@@' || po.semanticclass || '@@@dnet:project_organization_relations@@@dnet:project_organization_relations' AS semantics, + 'sysimport:crosswalk:entityregistry@@@sysimport:crosswalk:entityregistry@@@dnet:provenance_actions@@@dnet:provenance_actions' AS provenanceaction + +FROM project_organization po diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryProjects.sql b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryProjects.sql new file mode 100644 index 000000000..f04f1f03b --- /dev/null +++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryProjects.sql @@ -0,0 +1,87 @@ +SELECT + p.id AS 
projectid, + p.code AS code, + p.websiteurl AS websiteurl, + p.acronym AS acronym, + p.title AS title, + p.startdate AS startdate, + p.enddate AS enddate, + p.call_identifier AS callidentifier, + p.keywords AS keywords, + p.duration AS duration, + p.ec_sc39 AS ecsc39, + p.oa_mandate_for_publications AS oamandatepublications, + p.ec_article29_3 AS ecarticle29_3, + p.dateofcollection AS dateofcollection, + p.lastupdate AS dateoftransformation, + p.inferred AS inferred, + p.deletedbyinference AS deletedbyinference, + p.trust AS trust, + p.inferenceprovenance AS inferenceprovenance, + p.optional1 AS optional1, + p.optional2 AS optional2, + p.jsonextrainfo AS jsonextrainfo, + p.contactfullname AS contactfullname, + p.contactfax AS contactfax, + p.contactphone AS contactphone, + p.contactemail AS contactemail, + p.summary AS summary, + p.currency AS currency, + p.totalcost AS totalcost, + p.fundedamount AS fundedamount, + dc.id AS collectedfromid, + dc.officialname AS collectedfromname, + p.contracttype || '@@@' || p.contracttypename || '@@@' || p.contracttypescheme || '@@@' || p.contracttypescheme AS contracttype, + pac.code || '@@@' || pac.name || '@@@' || pas.code || '@@@' || pas.name AS provenanceaction, + array_agg(DISTINCT i.pid || '###' || i.issuertype) AS pid, + array_agg(DISTINCT s.name || '###' || sc.code || '@@@' || sc.name || '@@@' || ss.code || '@@@' || ss.name) AS subjects, + array_agg(DISTINCT fp.path) AS fundingtree + FROM projects p + LEFT OUTER JOIN class pac ON (pac.code = p.provenanceactionclass) + LEFT OUTER JOIN scheme pas ON (pas.code = p.provenanceactionscheme) + + LEFT OUTER JOIN projectpids pp ON (pp.project = p.id) + LEFT OUTER JOIN dsm_identities i ON (i.pid = pp.pid) + + LEFT OUTER JOIN dsm_datasources dc ON (dc.id = p.collectedfrom) + + LEFT OUTER JOIN project_fundingpath pf ON (pf.project = p.id) + LEFT OUTER JOIN fundingpaths fp ON (fp.id = pf.funding) + + LEFT OUTER JOIN project_subject ps ON (ps.project = p.id) + LEFT OUTER JOIN subjects s ON (s.id = ps.subject) + + LEFT OUTER JOIN class sc ON (sc.code = s.semanticclass) + LEFT OUTER JOIN scheme ss ON (ss.code = s.semanticscheme) + + GROUP BY + p.id, + p.code, + p.websiteurl, + p.acronym, + p.title, + p.startdate, + p.enddate, + p.call_identifier, + p.keywords, + p.duration, + p.ec_sc39, + p.oa_mandate_for_publications, + p.ec_article29_3, + p.dateofcollection, + p.inferred, + p.deletedbyinference, + p.trust, + p.inferenceprovenance, + p.contactfullname, + p.contactfax, + p.contactphone, + p.contactemail, + p.contracttype, + p.summary, + p.currency, + p.totalcost, + p.fundedamount, + dc.id, + dc.officialname, + pac.code, pac.name, pas.code, pas.name; \ No newline at end of file diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/querySimilarityFromOpenOrgsDB.sql b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/querySimilarityFromOpenOrgsDB.sql new file mode 100644 index 000000000..4407559c6 --- /dev/null +++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/querySimilarityFromOpenOrgsDB.sql @@ -0,0 +1,17 @@ +SELECT local_id AS id1, oa_original_id AS id2 FROM openaire_simrels WHERE reltype = 'is_similar' + +UNION ALL + +SELECT + o.id AS id1, + 'openorgsmesh'||substring(o.id, 13)||'-'||md5(a.acronym) AS id2 +FROM acronyms a + LEFT OUTER JOIN organizations o ON (a.id = o.id) + +UNION ALL + +SELECT + o.id AS id1, + 'openorgsmesh'||substring(o.id, 13)||'-'||md5(n.name) AS id2 +FROM other_names n + LEFT OUTER JOIN 
organizations o ON (n.id = o.id) From b35c59eb42465d6e7dd9981e7f745d4a7eeb5dcb Mon Sep 17 00:00:00 2001 From: Michele Artini Date: Mon, 20 Jan 2020 16:04:19 +0100 Subject: [PATCH 03/17] partial implementation of entities from db --- .../dhp-build-properties-maven-plugin/pom.xml | 35 + .../eu/dnetlib/dhp/schema/dli/Relation.java | 61 +- .../MigrateDbEntitiesApplication.java | 255 ++++-- dhp-workflows/dhp-dedup/pom.xml | 31 + .../dnetlib/dhp/graph/GraphMappingUtils.java | 36 +- .../dhp/graph/SparkGraphImporterJob.java | 63 +- pom.xml | 759 +++++++++--------- 7 files changed, 702 insertions(+), 538 deletions(-) diff --git a/dhp-build/dhp-build-properties-maven-plugin/pom.xml b/dhp-build/dhp-build-properties-maven-plugin/pom.xml index 4f99d5298..7b50acd3d 100644 --- a/dhp-build/dhp-build-properties-maven-plugin/pom.xml +++ b/dhp-build/dhp-build-properties-maven-plugin/pom.xml @@ -76,6 +76,41 @@ + + + + + org.eclipse.m2e + lifecycle-mapping + 1.0.0 + + + + + + + org.apache.maven.plugins + + + maven-plugin-plugin + + + [3.2,) + + + descriptor + + + + + + + + + + + + diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/dli/Relation.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/dli/Relation.java index b83cccb73..66007e21d 100644 --- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/dli/Relation.java +++ b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/dli/Relation.java @@ -5,43 +5,48 @@ import java.util.List; public class Relation implements Serializable { - private String source; + /** + * + */ + private static final long serialVersionUID = -9103706796710618813L; - private String target; + private String source; - private List provenance; + private String target; - private RelationSemantic semantic; + private List provenance; - public String getSource() { - return source; - } + private RelationSemantic semantic; - public void setSource(String source) { - this.source = source; - } + public String getSource() { + return source; + } - public String getTarget() { - return target; - } + public void setSource(final String source) { + this.source = source; + } - public void setTarget(String target) { - this.target = target; - } + public String getTarget() { + return target; + } - public List getProvenance() { - return provenance; - } + public void setTarget(final String target) { + this.target = target; + } - public void setProvenance(List provenance) { - this.provenance = provenance; - } + public List getProvenance() { + return provenance; + } - public RelationSemantic getSemantic() { - return semantic; - } + public void setProvenance(final List provenance) { + this.provenance = provenance; + } - public void setSemantic(RelationSemantic semantic) { - this.semantic = semantic; - } + public RelationSemantic getSemantic() { + return semantic; + } + + public void setSemantic(final RelationSemantic semantic) { + this.semantic = semantic; + } } diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateDbEntitiesApplication.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateDbEntitiesApplication.java index 60a7c24f7..efc395812 100644 --- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateDbEntitiesApplication.java +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateDbEntitiesApplication.java @@ -2,11 +2,17 @@ package eu.dnetlib.dhp.migration; import java.io.Closeable; import java.io.IOException; +import java.sql.Array; import java.sql.ResultSet; +import 
java.sql.SQLException; +import java.util.ArrayList; import java.util.Arrays; +import java.util.Date; +import java.util.List; import java.util.function.Consumer; import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -15,14 +21,21 @@ import eu.dnetlib.dhp.schema.oaf.DataInfo; import eu.dnetlib.dhp.schema.oaf.Datasource; import eu.dnetlib.dhp.schema.oaf.Organization; import eu.dnetlib.dhp.schema.oaf.Project; +import eu.dnetlib.dhp.schema.oaf.Qualifier; import eu.dnetlib.dhp.schema.oaf.Relation; +import eu.dnetlib.dhp.schema.oaf.StructuredProperty; public class MigrateDbEntitiesApplication extends AbstractMigrateApplication implements Closeable { + private static final Qualifier ENTITYREGISTRY_PROVENANCE_ACTION = MigrationUtils + .qualifier("sysimport:crosswalk:entityregistry", "sysimport:crosswalk:entityregistry", "dnet:provenance_actions", "dnet:provenance_actions"); + private static final Log log = LogFactory.getLog(MigrateDbEntitiesApplication.class); private final DbClient dbClient; + private final long lastUpdateTimestamp; + public static void main(final String[] args) throws Exception { final ArgumentApplicationParser parser = new ArgumentApplicationParser( IOUtils.toString(MigrateDbEntitiesApplication.class.getResourceAsStream("/eu/dnetlib/dhp/migration/migrate_db_entities_parameters.json"))); @@ -51,6 +64,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp final String dbPassword) throws Exception { super(hdfsPath, hdfsNameNode, hdfsUser); this.dbClient = new DbClient(dbUrl, dbUser, dbPassword); + this.lastUpdateTimestamp = new Date().getTime(); } public void execute(final String sqlFile, final Consumer consumer) throws Exception { @@ -61,7 +75,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp public void processDatasource(final ResultSet rs) { try { - final DataInfo info = MigrationUtils.dataInfo(null, null, null, null, null, null); // TODO + final DataInfo info = prepareDataInfo(rs); final Datasource ds = new Datasource(); @@ -74,8 +88,8 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp ds.setExtraInfo(null); // TODO ds.setOaiprovenance(null); // TODO - ds.setDatasourcetype(null); // Qualifier datasourcetype) { - ds.setOpenairecompatibility(null); // Qualifier openairecompatibility) { + ds.setDatasourcetype(prepareQualifierSplitting(rs.getString("datasourcetype"))); + ds.setOpenairecompatibility(prepareQualifierSplitting(rs.getString("openairecompatibility"))); ds.setOfficialname(MigrationUtils.field(rs.getString("officialname"), info)); ds.setEnglishname(MigrationUtils.field(rs.getString("englishname"), info)); ds.setWebsiteurl(MigrationUtils.field(rs.getString("websiteurl"), info)); @@ -86,7 +100,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp ds.setLongitude(MigrationUtils.field(Double.toString(rs.getDouble("longitude")), info)); ds.setDateofvalidation(MigrationUtils.field(rs.getDate("dateofvalidation").toString(), info)); ds.setDescription(MigrationUtils.field(rs.getString("description"), info)); - ds.setSubjects(null); // List subjects) { + ds.setSubjects(prepareListOfStructProps(rs.getArray("subjects"), info)); ds.setOdnumberofitems(MigrationUtils.field(Double.toString(rs.getInt("odnumberofitems")), info)); ds.setOdnumberofitemsdate(MigrationUtils.field(rs.getDate("odnumberofitemsdate").toString(), info)); 
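+ // NOTE: numeric and date columns (latitude, longitude, odnumberofitems, odnumberofitemsdate, ...) are serialized to strings here, so that every datasource attribute travels as a Field carrying the same DataInfo provenance.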
ds.setOdpolicies(MigrationUtils.field(rs.getString("odpolicies"), info)); @@ -110,12 +124,15 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp ds.setPolicies(null); // List // TODO ds.setJournal(null); // Journal // TODO + ds.setDataInfo(info); + ds.setLastupdatetimestamp(lastUpdateTimestamp); + // rs.getString("datasourceid"); rs.getArray("identities"); // rs.getString("officialname"); // rs.getString("englishname"); // rs.getString("contactemail"); - rs.getString("openairecompatibility"); // COMPLEX ...@@@... + // rs.getString("openairecompatibility"); // COMPLEX ...@@@... // rs.getString("websiteurl"); // rs.getString("logourl"); // rs.getArray("accessinfopackage"); @@ -124,15 +141,15 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp // rs.getString("namespaceprefix"); // rs.getInt("odnumberofitems"); // NULL // rs.getDate("odnumberofitemsdate"); // NULL - rs.getArray("subjects"); + // rs.getArray("subjects"); // rs.getString("description"); // rs.getString("odpolicies"); // NULL // rs.getArray("odlanguages"); // rs.getArray("odcontenttypes"); - rs.getBoolean("inferred"); // false - rs.getBoolean("deletedbyinference");// false - rs.getDouble("trust"); // 0.9 - rs.getString("inferenceprovenance"); // NULL + // rs.getBoolean("inferred"); // false + // rs.getBoolean("deletedbyinference");// false + // rs.getDouble("trust"); // 0.9 + // rs.getString("inferenceprovenance"); // NULL // rs.getDate("dateofcollection"); // rs.getDate("dateofvalidation"); // rs.getDate("releasestartdate"); @@ -152,21 +169,22 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp rs.getArray("policies"); // rs.getString("collectedfromid"); // rs.getString("collectedfromname"); - rs.getString("datasourcetype"); // COMPLEX XXX@@@@.... - rs.getString("provenanceaction"); // 'sysimport:crosswalk:entityregistry@@@sysimport:crosswalk:entityregistry@@@dnet:provenance_actions@@@dnet:provenance_actions' - // AS provenanceaction, + // rs.getString("datasourcetype"); // COMPLEX XXX@@@@.... 
+ // rs.getString("provenanceaction"); // + // 'sysimport:crosswalk:entityregistry@@@sysimport:crosswalk:entityregistry@@@dnet:provenance_actions@@@dnet:provenance_actions' + // AS provenanceaction, rs.getString("journal"); // CONCAT(d.issn, '@@@', d.eissn, '@@@', d.lissn) AS journal emitOaf(ds); } catch (final Exception e) { - // TODO: handle exception + throw new RuntimeException(e); } } public void processProject(final ResultSet rs) { try { - final DataInfo info = MigrationUtils.dataInfo(null, null, null, null, null, null); // TODO + final DataInfo info = prepareDataInfo(rs); final Project p = new Project(); @@ -192,9 +210,9 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp p.setEcsc39(MigrationUtils.field(Boolean.toString(rs.getBoolean("ecsc39")), info)); p.setOamandatepublications(MigrationUtils.field(Boolean.toString(rs.getBoolean("oamandatepublications")), info)); p.setEcarticle29_3(MigrationUtils.field(Boolean.toString(rs.getBoolean("ecarticle29_3")), info)); - p.setSubjects(null); // List //TODO + p.setSubjects(prepareListOfStructProps(rs.getArray("subjects"), info)); p.setFundingtree(null); // List> //TODO - p.setContracttype(null); // Qualifier //TODO + p.setContracttype(prepareQualifierSplitting(rs.getString("contracttype"))); p.setOptional1(MigrationUtils.field(rs.getString("optional1"), info)); p.setOptional2(MigrationUtils.field(rs.getString("optional2"), info)); p.setJsonextrainfo(MigrationUtils.field(rs.getString("jsonextrainfo"), info)); @@ -207,6 +225,9 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp p.setTotalcost(new Float(rs.getDouble("totalcost"))); p.setFundedamount(new Float(rs.getDouble("fundedamount"))); + p.setDataInfo(info); + p.setLastupdatetimestamp(lastUpdateTimestamp); + // rs.getString("projectid"); // rs.getString("code"); // rs.getString("websiteurl"); @@ -222,13 +243,13 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp // rs.getBoolean("ecarticle29_3"); // rs.getDate("dateofcollection"); // rs.getDate("dateoftransformation"); - rs.getBoolean("inferred"); - rs.getBoolean("deletedbyinference"); - rs.getDouble("trust"); - rs.getString("inferenceprovenance"); + // rs.getBoolean("inferred"); + // rs.getBoolean("deletedbyinference"); + // rs.getDouble("trust"); + // rs.getString("inferenceprovenance"); // rs.getString("optional1"); // rs.getString("optional2"); - rs.getString("jsonextrainfo"); + // rs.getString("jsonextrainfo"); // rs.getString("contactfullname"); // rs.getString("contactfax"); // rs.getString("contactphone"); @@ -248,14 +269,14 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp emitOaf(p); } catch (final Exception e) { - // TODO: handle exception + throw new RuntimeException(e); } } public void processOrganization(final ResultSet rs) { try { - final DataInfo info = MigrationUtils.dataInfo(null, null, null, null, null, null); // TODO + final DataInfo info = prepareDataInfo(rs); final Organization o = new Organization(); @@ -269,7 +290,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp o.setOaiprovenance(null); // OAIProvenance // TODO o.setLegalshortname(MigrationUtils.field("legalshortname", info)); o.setLegalname(MigrationUtils.field("legalname", info)); - o.setAlternativeNames(null); // List> //TODO + o.setAlternativeNames(new ArrayList<>()); o.setWebsiteurl(MigrationUtils.field("websiteurl", info)); o.setLogourl(MigrationUtils.field("logourl", info)); 
o.setEclegalbody(MigrationUtils.field(Boolean.toString(rs.getBoolean("eclegalbody")), info)); @@ -283,7 +304,10 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp o.setEcenterprise(MigrationUtils.field(Boolean.toString(rs.getBoolean("ecenterprise")), info)); o.setEcsmevalidated(MigrationUtils.field(Boolean.toString(rs.getBoolean("ecsmevalidated")), info)); o.setEcnutscode(MigrationUtils.field(Boolean.toString(rs.getBoolean("ecnutscode")), info)); - o.setCountry(null); // Qualifier country) { + o.setCountry(prepareQualifierSplitting(rs.getString("country"))); + + o.setDataInfo(info); + o.setLastupdatetimestamp(lastUpdateTimestamp); // rs.getString("organizationid"); // rs.getString("legalshortname"); @@ -300,87 +324,160 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp // rs.getBoolean("ecenterprise"); // rs.getBoolean("ecsmevalidated"); // rs.getBoolean("ecnutscode"); - rs.getDate("dateofcollection"); - rs.getDate("dateoftransformation"); - rs.getBoolean("inferred"); - rs.getBoolean("deletedbyinference"); - rs.getDouble("trust"); - rs.getString("inferenceprovenance"); + // rs.getDate("dateofcollection"); + // rs.getDate("dateoftransformation"); + // rs.getBoolean("inferred"); + // rs.getBoolean("deletedbyinference"); + // rs.getDouble("trust"); + // rs.getString("inferenceprovenance"); // rs.getString("collectedfromid"); // rs.getString("collectedfromname"); - rs.getString("country"); + // rs.getString("country"); rs.getString("provenanceaction"); rs.getArray("pid"); emitOaf(o); } catch (final Exception e) { - // TODO: handle exception + throw new RuntimeException(e); } } public void processDatasourceOrganization(final ResultSet rs) { try { - final Relation r = new Relation(); + final DataInfo info = prepareDataInfo(rs); + final String orgId = MigrationUtils.createOpenaireId("20", rs.getString("organization")); + final String dsId = MigrationUtils.createOpenaireId("10", rs.getString("datasource")); - r.setRelType(null); // TODO - r.setSubRelType(null); // TODO - r.setRelClass(null); // TODO - r.setSource(null); // TODO - r.setTarget(null); // TODO - r.setCollectedFrom(MigrationUtils.listKeyValues("", "")); + final Relation r1 = new Relation(); + r1.setRelType("datasourceOrganization"); + r1.setSubRelType("provision"); + r1.setRelClass("isProvidedBy"); + r1.setSource(dsId); + r1.setTarget(orgId); + r1.setCollectedFrom(null);// TODO + r1.setDataInfo(info); + r1.setLastupdatetimestamp(lastUpdateTimestamp); + emitOaf(r1); - rs.getString("datasource"); - rs.getString("organization"); - rs.getDate("startdate"); // NULL - rs.getDate("enddate"); // NULL - rs.getBoolean("inferred"); // false - rs.getBoolean("deletedbyinference"); // false - rs.getDouble("trust"); // 0.9 - rs.getString("inferenceprovenance"); // NULL - rs.getString("semantics"); // 'providedBy@@@provided - // by@@@dnet:datasources_organizations_typologies@@@dnet:datasources_organizations_typologies' AS - // semantics, - rs.getString("provenanceaction"); // d.provenanceaction || '@@@' || d.provenanceaction || - // '@@@dnet:provenanceActions@@@dnet:provenanceActions' AS provenanceaction + final Relation r2 = new Relation(); + r2.setRelType("datasourceOrganization"); + r2.setSubRelType("provision"); + r2.setRelClass("provides"); + r2.setSource(orgId); + r2.setTarget(dsId); + r2.setCollectedFrom(null); // TODO + r2.setDataInfo(info); + r2.setLastupdatetimestamp(lastUpdateTimestamp); + emitOaf(r2); + + // rs.getString("datasource"); + // rs.getString("organization"); + // 
rs.getDate("startdate"); // NULL + // rs.getDate("enddate"); // NULL + // rs.getBoolean("inferred"); // false + // rs.getBoolean("deletedbyinference"); // false + // rs.getDouble("trust"); // 0.9 + // rs.getString("inferenceprovenance"); // NULL + // rs.getString("semantics"); // 'providedBy@@@provided + // by@@@dnet:datasources_organizations_typologies@@@dnet:datasources_organizations_typologies' AS + // semantics, + // rs.getString("provenanceaction"); // d.provenanceaction || '@@@' || d.provenanceaction || + // '@@@dnet:provenanceActions@@@dnet:provenanceActions' AS provenanceaction - emitOaf(r); } catch (final Exception e) { - // TODO: handle exception + throw new RuntimeException(e); } } public void processProjectOrganization(final ResultSet rs) { try { - final Relation r = new Relation(); + final DataInfo info = prepareDataInfo(rs); + final String orgId = MigrationUtils.createOpenaireId("20", rs.getString("resporganization")); + final String projectId = MigrationUtils.createOpenaireId("40", rs.getString("project")); - r.setRelType(null); // TODO - r.setSubRelType(null); // TODO - r.setRelClass(null); // TODO - r.setSource(null); // TODO - r.setTarget(null); // TODO - r.setCollectedFrom(null); + final Relation r1 = new Relation(); + r1.setRelType("projectOrganization"); + r1.setSubRelType("participation"); + r1.setRelClass("isParticipant"); + r1.setSource(projectId); + r1.setTarget(orgId); + r1.setCollectedFrom(null);// TODO + r1.setDataInfo(info); + r1.setLastupdatetimestamp(lastUpdateTimestamp); + emitOaf(r1); + + final Relation r2 = new Relation(); + r2.setRelType("projectOrganization"); + r2.setSubRelType("participation"); + r2.setRelClass("hasParticipant"); + r2.setSource(orgId); + r2.setTarget(projectId); + r2.setCollectedFrom(null); // TODO + r2.setDataInfo(info); + r1.setLastupdatetimestamp(lastUpdateTimestamp); + emitOaf(r2); + + // rs.getString("project"); + // rs.getString("resporganization"); + // rs.getInt("participantnumber"); + // rs.getDouble("contribution"); + // rs.getDate("startdate");// null + // rs.getDate("enddate");// null + // rs.getBoolean("inferred");// false + // rs.getBoolean("deletedbyinference"); // false + // rs.getDouble("trust"); + // rs.getString("inferenceprovenance"); // NULL + // rs.getString("semantics"); // po.semanticclass || '@@@' || po.semanticclass || + // '@@@dnet:project_organization_relations@@@dnet:project_organization_relations' AS semantics, + // rs.getString("provenanceaction"); // + // 'sysimport:crosswalk:entityregistry@@@sysimport:crosswalk:entityregistry@@@dnet:provenance_actions@@@dnet:provenance_actions' + // AS provenanceaction - rs.getString("project"); - rs.getString("resporganization"); - rs.getInt("participantnumber"); - rs.getDouble("contribution"); - rs.getDate("startdate");// null - rs.getDate("enddate");// null - rs.getBoolean("inferred");// false - rs.getBoolean("deletedbyinference"); // false - rs.getDouble("trust"); - rs.getString("inferenceprovenance"); // NULL - rs.getString("semantics"); // po.semanticclass || '@@@' || po.semanticclass || - // '@@@dnet:project_organization_relations@@@dnet:project_organization_relations' AS semantics, - rs.getString("provenanceaction"); // 'sysimport:crosswalk:entityregistry@@@sysimport:crosswalk:entityregistry@@@dnet:provenance_actions@@@dnet:provenance_actions' - // AS provenanceaction - emitOaf(r); } catch (final Exception e) { - // TODO: handle exception + throw new RuntimeException(e); } } + private DataInfo prepareDataInfo(final ResultSet rs) throws SQLException { + final 
Boolean deletedbyinference = rs.getBoolean("deletedbyinference"); + final String inferenceprovenance = rs.getString("inferenceprovenance"); + final Boolean inferred = rs.getBoolean("inferred"); + final String trust = rs.getString("trust"); + return MigrationUtils.dataInfo(deletedbyinference, inferenceprovenance, inferred, false, ENTITYREGISTRY_PROVENANCE_ACTION, trust); + } + + private Qualifier prepareQualifierSplitting(final String s) { + if (StringUtils.isBlank(s)) { return null; } + final String[] arr = s.split("@@@"); + return arr.length == 4 ? MigrationUtils.qualifier(arr[0], arr[1], arr[2], arr[3]) : null; + } + + private StructuredProperty prepareStructProp(final String s, final DataInfo dataInfo) { + if (StringUtils.isBlank(s)) { return null; } + final String[] parts = s.split("###"); + if (parts.length == 2) { + final String value = parts[0]; + final String[] arr = parts[1].split("@@@"); + if (arr.length == 4) { return MigrationUtils.structuredProperty(value, arr[0], arr[1], arr[2], arr[3], dataInfo); } + } + return null; + } + + private List prepareListOfStructProps(final Array array, final DataInfo dataInfo) throws SQLException { + final List res = new ArrayList<>(); + if (array != null) { + for (final String s : (String[]) array.getArray()) { + final StructuredProperty sp = prepareStructProp(s, dataInfo); + if (sp != null) { + res.add(sp); + } + } + } + + return res; + } + @Override public void close() throws IOException { super.close(); diff --git a/dhp-workflows/dhp-dedup/pom.xml b/dhp-workflows/dhp-dedup/pom.xml index 28ef6a453..81ac94f01 100644 --- a/dhp-workflows/dhp-dedup/pom.xml +++ b/dhp-workflows/dhp-dedup/pom.xml @@ -9,6 +9,37 @@ 4.0.0 dhp-dedup + + + + + net.alchim31.maven + scala-maven-plugin + 4.0.1 + + + scala-compile-first + initialize + + add-source + compile + + + + scala-test-compile + process-test-resources + + testCompile + + + + + ${scala.version} + + + + + diff --git a/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/graph/GraphMappingUtils.java b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/graph/GraphMappingUtils.java index ab19ff2b5..0291be47e 100644 --- a/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/graph/GraphMappingUtils.java +++ b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/graph/GraphMappingUtils.java @@ -1,23 +1,31 @@ package eu.dnetlib.dhp.graph; -import com.google.common.collect.Maps; -import eu.dnetlib.dhp.schema.oaf.*; - import java.util.Map; +import com.google.common.collect.Maps; + +import eu.dnetlib.dhp.schema.oaf.Dataset; +import eu.dnetlib.dhp.schema.oaf.Datasource; +import eu.dnetlib.dhp.schema.oaf.Organization; +import eu.dnetlib.dhp.schema.oaf.OtherResearchProduct; +import eu.dnetlib.dhp.schema.oaf.Project; +import eu.dnetlib.dhp.schema.oaf.Publication; +import eu.dnetlib.dhp.schema.oaf.Relation; +import eu.dnetlib.dhp.schema.oaf.Software; + public class GraphMappingUtils { - public final static Map types = Maps.newHashMap(); + public final static Map types = Maps.newHashMap(); - static { - types.put("datasource", Datasource.class); - types.put("organization", Organization.class); - types.put("project", Project.class); - types.put("dataset", Dataset.class); - types.put("otherresearchproduct", OtherResearchProduct.class); - types.put("software", Software.class); - types.put("publication", Publication.class); - types.put("relation", Relation.class); - } + static { + types.put("datasource", Datasource.class); + types.put("organization", Organization.class); + types.put("project", 
Project.class); + types.put("dataset", Dataset.class); + types.put("otherresearchproduct", OtherResearchProduct.class); + types.put("software", Software.class); + types.put("publication", Publication.class); + types.put("relation", Relation.class); + } } diff --git a/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/graph/SparkGraphImporterJob.java b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/graph/SparkGraphImporterJob.java index a6a4e9291..463bffae9 100644 --- a/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/graph/SparkGraphImporterJob.java +++ b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/graph/SparkGraphImporterJob.java @@ -1,7 +1,5 @@ package eu.dnetlib.dhp.graph; -import com.fasterxml.jackson.databind.ObjectMapper; -import eu.dnetlib.dhp.application.ArgumentApplicationParser; import org.apache.commons.io.IOUtils; import org.apache.hadoop.io.Text; import org.apache.spark.api.java.JavaRDD; @@ -9,42 +7,47 @@ import org.apache.spark.api.java.JavaSparkContext; import org.apache.spark.sql.Encoders; import org.apache.spark.sql.SaveMode; import org.apache.spark.sql.SparkSession; + +import com.fasterxml.jackson.databind.ObjectMapper; + +import eu.dnetlib.dhp.application.ArgumentApplicationParser; import scala.Tuple2; public class SparkGraphImporterJob { - public static void main(String[] args) throws Exception { + public static void main(final String[] args) throws Exception { - final ArgumentApplicationParser parser = new ArgumentApplicationParser(IOUtils.toString(SparkGraphImporterJob.class.getResourceAsStream("/eu/dnetlib/dhp/graph/input_graph_parameters.json"))); - parser.parseArgument(args); - final SparkSession spark = SparkSession - .builder() - .appName(SparkGraphImporterJob.class.getSimpleName()) - .master(parser.get("master")) - .config("hive.metastore.uris", parser.get("hive_metastore_uris")) - .enableHiveSupport() - .getOrCreate(); + final ArgumentApplicationParser parser = new ArgumentApplicationParser( + IOUtils.toString(SparkGraphImporterJob.class.getResourceAsStream("/eu/dnetlib/dhp/graph/input_graph_parameters.json"))); + parser.parseArgument(args); + final SparkSession spark = SparkSession + .builder() + .appName(SparkGraphImporterJob.class.getSimpleName()) + .master(parser.get("master")) + .config("hive.metastore.uris", parser.get("hive_metastore_uris")) + .enableHiveSupport() + .getOrCreate(); - final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext()); - final String inputPath = parser.get("sourcePath"); - final String hiveDbName = parser.get("hive_db_name"); + final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext()); + final String inputPath = parser.get("sourcePath"); + final String hiveDbName = parser.get("hive_db_name"); - spark.sql(String.format("CREATE DATABASE IF NOT EXISTS %s", hiveDbName)); + spark.sql(String.format("CREATE DATABASE IF NOT EXISTS %s", hiveDbName)); - // Read the input file and convert it into RDD of serializable object - GraphMappingUtils.types.forEach((name, clazz) -> { - final JavaRDD> inputRDD = sc.sequenceFile(inputPath + "/" + name, Text.class, Text.class) - .map(item -> new Tuple2<>(item._1.toString(), item._2.toString())); + // Read the input file and convert it into RDD of serializable object + GraphMappingUtils.types.forEach((name, clazz) -> { + final JavaRDD> inputRDD = sc.sequenceFile(inputPath + "/" + name, Text.class, Text.class) + .map(item -> new Tuple2<>(item._1.toString(), item._2.toString())); - spark.createDataset(inputRDD - .filter(s -> 
s._1().equals(clazz.getName())) - .map(Tuple2::_2) - .map(s -> new ObjectMapper().readValue(s, clazz)) - .rdd(), Encoders.bean(clazz)) - .write() - .mode(SaveMode.Overwrite) - .saveAsTable(hiveDbName + "." + name); - }); + spark.createDataset(inputRDD + .filter(s -> s._1().equals(clazz.getName())) + .map(Tuple2::_2) + .map(s -> new ObjectMapper().readValue(s, clazz)) + .rdd(), Encoders.bean(clazz)) + .write() + .mode(SaveMode.Overwrite) + .saveAsTable(hiveDbName + "." + name); + }); - } + } } diff --git a/pom.xml b/pom.xml index aedf5ebff..a27cf4fe7 100644 --- a/pom.xml +++ b/pom.xml @@ -1,426 +1,411 @@ - + - 4.0.0 - eu.dnetlib.dhp - dhp - 1.0.5-SNAPSHOT - pom + 4.0.0 + eu.dnetlib.dhp + dhp + 1.0.5-SNAPSHOT + pom - http://www.d-net.research-infrastructures.eu + http://www.d-net.research-infrastructures.eu - - - The Apache Software License, Version 2.0 - http://www.apache.org/licenses/LICENSE-2.0.txt - repo - A business-friendly OSS license - - + + + The Apache Software License, Version 2.0 + http://www.apache.org/licenses/LICENSE-2.0.txt + repo + A business-friendly OSS license + + - - dhp-build - dhp-schemas - dhp-common - dhp-workflows - + + dhp-build + dhp-schemas + dhp-common + dhp-workflows + - - Redmine - https://issue.openaire.research-infrastructures.eu/projects/openaire - + + Redmine + https://issue.openaire.research-infrastructures.eu/projects/openaire + - - jenkins - https://jenkins-dnet.d4science.org/ - + + jenkins + https://jenkins-dnet.d4science.org/ + - - scm:git:gitea@code-repo.d4science.org:D-Net/dnet-hadoop.git - scm:git:gitea@code-repo.d4science.org:D-Net/dnet-hadoop.git - https://code-repo.d4science.org/D-Net/dnet-hadoop/ - HEAD - + + scm:git:gitea@code-repo.d4science.org:D-Net/dnet-hadoop.git + scm:git:gitea@code-repo.d4science.org:D-Net/dnet-hadoop.git + https://code-repo.d4science.org/D-Net/dnet-hadoop/ + HEAD + - - + + - - - dnet45-releases - D-Net 45 releases - http://maven.research-infrastructures.eu/nexus/content/repositories/dnet45-releases - default - - false - - - true - - - - cloudera - Cloudera Repository - https://repository.cloudera.com/artifactory/cloudera-repos - - true - - - false - - - + + + dnet45-releases + D-Net 45 releases + http://maven.research-infrastructures.eu/nexus/content/repositories/dnet45-releases + default + + false + + + true + + + + cloudera + Cloudera Repository + https://repository.cloudera.com/artifactory/cloudera-repos + + true + + + false + + + - - - junit - junit - 4.12 - test - + + + junit + junit + 4.12 + test + - - org.mockito - mockito-core - 2.7.22 - test - + + org.mockito + mockito-core + 2.7.22 + test + - + - - - - org.apache.hadoop - hadoop-hdfs - ${dhp.hadoop.version} - provided - - - org.apache.hadoop - hadoop-client - ${dhp.hadoop.version} - provided - - - org.apache.spark - spark-core_2.11 - ${dhp.spark.version} - provided - - - org.apache.spark - spark-sql_2.11 - ${dhp.spark.version} - provided - - - org.apache.spark - spark-graphx_2.11 - ${dhp.spark.version} - provided - + + + + org.apache.hadoop + hadoop-hdfs + ${dhp.hadoop.version} + provided + + + org.apache.hadoop + hadoop-client + ${dhp.hadoop.version} + provided + + + org.apache.spark + spark-core_2.11 + ${dhp.spark.version} + provided + + + org.apache.spark + spark-sql_2.11 + ${dhp.spark.version} + provided + + + org.apache.spark + spark-graphx_2.11 + ${dhp.spark.version} + provided + - - org.apache.commons - commons-lang3 - ${dhp.commons.lang.version} - + + org.apache.commons + commons-lang3 + ${dhp.commons.lang.version} + - - commons-codec - commons-codec - 
1.9 - + + commons-codec + commons-codec + 1.9 + - - commons-io - commons-io - 2.4 - + + commons-io + commons-io + 2.4 + - - commons-cli - commons-cli - 1.2 - provided - + + commons-cli + commons-cli + 1.2 + provided + - - net.sf.saxon - Saxon-HE - 9.5.1-5 - + + net.sf.saxon + Saxon-HE + 9.5.1-5 + - - dom4j - dom4j - 1.6.1 - + + dom4j + dom4j + 1.6.1 + - - xml-apis - xml-apis - 1.4.01 - + + xml-apis + xml-apis + 1.4.01 + - - jaxen - jaxen - 1.1.6 - + + jaxen + jaxen + 1.1.6 + - - net.schmizz - sshj - 0.10.0 - test - + + net.schmizz + sshj + 0.10.0 + test + - - com.fasterxml.jackson.core - jackson-core - ${dhp.jackson.version} - provided - + + com.fasterxml.jackson.core + jackson-core + ${dhp.jackson.version} + provided + - - com.fasterxml.jackson.core - jackson-annotations - ${dhp.jackson.version} - provided - - - com.fasterxml.jackson.core - jackson-databind - ${dhp.jackson.version} - provided - + + com.fasterxml.jackson.core + jackson-annotations + ${dhp.jackson.version} + provided + + + com.fasterxml.jackson.core + jackson-databind + ${dhp.jackson.version} + provided + - - eu.dnetlib - dnet-pace-core - 4.0.0-SNAPSHOT - + + eu.dnetlib + dnet-pace-core + 4.0.0-SNAPSHOT + - - javax.persistence - javax.persistence-api - 2.2 - provided - + + javax.persistence + javax.persistence-api + 2.2 + provided + - - com.rabbitmq - amqp-client - 5.6.0 - - - com.jayway.jsonpath - json-path - 2.4.0 - - - com.arakelian - java-jq - 0.10.1 - - - edu.cmu - secondstring - 1.0.0 - + + com.rabbitmq + amqp-client + 5.6.0 + + + com.jayway.jsonpath + json-path + 2.4.0 + + + com.arakelian + java-jq + 0.10.1 + + + edu.cmu + secondstring + 1.0.0 + - - org.apache.oozie - oozie-client - ${dhp.oozie.version} - provided - - - - slf4j-simple - org.slf4j - - - - - + + org.mongodb + mongo-java-driver + ${mongodb.driver.version} + - - target - target/classes - ${project.artifactId}-${project.version} - target/test-classes - - - - org.apache.maven.plugins - maven-compiler-plugin - ${maven.compiler.plugin.version} - - 1.8 - 1.8 - ${project.build.sourceEncoding} - - + + org.apache.oozie + oozie-client + ${dhp.oozie.version} + provided + + + + slf4j-simple + org.slf4j + + + + + - - org.apache.maven.plugins - maven-jar-plugin - 3.0.2 - + + target + target/classes + ${project.artifactId}-${project.version} + target/test-classes + + + + org.apache.maven.plugins + maven-compiler-plugin + ${maven.compiler.plugin.version} + + 1.8 + 1.8 + ${project.build.sourceEncoding} + + - - org.apache.maven.plugins - maven-source-plugin - 3.0.1 - - - attach-sources - verify - - jar-no-fork - - - - + + org.apache.maven.plugins + maven-jar-plugin + 3.0.2 + - - org.apache.maven.plugins - maven-surefire-plugin - 2.19.1 - - true - - - - org.apache.maven.plugins - maven-javadoc-plugin - 2.10.4 - - true - - - - org.apache.maven.plugins - maven-dependency-plugin - 3.0.0 - + + org.apache.maven.plugins + maven-source-plugin + 3.0.1 + + + attach-sources + verify + + jar-no-fork + + + + - - org.codehaus.mojo - build-helper-maven-plugin - 1.12 - - - - - - org.apache.maven.plugins - maven-release-plugin - 2.5.3 - - - org.jacoco - jacoco-maven-plugin - 0.7.9 - - - **/schemas/* - **/com/cloudera/**/* - **/org/apache/avro/io/**/* - - - - - default-prepare-agent - - prepare-agent - - - - default-report - prepare-package - - report - - - - - - net.alchim31.maven - scala-maven-plugin - 4.0.1 - - - scala-compile-first - initialize - - add-source - compile - - - - scala-test-compile - process-test-resources - - testCompile - - - - - ${scala.version} - - - + + 
org.apache.maven.plugins + maven-surefire-plugin + 2.19.1 + + true + + + + org.apache.maven.plugins + maven-javadoc-plugin + 2.10.4 + + true + + + + org.apache.maven.plugins + maven-dependency-plugin + 3.0.0 + - - - org.apache.maven.wagon - wagon-ssh - 2.10 - - - - - - dnet45-snapshots - DNet45 Snapshots - http://maven.research-infrastructures.eu/nexus/content/repositories/dnet45-snapshots - default - - - dnet45-releases - http://maven.research-infrastructures.eu/nexus/content/repositories/dnet45-releases - - - - - - org.apache.maven.plugins - maven-javadoc-plugin - 2.10.4 - - true - - - - + + org.codehaus.mojo + build-helper-maven-plugin + 1.12 + + + + + + org.apache.maven.plugins + maven-release-plugin + 2.5.3 + + + org.jacoco + jacoco-maven-plugin + 0.7.9 + + + **/schemas/* + **/com/cloudera/**/* + **/org/apache/avro/io/**/* + + + + + default-prepare-agent + + prepare-agent + + + + default-report + prepare-package + + report + + + + - - UTF-8 - UTF-8 - 3.6.0 - 2.22.2 - cdh5.9.2 - 2.6.0-${dhp.cdh.version} - 4.1.0-${dhp.cdh.version} - 2.4.0.cloudera2 - 2.9.6 - 3.5 - 2.11.12 - + + + + + org.apache.maven.wagon + wagon-ssh + 2.10 + + + + + + dnet45-snapshots + DNet45 Snapshots + http://maven.research-infrastructures.eu/nexus/content/repositories/dnet45-snapshots + default + + + dnet45-releases + http://maven.research-infrastructures.eu/nexus/content/repositories/dnet45-releases + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + 2.10.4 + + true + + + + + + + UTF-8 + UTF-8 + 3.6.0 + 2.22.2 + cdh5.9.2 + 2.6.0-${dhp.cdh.version} + 4.1.0-${dhp.cdh.version} + 2.4.0.cloudera2 + 2.9.6 + 3.5 + 2.11.12 + 3.4.2 + From cd114f1c3b0cb36c045c16800dfd1251550463e5 Mon Sep 17 00:00:00 2001 From: Michele Artini Date: Tue, 21 Jan 2020 12:32:10 +0100 Subject: [PATCH 04/17] partial update --- .../MigrateDbEntitiesApplication.java | 85 ++++++++++++------- .../dnetlib/dhp/migration/MigrationUtils.java | 10 --- .../sql/queryDatasourceOrganization.sql | 5 +- .../sql/queryProjectOrganization.sql | 5 +- 4 files changed, 60 insertions(+), 45 deletions(-) diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateDbEntitiesApplication.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateDbEntitiesApplication.java index efc395812..6b537c840 100644 --- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateDbEntitiesApplication.java +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateDbEntitiesApplication.java @@ -19,6 +19,9 @@ import org.apache.commons.logging.LogFactory; import eu.dnetlib.dhp.application.ArgumentApplicationParser; import eu.dnetlib.dhp.schema.oaf.DataInfo; import eu.dnetlib.dhp.schema.oaf.Datasource; +import eu.dnetlib.dhp.schema.oaf.Field; +import eu.dnetlib.dhp.schema.oaf.Journal; +import eu.dnetlib.dhp.schema.oaf.KeyValue; import eu.dnetlib.dhp.schema.oaf.Organization; import eu.dnetlib.dhp.schema.oaf.Project; import eu.dnetlib.dhp.schema.oaf.Qualifier; @@ -84,10 +87,9 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp ds.setCollectedfrom(MigrationUtils.listKeyValues(rs.getString("collectedfromid"), rs.getString("collectedfromname"))); ds.setPid(null); // List // TODO ds.setDateofcollection(rs.getDate("dateofcollection").toString()); - ds.setDateoftransformation(null); // TODO + ds.setDateoftransformation(null); // Value not returned by the SQL query ds.setExtraInfo(null); // TODO - ds.setOaiprovenance(null); // TODO - + ds.setOaiprovenance(null); 
// Values not present in the DB ds.setDatasourcetype(prepareQualifierSplitting(rs.getString("datasourcetype"))); ds.setOpenairecompatibility(prepareQualifierSplitting(rs.getString("openairecompatibility"))); ds.setOfficialname(MigrationUtils.field(rs.getString("officialname"), info)); @@ -104,9 +106,9 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp ds.setOdnumberofitems(MigrationUtils.field(Double.toString(rs.getInt("odnumberofitems")), info)); ds.setOdnumberofitemsdate(MigrationUtils.field(rs.getDate("odnumberofitemsdate").toString(), info)); ds.setOdpolicies(MigrationUtils.field(rs.getString("odpolicies"), info)); - ds.setOdlanguages(MigrationUtils.listFields(info, rs.getArray("odlanguages"))); - ds.setOdcontenttypes(MigrationUtils.listFields(info, rs.getArray("odcontenttypes"))); - ds.setAccessinfopackage(MigrationUtils.listFields(info, rs.getArray("accessinfopackage"))); + ds.setOdlanguages(prepareListFields(rs.getArray("odlanguages"), info)); + ds.setOdcontenttypes(prepareListFields(rs.getArray("odcontenttypes"), info)); + ds.setAccessinfopackage(prepareListFields(rs.getArray("accessinfopackage"), info)); ds.setReleasestartdate(MigrationUtils.field(rs.getDate("releasestartdate").toString(), info)); ds.setReleaseenddate(MigrationUtils.field(rs.getDate("releaseenddate").toString(), info)); ds.setMissionstatementurl(MigrationUtils.field(rs.getString("missionstatementurl"), info)); @@ -121,14 +123,13 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp ds.setQualitymanagementkind(MigrationUtils.field(rs.getString("qualitymanagementkind"), info)); ds.setPidsystems(MigrationUtils.field(rs.getString("pidsystems"), info)); ds.setCertificates(MigrationUtils.field(rs.getString("certificates"), info)); - ds.setPolicies(null); // List // TODO - ds.setJournal(null); // Journal // TODO - + ds.setPolicies(new ArrayList<>()); // The sql query returns an empty array + ds.setJournal(prepareJournal(rs.getString("officialname"), rs.getString("journal"), info)); // Journal ds.setDataInfo(info); ds.setLastupdatetimestamp(lastUpdateTimestamp); // rs.getString("datasourceid"); - rs.getArray("identities"); + // rs.getArray("identities"); // rs.getString("officialname"); // rs.getString("englishname"); // rs.getString("contactemail"); @@ -166,14 +167,14 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp // rs.getString("qualitymanagementkind"); // rs.getString("pidsystems"); // rs.getString("certificates"); - rs.getArray("policies"); + // rs.getArray("policies"); // rs.getString("collectedfromid"); // rs.getString("collectedfromname"); - // rs.getString("datasourcetype"); // COMPLEX XXX@@@@.... 
+ // rs.getString("datasourcetype"); // COMPLEX // rs.getString("provenanceaction"); // // 'sysimport:crosswalk:entityregistry@@@sysimport:crosswalk:entityregistry@@@dnet:provenance_actions@@@dnet:provenance_actions' // AS provenanceaction, - rs.getString("journal"); // CONCAT(d.issn, '@@@', d.eissn, '@@@', d.lissn) AS journal + // rs.getString("journal"); // CONCAT(d.issn, '@@@', d.eissn, '@@@', d.lissn) AS journal emitOaf(ds); } catch (final Exception e) { @@ -192,12 +193,10 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp p.setOriginalId(Arrays.asList(rs.getString("projectid"))); p.setCollectedfrom(MigrationUtils.listKeyValues(rs.getString("collectedfromid"), rs.getString("collectedfromname"))); p.setPid(null); // List // TODO - p.setDateofcollection(rs.getDate("dateofcollection").toString()); p.setDateoftransformation(rs.getDate("dateoftransformation").toString()); p.setExtraInfo(null); // List //TODO - p.setOaiprovenance(null); // OAIProvenance /TODO - + p.setOaiprovenance(null); // Values not present in the DB p.setWebsiteurl(MigrationUtils.field(rs.getString("websiteurl"), info)); p.setCode(MigrationUtils.field(rs.getString("code"), info)); p.setAcronym(MigrationUtils.field(rs.getString("acronym"), info)); @@ -211,7 +210,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp p.setOamandatepublications(MigrationUtils.field(Boolean.toString(rs.getBoolean("oamandatepublications")), info)); p.setEcarticle29_3(MigrationUtils.field(Boolean.toString(rs.getBoolean("ecarticle29_3")), info)); p.setSubjects(prepareListOfStructProps(rs.getArray("subjects"), info)); - p.setFundingtree(null); // List> //TODO + p.setFundingtree(prepareListFields(rs.getArray("fundingtree"), info)); p.setContracttype(prepareQualifierSplitting(rs.getString("contracttype"))); p.setOptional1(MigrationUtils.field(rs.getString("optional1"), info)); p.setOptional2(MigrationUtils.field(rs.getString("optional2"), info)); @@ -224,7 +223,6 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp p.setCurrency(MigrationUtils.field(rs.getString("currency"), info)); p.setTotalcost(new Float(rs.getDouble("totalcost"))); p.setFundedamount(new Float(rs.getDouble("fundedamount"))); - p.setDataInfo(info); p.setLastupdatetimestamp(lastUpdateTimestamp); @@ -260,11 +258,11 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp // rs.getDouble("fundedamount"); // rs.getString("collectedfromid"); // rs.getString("collectedfromname"); - rs.getString("contracttype"); // COMPLEX - rs.getString("provenanceaction"); // COMPLEX - rs.getArray("pid"); - rs.getArray("subjects"); - rs.getArray("fundingtree"); + // rs.getString("contracttype"); // COMPLEX + // rs.getString("provenanceaction"); // COMPLEX + // rs.getArray("pid"); + // rs.getArray("subjects"); + // rs.getArray("fundingtree"); emitOaf(p); @@ -287,10 +285,10 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp o.setDateofcollection(rs.getDate("dateofcollection").toString()); o.setDateoftransformation(rs.getDate("dateoftransformation").toString()); o.setExtraInfo(null); // List // TODO - o.setOaiprovenance(null); // OAIProvenance // TODO + o.setOaiprovenance(null); // Values not present in the DB o.setLegalshortname(MigrationUtils.field("legalshortname", info)); o.setLegalname(MigrationUtils.field("legalname", info)); - o.setAlternativeNames(new ArrayList<>()); + o.setAlternativeNames(new ArrayList<>()); // Values not returned by the SQL 
query o.setWebsiteurl(MigrationUtils.field("websiteurl", info)); o.setLogourl(MigrationUtils.field("logourl", info)); o.setEclegalbody(MigrationUtils.field(Boolean.toString(rs.getBoolean("eclegalbody")), info)); @@ -305,7 +303,6 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp o.setEcsmevalidated(MigrationUtils.field(Boolean.toString(rs.getBoolean("ecsmevalidated")), info)); o.setEcnutscode(MigrationUtils.field(Boolean.toString(rs.getBoolean("ecnutscode")), info)); o.setCountry(prepareQualifierSplitting(rs.getString("country"))); - o.setDataInfo(info); o.setLastupdatetimestamp(lastUpdateTimestamp); @@ -333,8 +330,8 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp // rs.getString("collectedfromid"); // rs.getString("collectedfromname"); // rs.getString("country"); - rs.getString("provenanceaction"); - rs.getArray("pid"); + // rs.getString("provenanceaction"); + // rs.getArray("pid"); emitOaf(o); } catch (final Exception e) { @@ -348,6 +345,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp final DataInfo info = prepareDataInfo(rs); final String orgId = MigrationUtils.createOpenaireId("20", rs.getString("organization")); final String dsId = MigrationUtils.createOpenaireId("10", rs.getString("datasource")); + final List collectedFrom = MigrationUtils.listKeyValues(rs.getString("collectedfromid"), rs.getString("collectedfromname")); final Relation r1 = new Relation(); r1.setRelType("datasourceOrganization"); @@ -355,7 +353,7 @@ r1.setRelClass("isProvidedBy"); r1.setSource(dsId); r1.setTarget(orgId); - r1.setCollectedFrom(null);// TODO + r1.setCollectedFrom(collectedFrom); r1.setDataInfo(info); r1.setLastupdatetimestamp(lastUpdateTimestamp); emitOaf(r1); @@ -366,7 +364,7 @@ r2.setRelClass("provides"); r2.setSource(orgId); r2.setTarget(dsId); - r2.setCollectedFrom(null); // TODO + r2.setCollectedFrom(collectedFrom); r2.setDataInfo(info); r2.setLastupdatetimestamp(lastUpdateTimestamp); emitOaf(r2); @@ -395,6 +393,7 @@ final DataInfo info = prepareDataInfo(rs); final String orgId = MigrationUtils.createOpenaireId("20", rs.getString("resporganization")); final String projectId = MigrationUtils.createOpenaireId("40", rs.getString("project")); + final List collectedFrom = MigrationUtils.listKeyValues(rs.getString("collectedfromid"), rs.getString("collectedfromname")); final Relation r1 = new Relation(); r1.setRelType("projectOrganization"); @@ -402,7 +401,7 @@ r1.setRelClass("isParticipant"); r1.setSource(projectId); r1.setTarget(orgId); - r1.setCollectedFrom(null);// TODO + r1.setCollectedFrom(collectedFrom); r1.setDataInfo(info); r1.setLastupdatetimestamp(lastUpdateTimestamp); emitOaf(r1); @@ -413,7 +412,7 @@ r2.setRelClass("hasParticipant"); r2.setSource(orgId); r2.setTarget(projectId); - r2.setCollectedFrom(null); // TODO + r2.setCollectedFrom(collectedFrom); r2.setDataInfo(info); r2.setLastupdatetimestamp(lastUpdateTimestamp); emitOaf(r2); @@ -453,6 +452,14 @@ return arr.length == 4 ? 
MigrationUtils.qualifier(arr[0], arr[1], arr[2], arr[3]) : null; } + public static List> prepareListFields(final Array array, final DataInfo info) { + try { + return MigrationUtils.listFields(info, (String[]) array.getArray()); + } catch (final SQLException e) { + throw new RuntimeException("Invalid SQL array", e); + } + } + private StructuredProperty prepareStructProp(final String s, final DataInfo dataInfo) { if (StringUtils.isBlank(s)) { return null; } final String[] parts = s.split("###"); if (parts.length == 2) { final String value = parts[0]; final String[] arr = parts[1].split("@@@"); if (arr.length == 4) { return MigrationUtils.structuredProperty(value, arr[0], arr[1], arr[2], arr[3], dataInfo); } } return null; } @@ -478,6 +485,20 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp return res; } + private Journal prepareJournal(final String name, final String sj, final DataInfo info) { + if (StringUtils.isNotBlank(sj)) { + final String[] arr = sj.split("@@@"); + if (arr.length == 3) { + final String issn = StringUtils.isNotBlank(arr[0]) ? arr[0] : null; + final String eissn = StringUtils.isNotBlank(arr[1]) ? arr[1] : null; + final String lissn = StringUtils.isNotBlank(arr[2]) ? arr[2] : null; + if (issn != null || eissn != null || lissn != null) { return MigrationUtils + .journal(name, issn, eissn, lissn, null, null, null, null, null, null, null, info); } + } + } + return null; + } + @Override public void close() throws IOException { super.close(); diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrationUtils.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrationUtils.java index 8346a8041..c58688a79 100644 --- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrationUtils.java +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrationUtils.java @@ -1,7 +1,5 @@ package eu.dnetlib.dhp.migration; -import java.sql.Array; -import java.sql.SQLException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -50,14 +48,6 @@ public class MigrationUtils { return Arrays.stream(values).map(v -> field(v, info)).collect(Collectors.toList()); } - public static List> listFields(final DataInfo info, final Array array) { - try { - return listFields(info, (String[]) array.getArray()); - } catch (final SQLException e) { - throw new RuntimeException("Invalid SQL array", e); - } - } - public static Qualifier qualifier(final String classid, final String classname, final String schemeid, final String schemename) { final Qualifier q = new Qualifier(); q.setClassid(classid); diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryDatasourceOrganization.sql b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryDatasourceOrganization.sql index 885b6ae09..745f83971 100644 --- a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryDatasourceOrganization.sql +++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryDatasourceOrganization.sql @@ -7,10 +7,11 @@ SELECT false AS deletedbyinference, 0.9 AS trust, NULL AS inferenceprovenance, - + dc.id AS collectedfromid, + dc.officialname AS collectedfromname, 'providedBy@@@provided by@@@dnet:datasources_organizations_typologies@@@dnet:datasources_organizations_typologies' AS semantics, d.provenanceaction || '@@@' || d.provenanceaction || '@@@dnet:provenanceActions@@@dnet:provenanceActions' AS provenanceaction FROM dsm_datasource_organization dor LEFT OUTER JOIN dsm_datasources d ON (dor.datasource = d.id) - + LEFT OUTER JOIN dsm_datasources dc ON (dc.id = d.collectedfrom) diff --git 
a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryProjectOrganization.sql b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryProjectOrganization.sql index 4483d6145..4c06ca5b9 100644 --- a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryProjectOrganization.sql +++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryProjectOrganization.sql @@ -9,8 +9,11 @@ SELECT false AS deletedbyinference, po.trust AS trust, NULL AS inferenceprovenance, - + dc.id AS collectedfromid, + dc.officialname AS collectedfromname, po.semanticclass || '@@@' || po.semanticclass || '@@@dnet:project_organization_relations@@@dnet:project_organization_relations' AS semantics, 'sysimport:crosswalk:entityregistry@@@sysimport:crosswalk:entityregistry@@@dnet:provenance_actions@@@dnet:provenance_actions' AS provenanceaction FROM project_organization po + LEFT OUTER JOIN projects p ON (p.id = po.project) + LEFT OUTER JOIN dsm_datasources dc ON (dc.id = p.collectedfrom) From f6eccdde33a0d7f6d0f42e362f2b4773c3d6f10c Mon Sep 17 00:00:00 2001 From: Michele Artini Date: Tue, 21 Jan 2020 14:17:05 +0100 Subject: [PATCH 05/17] partial implementation --- .../migration/AbstractMigrateApplication.java | 147 +++++++++++++++ .../MigrateDbEntitiesApplication.java | 167 +++++++++--------- 2 files changed, 230 insertions(+), 84 deletions(-) diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMigrateApplication.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMigrateApplication.java index a5c8b2775..b8f92fb9c 100644 --- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMigrateApplication.java +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMigrateApplication.java @@ -3,8 +3,13 @@ package eu.dnetlib.dhp.migration; import java.io.Closeable; import java.io.IOException; import java.net.URI; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Collectors; +import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -13,7 +18,17 @@ import org.apache.hadoop.io.SequenceFile; import org.apache.hadoop.io.Text; import org.codehaus.jackson.map.ObjectMapper; +import eu.dnetlib.dhp.schema.oaf.DataInfo; +import eu.dnetlib.dhp.schema.oaf.ExtraInfo; +import eu.dnetlib.dhp.schema.oaf.Field; +import eu.dnetlib.dhp.schema.oaf.Journal; +import eu.dnetlib.dhp.schema.oaf.KeyValue; +import eu.dnetlib.dhp.schema.oaf.OAIProvenance; import eu.dnetlib.dhp.schema.oaf.Oaf; +import eu.dnetlib.dhp.schema.oaf.OriginDescription; +import eu.dnetlib.dhp.schema.oaf.Qualifier; +import eu.dnetlib.dhp.schema.oaf.StructuredProperty; +import eu.dnetlib.dhp.utils.DHPUtils; public class AbstractMigrateApplication implements Closeable { @@ -58,4 +73,136 @@ public class AbstractMigrateApplication implements Closeable { writer.close(); } + public static KeyValue keyValue(final String k, final String v) { + final KeyValue kv = new KeyValue(); + kv.setKey(k); + kv.setValue(v); + return kv; + } + + public static List listKeyValues(final String... 
s) { + if (s.length % 2 > 0) { throw new RuntimeException("Invalid number of parameters (k,v,k,v,....)"); } + + final List list = new ArrayList<>(); + for (int i = 0; i < s.length; i += 2) { + list.add(keyValue(s[i], s[i + 1])); + } + return list; + } + + public static Field field(final T value, final DataInfo info) { + final Field field = new Field<>(); + field.setValue(value); + field.setDataInfo(info); + return field; + } + + public static List> listFields(final DataInfo info, final String... values) { + return Arrays.stream(values).map(v -> field(v, info)).collect(Collectors.toList()); + } + + public static Qualifier qualifier(final String classid, final String classname, final String schemeid, final String schemename) { + final Qualifier q = new Qualifier(); + q.setClassid(classid); + q.setClassname(classname); + q.setSchemeid(schemeid); + q.setSchemename(schemename); + return q; + } + + public static StructuredProperty structuredProperty(final String value, + final String classid, + final String classname, + final String schemeid, + final String schemename, + final DataInfo dataInfo) { + final StructuredProperty sp = new StructuredProperty(); + sp.setValue(value); + sp.setQualifier(qualifier(classid, classname, schemeid, schemename)); + sp.setDataInfo(dataInfo); + return sp; + } + + public static ExtraInfo extraInfo(final String name, final String value, final String typology, final String provenance, final String trust) { + final ExtraInfo info = new ExtraInfo(); + info.setName(name); + info.setValue(value); + info.setTypology(typology); + info.setProvenance(provenance); + info.setTrust(trust); + return info; + } + + public static OAIProvenance oaiIProvenance(final String identifier, + final String baseURL, + final String metadataNamespace, + final Boolean altered, + final String datestamp, + final String harvestDate) { + + final OriginDescription desc = new OriginDescription(); + desc.setIdentifier(identifier); + desc.setBaseURL(baseURL); + desc.setMetadataNamespace(metadataNamespace); + desc.setAltered(altered); + desc.setDatestamp(datestamp); + desc.setHarvestDate(harvestDate); + + final OAIProvenance p = new OAIProvenance(); + p.setOriginDescription(desc); + + return p; + } + + public static Journal journal(final String name, + final String issnPrinted, + final String issnOnline, + final String issnLinking, + final String ep, + final String iss, + final String sp, + final String vol, + final String edition, + final String conferenceplace, + final String conferencedate, + final DataInfo dataInfo) { + final Journal j = new Journal(); + j.setName(name); + j.setIssnPrinted(issnPrinted); + j.setIssnOnline(issnOnline); + j.setIssnLinking(issnLinking); + j.setEp(ep); + j.setIss(iss); + j.setSp(sp); + j.setVol(vol); + j.setEdition(edition); + j.setConferenceplace(conferenceplace); + j.setConferencedate(conferencedate); + j.setDataInfo(dataInfo); + return j; + } + + public static DataInfo dataInfo(final Boolean deletedbyinference, + final String inferenceprovenance, + final Boolean inferred, + final Boolean invisible, + final Qualifier provenanceaction, + final String trust) { + final DataInfo d = new DataInfo(); + d.setDeletedbyinference(deletedbyinference); + d.setInferenceprovenance(inferenceprovenance); + d.setInferred(inferred); + d.setInvisible(invisible); + d.setProvenanceaction(provenanceaction); + d.setTrust(trust); + return d; + } + + public static String createOpenaireId(final String prefix, final String originalId) { + final String nsPrefix = 
StringUtils.substringBefore(originalId, "::"); + final String rest = StringUtils.substringAfter(originalId, "::"); + return String.format("%s|%s::%s", prefix, nsPrefix, DHPUtils.md5(rest)); + + } + } diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateDbEntitiesApplication.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateDbEntitiesApplication.java index 6b537c840..deb7fdd69 100644 --- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateDbEntitiesApplication.java +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateDbEntitiesApplication.java @@ -30,8 +30,8 @@ import eu.dnetlib.dhp.schema.oaf.StructuredProperty; public class MigrateDbEntitiesApplication extends AbstractMigrateApplication implements Closeable { - private static final Qualifier ENTITYREGISTRY_PROVENANCE_ACTION = MigrationUtils - .qualifier("sysimport:crosswalk:entityregistry", "sysimport:crosswalk:entityregistry", "dnet:provenance_actions", "dnet:provenance_actions"); + private static final Qualifier ENTITYREGISTRY_PROVENANCE_ACTION = + qualifier("sysimport:crosswalk:entityregistry", "sysimport:crosswalk:entityregistry", "dnet:provenance_actions", "dnet:provenance_actions"); private static final Log log = LogFactory.getLog(MigrateDbEntitiesApplication.class); @@ -82,9 +82,9 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp final Datasource ds = new Datasource(); - ds.setId(MigrationUtils.createOpenaireId("10", rs.getString("datasourceid"))); + ds.setId(createOpenaireId("10", rs.getString("datasourceid"))); ds.setOriginalId(Arrays.asList(rs.getString("datasourceid"))); - ds.setCollectedfrom(MigrationUtils.listKeyValues(rs.getString("collectedfromid"), rs.getString("collectedfromname"))); + ds.setCollectedfrom(listKeyValues(rs.getString("collectedfromid"), rs.getString("collectedfromname"))); ds.setPid(null); // List // TODO ds.setDateofcollection(rs.getDate("dateofcollection").toString()); ds.setDateoftransformation(null); // Value not returned by the SQL query @@ -92,37 +92,37 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp ds.setOaiprovenance(null); // Values not present in the DB ds.setDatasourcetype(prepareQualifierSplitting(rs.getString("datasourcetype"))); ds.setOpenairecompatibility(prepareQualifierSplitting(rs.getString("openairecompatibility"))); - ds.setOfficialname(MigrationUtils.field(rs.getString("officialname"), info)); - ds.setEnglishname(MigrationUtils.field(rs.getString("englishname"), info)); - ds.setWebsiteurl(MigrationUtils.field(rs.getString("websiteurl"), info)); - ds.setLogourl(MigrationUtils.field(rs.getString("logourl"), info)); - ds.setContactemail(MigrationUtils.field(rs.getString("contactemail"), info)); - ds.setNamespaceprefix(MigrationUtils.field(rs.getString("namespaceprefix"), info)); - ds.setLatitude(MigrationUtils.field(Double.toString(rs.getDouble("latitude")), info)); - ds.setLongitude(MigrationUtils.field(Double.toString(rs.getDouble("longitude")), info)); - ds.setDateofvalidation(MigrationUtils.field(rs.getDate("dateofvalidation").toString(), info)); - ds.setDescription(MigrationUtils.field(rs.getString("description"), info)); + ds.setOfficialname(field(rs.getString("officialname"), info)); + ds.setEnglishname(field(rs.getString("englishname"), info)); + ds.setWebsiteurl(field(rs.getString("websiteurl"), info)); + ds.setLogourl(field(rs.getString("logourl"), info)); + 
ds.setContactemail(field(rs.getString("contactemail"), info)); + ds.setNamespaceprefix(field(rs.getString("namespaceprefix"), info)); + ds.setLatitude(field(Double.toString(rs.getDouble("latitude")), info)); + ds.setLongitude(field(Double.toString(rs.getDouble("longitude")), info)); + ds.setDateofvalidation(field(rs.getDate("dateofvalidation").toString(), info)); + ds.setDescription(field(rs.getString("description"), info)); ds.setSubjects(prepareListOfStructProps(rs.getArray("subjects"), info)); - ds.setOdnumberofitems(MigrationUtils.field(Double.toString(rs.getInt("odnumberofitems")), info)); - ds.setOdnumberofitemsdate(MigrationUtils.field(rs.getDate("odnumberofitemsdate").toString(), info)); - ds.setOdpolicies(MigrationUtils.field(rs.getString("odpolicies"), info)); + ds.setOdnumberofitems(field(Double.toString(rs.getInt("odnumberofitems")), info)); + ds.setOdnumberofitemsdate(field(rs.getDate("odnumberofitemsdate").toString(), info)); + ds.setOdpolicies(field(rs.getString("odpolicies"), info)); ds.setOdlanguages(prepareListFields(rs.getArray("odlanguages"), info)); ds.setOdcontenttypes(prepareListFields(rs.getArray("odcontenttypes"), info)); ds.setAccessinfopackage(prepareListFields(rs.getArray("accessinfopackage"), info)); - ds.setReleasestartdate(MigrationUtils.field(rs.getDate("releasestartdate").toString(), info)); - ds.setReleaseenddate(MigrationUtils.field(rs.getDate("releaseenddate").toString(), info)); - ds.setMissionstatementurl(MigrationUtils.field(rs.getString("missionstatementurl"), info)); - ds.setDataprovider(MigrationUtils.field(rs.getBoolean("dataprovider"), info)); - ds.setServiceprovider(MigrationUtils.field(rs.getBoolean("serviceprovider"), info)); - ds.setDatabaseaccesstype(MigrationUtils.field(rs.getString("databaseaccesstype"), info)); - ds.setDatauploadtype(MigrationUtils.field(rs.getString("datauploadtype"), info)); - ds.setDatabaseaccessrestriction(MigrationUtils.field(rs.getString("databaseaccessrestriction"), info)); - ds.setDatauploadrestriction(MigrationUtils.field(rs.getString("datauploadrestriction"), info)); - ds.setVersioning(MigrationUtils.field(rs.getBoolean("versioning"), info)); - ds.setCitationguidelineurl(MigrationUtils.field(rs.getString("citationguidelineurl"), info)); - ds.setQualitymanagementkind(MigrationUtils.field(rs.getString("qualitymanagementkind"), info)); - ds.setPidsystems(MigrationUtils.field(rs.getString("pidsystems"), info)); - ds.setCertificates(MigrationUtils.field(rs.getString("certificates"), info)); + ds.setReleasestartdate(field(rs.getDate("releasestartdate").toString(), info)); + ds.setReleaseenddate(field(rs.getDate("releaseenddate").toString(), info)); + ds.setMissionstatementurl(field(rs.getString("missionstatementurl"), info)); + ds.setDataprovider(field(rs.getBoolean("dataprovider"), info)); + ds.setServiceprovider(field(rs.getBoolean("serviceprovider"), info)); + ds.setDatabaseaccesstype(field(rs.getString("databaseaccesstype"), info)); + ds.setDatauploadtype(field(rs.getString("datauploadtype"), info)); + ds.setDatabaseaccessrestriction(field(rs.getString("databaseaccessrestriction"), info)); + ds.setDatauploadrestriction(field(rs.getString("datauploadrestriction"), info)); + ds.setVersioning(field(rs.getBoolean("versioning"), info)); + ds.setCitationguidelineurl(field(rs.getString("citationguidelineurl"), info)); + ds.setQualitymanagementkind(field(rs.getString("qualitymanagementkind"), info)); + ds.setPidsystems(field(rs.getString("pidsystems"), info)); + ds.setCertificates(field(rs.getString("certificates"), 
info)); ds.setPolicies(new ArrayList<>()); // The sql query returns an empty array ds.setJournal(prepareJournal(rs.getString("officialname"), rs.getString("journal"), info)); // Journal ds.setDataInfo(info); @@ -189,38 +189,38 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp final Project p = new Project(); - p.setId(MigrationUtils.createOpenaireId("40", rs.getString("projectid"))); + p.setId(createOpenaireId("40", rs.getString("projectid"))); p.setOriginalId(Arrays.asList(rs.getString("projectid"))); - p.setCollectedfrom(MigrationUtils.listKeyValues(rs.getString("collectedfromid"), rs.getString("collectedfromname"))); + p.setCollectedfrom(listKeyValues(rs.getString("collectedfromid"), rs.getString("collectedfromname"))); p.setPid(null); // List // TODO p.setDateofcollection(rs.getDate("dateofcollection").toString()); p.setDateoftransformation(rs.getDate("dateoftransformation").toString()); p.setExtraInfo(null); // List //TODO p.setOaiprovenance(null); // Values not present in the DB - p.setWebsiteurl(MigrationUtils.field(rs.getString("websiteurl"), info)); - p.setCode(MigrationUtils.field(rs.getString("code"), info)); - p.setAcronym(MigrationUtils.field(rs.getString("acronym"), info)); - p.setTitle(MigrationUtils.field(rs.getString("title"), info)); - p.setStartdate(MigrationUtils.field(rs.getDate("startdate").toString(), info)); - p.setEnddate(MigrationUtils.field(rs.getDate("enddate").toString(), info)); - p.setCallidentifier(MigrationUtils.field(rs.getString("callidentifier"), info)); - p.setKeywords(MigrationUtils.field(rs.getString("keywords"), info)); - p.setDuration(MigrationUtils.field(Integer.toString(rs.getInt("duration")), info)); - p.setEcsc39(MigrationUtils.field(Boolean.toString(rs.getBoolean("ecsc39")), info)); - p.setOamandatepublications(MigrationUtils.field(Boolean.toString(rs.getBoolean("oamandatepublications")), info)); - p.setEcarticle29_3(MigrationUtils.field(Boolean.toString(rs.getBoolean("ecarticle29_3")), info)); + p.setWebsiteurl(field(rs.getString("websiteurl"), info)); + p.setCode(field(rs.getString("code"), info)); + p.setAcronym(field(rs.getString("acronym"), info)); + p.setTitle(field(rs.getString("title"), info)); + p.setStartdate(field(rs.getDate("startdate").toString(), info)); + p.setEnddate(field(rs.getDate("enddate").toString(), info)); + p.setCallidentifier(field(rs.getString("callidentifier"), info)); + p.setKeywords(field(rs.getString("keywords"), info)); + p.setDuration(field(Integer.toString(rs.getInt("duration")), info)); + p.setEcsc39(field(Boolean.toString(rs.getBoolean("ecsc39")), info)); + p.setOamandatepublications(field(Boolean.toString(rs.getBoolean("oamandatepublications")), info)); + p.setEcarticle29_3(field(Boolean.toString(rs.getBoolean("ecarticle29_3")), info)); p.setSubjects(prepareListOfStructProps(rs.getArray("subjects"), info)); p.setFundingtree(prepareListFields(rs.getArray("fundingtree"), info)); p.setContracttype(prepareQualifierSplitting(rs.getString("contracttype"))); - p.setOptional1(MigrationUtils.field(rs.getString("optional1"), info)); - p.setOptional2(MigrationUtils.field(rs.getString("optional2"), info)); - p.setJsonextrainfo(MigrationUtils.field(rs.getString("jsonextrainfo"), info)); - p.setContactfullname(MigrationUtils.field(rs.getString("contactfullname"), info)); - p.setContactfax(MigrationUtils.field(rs.getString("contactfax"), info)); - p.setContactphone(MigrationUtils.field(rs.getString("contactphone"), info)); - 
p.setContactemail(MigrationUtils.field(rs.getString("contactemail"), info)); - p.setSummary(MigrationUtils.field(rs.getString("summary"), info)); - p.setCurrency(MigrationUtils.field(rs.getString("currency"), info)); + p.setOptional1(field(rs.getString("optional1"), info)); + p.setOptional2(field(rs.getString("optional2"), info)); + p.setJsonextrainfo(field(rs.getString("jsonextrainfo"), info)); + p.setContactfullname(field(rs.getString("contactfullname"), info)); + p.setContactfax(field(rs.getString("contactfax"), info)); + p.setContactphone(field(rs.getString("contactphone"), info)); + p.setContactemail(field(rs.getString("contactemail"), info)); + p.setSummary(field(rs.getString("summary"), info)); + p.setCurrency(field(rs.getString("currency"), info)); p.setTotalcost(new Float(rs.getDouble("totalcost"))); p.setFundedamount(new Float(rs.getDouble("fundedamount"))); p.setDataInfo(info); @@ -278,30 +278,29 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp final Organization o = new Organization(); - o.setId(MigrationUtils.createOpenaireId("20", rs.getString("organizationid"))); // String id) { + o.setId(createOpenaireId("20", rs.getString("organizationid"))); // String id) { o.setOriginalId(Arrays.asList(rs.getString("organizationid"))); - o.setCollectedfrom(MigrationUtils.listKeyValues(rs.getString("collectedfromid"), rs.getString("collectedfromname"))); + o.setCollectedfrom(listKeyValues(rs.getString("collectedfromid"), rs.getString("collectedfromname"))); o.setPid(null); // List // TODO o.setDateofcollection(rs.getDate("dateofcollection").toString()); o.setDateoftransformation(rs.getDate("dateoftransformation").toString()); o.setExtraInfo(null); // List // TODO o.setOaiprovenance(null); // Values not present in the DB - o.setLegalshortname(MigrationUtils.field("legalshortname", info)); - o.setLegalname(MigrationUtils.field("legalname", info)); + o.setLegalshortname(field("legalshortname", info)); + o.setLegalname(field("legalname", info)); o.setAlternativeNames(new ArrayList<>()); // Values not returned by the SQL query - o.setWebsiteurl(MigrationUtils.field("websiteurl", info)); - o.setLogourl(MigrationUtils.field("logourl", info)); - o.setEclegalbody(MigrationUtils.field(Boolean.toString(rs.getBoolean("eclegalbody")), info)); - o.setEclegalperson(MigrationUtils.field(Boolean.toString(rs.getBoolean("eclegalperson")), info)); - o.setEcnonprofit(MigrationUtils.field(Boolean.toString(rs.getBoolean("ecnonprofit")), info)); - o.setEcresearchorganization(MigrationUtils.field(Boolean.toString(rs.getBoolean("ecresearchorganization")), info)); - o.setEchighereducation(MigrationUtils.field(Boolean.toString(rs.getBoolean("echighereducation")), info)); - o.setEcinternationalorganizationeurinterests(MigrationUtils - .field(Boolean.toString(rs.getBoolean("ecinternationalorganizationeurinterests")), info)); - o.setEcinternationalorganization(MigrationUtils.field(Boolean.toString(rs.getBoolean("ecinternationalorganization")), info)); - o.setEcenterprise(MigrationUtils.field(Boolean.toString(rs.getBoolean("ecenterprise")), info)); - o.setEcsmevalidated(MigrationUtils.field(Boolean.toString(rs.getBoolean("ecsmevalidated")), info)); - o.setEcnutscode(MigrationUtils.field(Boolean.toString(rs.getBoolean("ecnutscode")), info)); + o.setWebsiteurl(field("websiteurl", info)); + o.setLogourl(field("logourl", info)); + o.setEclegalbody(field(Boolean.toString(rs.getBoolean("eclegalbody")), info)); + o.setEclegalperson(field(Boolean.toString(rs.getBoolean("eclegalperson")), 
info)); + o.setEcnonprofit(field(Boolean.toString(rs.getBoolean("ecnonprofit")), info)); + o.setEcresearchorganization(field(Boolean.toString(rs.getBoolean("ecresearchorganization")), info)); + o.setEchighereducation(field(Boolean.toString(rs.getBoolean("echighereducation")), info)); + o.setEcinternationalorganizationeurinterests(field(Boolean.toString(rs.getBoolean("ecinternationalorganizationeurinterests")), info)); + o.setEcinternationalorganization(field(Boolean.toString(rs.getBoolean("ecinternationalorganization")), info)); + o.setEcenterprise(field(Boolean.toString(rs.getBoolean("ecenterprise")), info)); + o.setEcsmevalidated(field(Boolean.toString(rs.getBoolean("ecsmevalidated")), info)); + o.setEcnutscode(field(Boolean.toString(rs.getBoolean("ecnutscode")), info)); o.setCountry(prepareQualifierSplitting(rs.getString("country"))); o.setDataInfo(info); o.setLastupdatetimestamp(lastUpdateTimestamp); @@ -343,9 +342,9 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp try { final DataInfo info = prepareDataInfo(rs); - final String orgId = MigrationUtils.createOpenaireId("20", rs.getString("organization")); - final String dsId = MigrationUtils.createOpenaireId("10", rs.getString("datasource")); - final List collectedFrom = MigrationUtils.listKeyValues(rs.getString("collectedfromid"), rs.getString("collectedfromname")); + final String orgId = createOpenaireId("20", rs.getString("organization")); + final String dsId = createOpenaireId("10", rs.getString("datasource")); + final List collectedFrom = listKeyValues(rs.getString("collectedfromid"), rs.getString("collectedfromname")); final Relation r1 = new Relation(); r1.setRelType("datasourceOrganization"); @@ -391,9 +390,9 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp public void processProjectOrganization(final ResultSet rs) { try { final DataInfo info = prepareDataInfo(rs); - final String orgId = MigrationUtils.createOpenaireId("20", rs.getString("resporganization")); - final String projectId = MigrationUtils.createOpenaireId("40", rs.getString("project")); - final List collectedFrom = MigrationUtils.listKeyValues(rs.getString("collectedfromid"), rs.getString("collectedfromname")); + final String orgId = createOpenaireId("20", rs.getString("resporganization")); + final String projectId = createOpenaireId("40", rs.getString("project")); + final List collectedFrom = listKeyValues(rs.getString("collectedfromid"), rs.getString("collectedfromname")); final Relation r1 = new Relation(); r1.setRelType("projectOrganization"); @@ -443,18 +442,18 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp final String inferenceprovenance = rs.getString("inferenceprovenance"); final Boolean inferred = rs.getBoolean("inferred"); final String trust = rs.getString("trust"); - return MigrationUtils.dataInfo(deletedbyinference, inferenceprovenance, inferred, false, ENTITYREGISTRY_PROVENANCE_ACTION, trust); + return dataInfo(deletedbyinference, inferenceprovenance, inferred, false, ENTITYREGISTRY_PROVENANCE_ACTION, trust); } private Qualifier prepareQualifierSplitting(final String s) { if (StringUtils.isBlank(s)) { return null; } final String[] arr = s.split("@@@"); - return arr.length == 4 ? MigrationUtils.qualifier(arr[0], arr[1], arr[2], arr[3]) : null; + return arr.length == 4 ? 
qualifier(arr[0], arr[1], arr[2], arr[3]) : null; } - public static List<Field<String>> prepareListFields(final Array array, final DataInfo info) { + private static List<Field<String>> prepareListFields(final Array array, final DataInfo info) { try { - return MigrationUtils.listFields(info, (String[]) array.getArray()); + return listFields(info, (String[]) array.getArray()); } catch (final SQLException e) { throw new RuntimeException("Invalid SQL array", e); } @@ -466,7 +465,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp if (parts.length == 2) { final String value = parts[0]; final String[] arr = parts[1].split("@@@"); - if (arr.length == 4) { return MigrationUtils.structuredProperty(value, arr[0], arr[1], arr[2], arr[3], dataInfo); } + if (arr.length == 4) { return structuredProperty(value, arr[0], arr[1], arr[2], arr[3], dataInfo); } } return null; } @@ -492,8 +491,8 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp final String issn = StringUtils.isNotBlank(arr[0]) ? arr[0] : null; final String eissn = StringUtils.isNotBlank(arr[1]) ? arr[1] : null; final String lissn = StringUtils.isNotBlank(arr[2]) ? arr[2] : null; - if (issn != null || eissn != null || lissn != null) { return MigrationUtils - .journal(name, issn, eissn, lissn, null, null, null, null, null, null, null, info); } + if (issn != null || eissn != null + || lissn != null) { return journal(name, issn, eissn, lissn, null, null, null, null, null, null, null, info); } } } return null; From 6bfe2dc96e0bb9a36008d3157dde36b0269060c9 Mon Sep 17 00:00:00 2001 From: Michele Artini Date: Wed, 22 Jan 2020 16:00:23 +0100 Subject: [PATCH 06/17] partial implementation --- .../migration/AbstractMigrateApplication.java | 8 +- .../MigrateDbEntitiesApplication.java | 29 +- .../MigrateMongoMdstoresApplication.java | 265 +++++++++++++++--- .../migrate_mongo_mstores_parameters.json | 18 ++ 4 files changed, 258 insertions(+), 62 deletions(-) diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMigrateApplication.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMigrateApplication.java index b8f92fb9c..73ee7f822 100644 --- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMigrateApplication.java +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMigrateApplication.java @@ -116,9 +116,13 @@ public class AbstractMigrateApplication implements Closeable { final String schemeid, final String schemename, final DataInfo dataInfo) { + return structuredProperty(value, qualifier(classid, classname, schemeid, schemename), dataInfo); + } + + public static StructuredProperty structuredProperty(final String value, final Qualifier qualifier, final DataInfo dataInfo) { final StructuredProperty sp = new StructuredProperty(); sp.setValue(value); - sp.setQualifier(qualifier(classid, classname, schemeid, schemename)); + sp.setQualifier(qualifier); sp.setDataInfo(dataInfo); return sp; } @@ -198,7 +202,7 @@ public class AbstractMigrateApplication implements Closeable { return d; } - public static String createOpenaireId(final String prefix, final String originalId) { + public static String createOpenaireId(final int prefix, final String originalId) { final String nsPrefix = StringUtils.substringBefore(originalId, "::"); final String rest = StringUtils.substringAfter(originalId, "::"); return String.format("%s|%s::%s", prefix, nsPrefix, DHPUtils.md5(rest)); diff --git 
a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateDbEntitiesApplication.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateDbEntitiesApplication.java index deb7fdd69..0b47c5282 100644 --- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateDbEntitiesApplication.java +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateDbEntitiesApplication.java @@ -60,7 +60,6 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp smdbe.execute("queryDatasourceOrganization.sql", smdbe::processDatasourceOrganization); smdbe.execute("queryProjectOrganization.sql", smdbe::processProjectOrganization); } - } public MigrateDbEntitiesApplication(final String hdfsPath, final String hdfsNameNode, final String hdfsUser, final String dbUrl, final String dbUser, @@ -82,13 +81,13 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp final Datasource ds = new Datasource(); - ds.setId(createOpenaireId("10", rs.getString("datasourceid"))); + ds.setId(createOpenaireId(10, rs.getString("datasourceid"))); ds.setOriginalId(Arrays.asList(rs.getString("datasourceid"))); ds.setCollectedfrom(listKeyValues(rs.getString("collectedfromid"), rs.getString("collectedfromname"))); - ds.setPid(null); // List // TODO + ds.setPid(new ArrayList<>()); ds.setDateofcollection(rs.getDate("dateofcollection").toString()); ds.setDateoftransformation(null); // Value not returned by the SQL query - ds.setExtraInfo(null); // TODO + ds.setExtraInfo(new ArrayList<>()); // Values not present in the DB ds.setOaiprovenance(null); // Values not present in the DB ds.setDatasourcetype(prepareQualifierSplitting(rs.getString("datasourcetype"))); ds.setOpenairecompatibility(prepareQualifierSplitting(rs.getString("openairecompatibility"))); @@ -189,13 +188,13 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp final Project p = new Project(); - p.setId(createOpenaireId("40", rs.getString("projectid"))); + p.setId(createOpenaireId(40, rs.getString("projectid"))); p.setOriginalId(Arrays.asList(rs.getString("projectid"))); p.setCollectedfrom(listKeyValues(rs.getString("collectedfromid"), rs.getString("collectedfromname"))); - p.setPid(null); // List // TODO + p.setPid(new ArrayList<>()); p.setDateofcollection(rs.getDate("dateofcollection").toString()); p.setDateoftransformation(rs.getDate("dateoftransformation").toString()); - p.setExtraInfo(null); // List //TODO + p.setExtraInfo(new ArrayList<>()); // Values not present in the DB p.setOaiprovenance(null); // Values not present in the DB p.setWebsiteurl(field(rs.getString("websiteurl"), info)); p.setCode(field(rs.getString("code"), info)); @@ -278,13 +277,13 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp final Organization o = new Organization(); - o.setId(createOpenaireId("20", rs.getString("organizationid"))); // String id) { + o.setId(createOpenaireId(20, rs.getString("organizationid"))); o.setOriginalId(Arrays.asList(rs.getString("organizationid"))); o.setCollectedfrom(listKeyValues(rs.getString("collectedfromid"), rs.getString("collectedfromname"))); - o.setPid(null); // List // TODO + o.setPid(new ArrayList<>()); o.setDateofcollection(rs.getDate("dateofcollection").toString()); o.setDateoftransformation(rs.getDate("dateoftransformation").toString()); - o.setExtraInfo(null); // List // TODO + o.setExtraInfo(new ArrayList<>()); // Values not present in the DB 
o.setOaiprovenance(null); // Values not present in the DB o.setLegalshortname(field("legalshortname", info)); o.setLegalname(field("legalname", info)); @@ -342,8 +341,8 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp try { final DataInfo info = prepareDataInfo(rs); - final String orgId = createOpenaireId("20", rs.getString("organization")); - final String dsId = createOpenaireId("10", rs.getString("datasource")); + final String orgId = createOpenaireId(20, rs.getString("organization")); + final String dsId = createOpenaireId(10, rs.getString("datasource")); final List collectedFrom = listKeyValues(rs.getString("collectedfromid"), rs.getString("collectedfromname")); final Relation r1 = new Relation(); @@ -390,8 +389,8 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp public void processProjectOrganization(final ResultSet rs) { try { final DataInfo info = prepareDataInfo(rs); - final String orgId = createOpenaireId("20", rs.getString("resporganization")); - final String projectId = createOpenaireId("40", rs.getString("project")); + final String orgId = createOpenaireId(20, rs.getString("resporganization")); + final String projectId = createOpenaireId(40, rs.getString("project")); final List collectedFrom = listKeyValues(rs.getString("collectedfromid"), rs.getString("collectedfromname")); final Relation r1 = new Relation(); @@ -451,7 +450,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrateApplication imp return arr.length == 4 ? qualifier(arr[0], arr[1], arr[2], arr[3]) : null; } - private static List> prepareListFields(final Array array, final DataInfo info) { + private List> prepareListFields(final Array array, final DataInfo info) { try { return listFields(info, (String[]) array.getArray()); } catch (final SQLException e) { diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateMongoMdstoresApplication.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateMongoMdstoresApplication.java index cead2366b..f6dcaf0e8 100644 --- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateMongoMdstoresApplication.java +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateMongoMdstoresApplication.java @@ -2,34 +2,56 @@ package eu.dnetlib.dhp.migration; import java.io.Closeable; import java.io.IOException; -import java.io.StringReader; +import java.sql.SQLException; import java.util.ArrayList; +import java.util.Arrays; +import java.util.Date; +import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.Map.Entry; import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.dom4j.Document; import org.dom4j.DocumentException; +import org.dom4j.DocumentFactory; +import org.dom4j.DocumentHelper; import org.dom4j.Node; -import org.dom4j.io.SAXReader; import eu.dnetlib.dhp.application.ArgumentApplicationParser; +import eu.dnetlib.dhp.schema.oaf.DataInfo; import eu.dnetlib.dhp.schema.oaf.Dataset; +import eu.dnetlib.dhp.schema.oaf.Field; +import eu.dnetlib.dhp.schema.oaf.KeyValue; +import eu.dnetlib.dhp.schema.oaf.OAIProvenance; import eu.dnetlib.dhp.schema.oaf.Oaf; import eu.dnetlib.dhp.schema.oaf.OtherResearchProduct; import eu.dnetlib.dhp.schema.oaf.Publication; +import eu.dnetlib.dhp.schema.oaf.Qualifier; import eu.dnetlib.dhp.schema.oaf.Relation; import 
eu.dnetlib.dhp.schema.oaf.Result; import eu.dnetlib.dhp.schema.oaf.Software; +import eu.dnetlib.dhp.schema.oaf.StructuredProperty; public class MigrateMongoMdstoresApplication extends AbstractMigrateApplication implements Closeable { private static final Log log = LogFactory.getLog(MigrateMongoMdstoresApplication.class); + private final Map code2name = new HashMap<>(); + private final MdstoreClient mdstoreClient; + private static final Qualifier MAIN_TITLE_QUALIFIER = qualifier("main title", "main title", "dnet:dataCite_title", "dnet:dataCite_title"); + + private static final Qualifier PUBLICATION_RESULTTYPE_QUALIFIER = + qualifier("publication", "publication", "dnet:result_typologies", "dnet:result_typologies"); + private static final Qualifier DATASET_RESULTTYPE_QUALIFIER = qualifier("dataset", "dataset", "dnet:result_typologies", "dnet:result_typologies"); + private static final Qualifier SOFTWARE_RESULTTYPE_QUALIFIER = qualifier("software", "software", "dnet:result_typologies", "dnet:result_typologies"); + private static final Qualifier OTHER_RESULTTYPE_QUALIFIER = qualifier("other", "other", "dnet:result_typologies", "dnet:result_typologies"); + public static void main(final String[] args) throws Exception { final ArgumentApplicationParser parser = new ArgumentApplicationParser( IOUtils.toString(MigrateMongoMdstoresApplication.class.getResourceAsStream("/eu/dnetlib/dhp/migration/migrate_mongo_mstores_parameters.json"))); @@ -46,16 +68,46 @@ public class MigrateMongoMdstoresApplication extends AbstractMigrateApplication final String hdfsNameNode = parser.get("namenode"); final String hdfsUser = parser.get("hdfsUser"); - try (final MigrateMongoMdstoresApplication mig = new MigrateMongoMdstoresApplication(hdfsPath, hdfsNameNode, hdfsUser, mongoBaseUrl, mongoDb)) { + final String dbUrl = parser.get("postgresUrl"); + final String dbUser = parser.get("postgresUser"); + final String dbPassword = parser.get("postgresPassword"); + + try (final MigrateMongoMdstoresApplication mig = + new MigrateMongoMdstoresApplication(hdfsPath, hdfsNameNode, hdfsUser, mongoBaseUrl, mongoDb, dbUrl, dbUser, dbPassword)) { mig.processMdRecords(mdFormat, mdLayout, mdInterpretation); } } public MigrateMongoMdstoresApplication(final String hdfsPath, final String hdfsNameNode, final String hdfsUser, final String mongoBaseUrl, - final String mongoDb) throws Exception { + final String mongoDb, final String dbUrl, final String dbUser, + final String dbPassword) throws Exception { super(hdfsPath, hdfsNameNode, hdfsUser); + this.mdstoreClient = new MdstoreClient(mongoBaseUrl, mongoDb); + loadClassNames(dbUrl, dbUser, dbPassword); + + final Map nsContext = new HashMap<>(); + nsContext.put("dc", "http://purl.org/dc/elements/1.1/"); + nsContext.put("dr", "http://www.driver-repository.eu/namespace/dr"); + nsContext.put("dri", "http://www.driver-repository.eu/namespace/dri"); + nsContext.put("oaf", "http://namespace.openaire.eu/oaf"); + nsContext.put("oai", "http://www.openarchives.org/OAI/2.0/"); + nsContext.put("prov", "http://www.openarchives.org/OAI/2.0/provenance"); + DocumentFactory.getInstance().setXPathNamespaceURIs(nsContext); + } + + private void loadClassNames(final String dbUrl, final String dbUser, final String dbPassword) throws IOException { + try (DbClient dbClient = new DbClient(dbUrl, dbUser, dbPassword)) { + code2name.clear(); + dbClient.processResults("select code, name from class", rs -> { + try { + code2name.put(rs.getString("code"), rs.getString("name")); + } catch (final SQLException e) { + 
e.printStackTrace(); + } + }); + } } @@ -74,23 +126,29 @@ public class MigrateMongoMdstoresApplication extends AbstractMigrateApplication } private List<Oaf> createOafs(final String xml) throws DocumentException { - final SAXReader reader = new SAXReader(); - final Document doc = reader.read(new StringReader(xml)); - final String type = doc.valueOf(""); // TODO + final Document doc = DocumentHelper.parseText(xml); + + final String type = doc.valueOf("//dr:CobjCategory/@type"); + final KeyValue collectedFrom = keyValue(doc.valueOf("//oaf:collectedFrom/@id"), doc.valueOf("//oaf:collectedFrom/@name")); + final DataInfo info = prepareDataInfo(doc); + final long lastUpdateTimestamp = new Date().getTime(); final List<Oaf> oafs = new ArrayList<>(); switch (type.toLowerCase()) { + case "": case "publication": final Publication p = new Publication(); - populateResultFields(p, doc); + populateResultFields(p, doc, collectedFrom, info, lastUpdateTimestamp); + p.setResulttype(PUBLICATION_RESULTTYPE_QUALIFIER); p.setJournal(null); // TODO oafs.add(p); break; case "dataset": final Dataset d = new Dataset(); - populateResultFields(d, doc); + populateResultFields(d, doc, collectedFrom, info, lastUpdateTimestamp); + d.setResulttype(DATASET_RESULTTYPE_QUALIFIER); d.setStoragedate(null); // TODO d.setDevice(null); // TODO d.setSize(null); // TODO @@ -101,16 +159,11 @@ public class MigrateMongoMdstoresApplication extends AbstractMigrateApplication oafs.add(d); break; - case "otherresearchproducts": - final OtherResearchProduct o = new OtherResearchProduct(); - populateResultFields(o, doc); - o.setContactperson(null); // TODO - o.setContactgroup(null); // TODO - o.setTool(null); // TODO - oafs.add(o); - break; + case "software": final Software s = new Software(); - populateResultFields(s, doc); + populateResultFields(s, doc, collectedFrom, info, lastUpdateTimestamp); + s.setResulttype(SOFTWARE_RESULTTYPE_QUALIFIER); s.setDocumentationUrl(null); // TODO s.setLicense(null); // TODO s.setCodeRepositoryUrl(null); // TODO @@ -118,20 +171,32 @@ public class MigrateMongoMdstoresApplication extends AbstractMigrateApplication oafs.add(s); break; + case "otherresearchproducts": default: - log.error("Inavlid type: " + type); + final OtherResearchProduct o = new OtherResearchProduct(); + populateResultFields(o, doc, collectedFrom, info, lastUpdateTimestamp); + o.setResulttype(OTHER_RESULTTYPE_QUALIFIER); + o.setContactperson(null); // TODO + o.setContactgroup(null); // TODO + o.setTool(null); // TODO + oafs.add(o); break; } if (!oafs.isEmpty()) { - addRelations(oafs, doc, "//*", "TYPE"); - addRelations(oafs, doc, "//*", "TYPE"); - addRelations(oafs, doc, "//*", "TYPE"); + addRelations(oafs, doc, "//*", "TYPE", collectedFrom, info, lastUpdateTimestamp); // TODO + addRelations(oafs, doc, "//*", "TYPE", collectedFrom, info, lastUpdateTimestamp); // TODO + addRelations(oafs, doc, "//*", "TYPE", collectedFrom, info, lastUpdateTimestamp); // TODO } return oafs; }
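+ // A plausible filling of the relation TODOs below, kept as a comment sketch only: the
+ // "//oaf:projectid" XPath and the "resultProject"/"isProducedBy" labels are illustrative
+ // assumptions, not the final mapping; only setters already used in this class are called.
+ //
+ // for (final Object o : doc.selectNodes("//oaf:projectid")) {
+ //     final Relation r = new Relation();
+ //     r.setRelType("resultProject");
+ //     r.setRelClass("isProducedBy");
+ //     r.setSource(createOpenaireId(50, doc.valueOf("//dri:objIdentifier")));
+ //     r.setTarget(createOpenaireId(40, ((Node) o).getText()));
+ //     r.setCollectedFrom(Arrays.asList(collectedFrom));
+ //     r.setDataInfo(info);
+ //     r.setLastupdatetimestamp(lastUpdateTimestamp);
+ //     oafs.add(r);
+ // }
- private void addRelations(final List<Oaf> oafs, final Document doc, final String xpath, final String type) { + private void addRelations(final List<Oaf> oafs, + final Document doc, + final String xpath, + final String type, + final KeyValue collectedFrom, + final DataInfo info, + final long lastUpdateTimestamp) { for (final Object o : doc.selectNodes(xpath)) { final Node n = (Node) o; final Relation r = new Relation(); @@ -140,40 +205,42 @@ public class MigrateMongoMdstoresApplication extends AbstractMigrateApplication r.setRelClass(null); // TODO r.setSource(null); // TODO r.setTarget(null); // TODO - 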
r.setCollectedFrom(null); // TODO + r.setCollectedFrom(Arrays.asList(collectedFrom)); + r.setDataInfo(info); + r.setLastupdatetimestamp(lastUpdateTimestamp); oafs.add(r); } } - private void populateResultFields(final Result r, final Document doc) { - r.setDataInfo(null); // TODO - r.setLastupdatetimestamp(null); // TODO - r.setId(null); // TODO - r.setOriginalId(null); // TODO - r.setCollectedfrom(null); // TODO - r.setPid(null); // TODO - r.setDateofcollection(null); // TODO - r.setDateoftransformation(null); // TODO - r.setExtraInfo(null); // TODO - r.setOaiprovenance(null); // TODO + private void populateResultFields(final Result r, final Document doc, final KeyValue collectedFrom, final DataInfo info, final long lastUpdateTimestamp) { + + r.setDataInfo(info); + r.setLastupdatetimestamp(lastUpdateTimestamp); + r.setId(createOpenaireId(50, doc.valueOf("//dri:objIdentifier"))); + r.setOriginalId(Arrays.asList(doc.valueOf("//dri:objIdentifier"))); + r.setCollectedfrom(Arrays.asList(collectedFrom)); + r.setPid(prepareListStructProps(doc, "//oaf:identifier", "@identifierType", "dnet:pid_types", "dnet:pid_types", info)); + r.setDateofcollection(doc.valueOf("//dr:dateOfCollection")); + r.setDateoftransformation(doc.valueOf("//dr:dateOfTransformation")); + r.setExtraInfo(new ArrayList<>()); // NOT PRESENT IN MDSTORES + r.setOaiprovenance(prepareOAIprovenance(doc)); r.setAuthor(null); // TODO - r.setResulttype(null); // TODO - r.setLanguage(null); // TODO - r.setCountry(null); // TODO - r.setSubject(null); // TODO - r.setTitle(null); // TODO + r.setLanguage(prepareQualifier(doc, "//dc:language", "dnet:languages", "dnet:languages")); + r.setCountry(new ArrayList<>()); // NOT PRESENT IN MDSTORES + r.setSubject(prepareListStructProps(doc, "//dc:subject", info)); + r.setTitle(prepareListStructProps(doc, "//dc:title", MAIN_TITLE_QUALIFIER, info)); r.setRelevantdate(null); // TODO - r.setDescription(null); // TODO - r.setDateofacceptance(null); // TODO - r.setPublisher(null); // TODO + r.setDescription(prepareListFields(doc, "//dc:description", info)); + r.setDateofacceptance(prepareField(doc, "//oaf:dateAccepted", info)); + r.setPublisher(prepareField(doc, "//dc:publisher", info)); r.setEmbargoenddate(null); // TODO r.setSource(null); // TODO r.setFulltext(null); // TODO - r.setFormat(null); // TODO - r.setContributor(null); // TODO + r.setFormat(prepareListFields(doc, "//dc:format", info)); + r.setContributor(prepareListFields(doc, "//dc:contributor", info)); r.setResourcetype(null); // TODO - r.setCoverage(null); // TODO + r.setCoverage(prepareListFields(doc, "//dc:coverage", info)); r.setRefereed(null); // TODO r.setContext(null); // TODO r.setExternalReference(null); // TODO @@ -182,9 +249,117 @@ public class MigrateMongoMdstoresApplication extends AbstractMigrateApplication r.setProcessingchargecurrency(null); // TODO } + private Qualifier prepareQualifier(final Document doc, final String xpath, final String schemeId, final String schemeName) { + final String classId = doc.valueOf(xpath); + final String className = code2name.get(classId); + return qualifier(classId, className, schemeId, schemeName); + } + + private List prepareListStructProps(final Document doc, + final String xpath, + final String xpathClassId, + final String schemeId, + final String schemeName, + final DataInfo info) { + final List res = new ArrayList<>(); + for (final Object o : doc.selectNodes(xpath)) { + final Node n = (Node) o; + final String classId = n.valueOf(xpathClassId); + final String className = 
code2name.get(classId); + res.add(structuredProperty(n.getText(), classId, className, schemeId, schemeName, info)); + } + return res; + } + + private List<StructuredProperty> prepareListStructProps(final Document doc, final String xpath, final Qualifier qualifier, final DataInfo info) { + final List<StructuredProperty> res = new ArrayList<>(); + for (final Object o : doc.selectNodes(xpath)) { + final Node n = (Node) o; + res.add(structuredProperty(n.getText(), qualifier, info)); + } + return res; + } + + private List<StructuredProperty> prepareListStructProps(final Document doc, final String xpath, final DataInfo info) { + final List<StructuredProperty> res = new ArrayList<>(); + for (final Object o : doc.selectNodes(xpath)) { + final Node n = (Node) o; + res.add(structuredProperty(n.getText(), n.valueOf("@classid"), n.valueOf("@classname"), n.valueOf("@schemeid"), n + .valueOf("@schemename"), info)); + } + return res; + } + + private OAIProvenance prepareOAIprovenance(final Document doc) { + final Node n = doc.selectSingleNode("//*[local-name()='provenance']/*[local-name()='originDescription']"); + + final String identifier = n.valueOf("./*[local-name()='identifier']"); + final String baseURL = n.valueOf("./*[local-name()='baseURL']"); + final String metadataNamespace = n.valueOf("./*[local-name()='metadataNamespace']"); + final boolean altered = n.valueOf("@altered").equalsIgnoreCase("true"); + final String datestamp = n.valueOf("./*[local-name()='datestamp']"); + final String harvestDate = n.valueOf("@harvestDate"); + + return oaiIProvenance(identifier, baseURL, metadataNamespace, altered, datestamp, harvestDate); + } + + private DataInfo prepareDataInfo(final Document doc) { + final Node n = doc.selectSingleNode("//oaf:datainfo"); + + final String paClassId = n.valueOf("./oaf:provenanceaction/@classid"); + final String paClassName = n.valueOf("./oaf:provenanceaction/@classname"); + final String paSchemeId = n.valueOf("./oaf:provenanceaction/@schemeid"); + final String paSchemeName = n.valueOf("./oaf:provenanceaction/@schemename"); + + final boolean deletedbyinference = Boolean.parseBoolean(n.valueOf("./oaf:deletedbyinference")); + final String inferenceprovenance = n.valueOf("./oaf:inferenceprovenance"); + final Boolean inferred = Boolean.parseBoolean(n.valueOf("./oaf:inferred")); + final String trust = n.valueOf("./oaf:trust"); + + return dataInfo(deletedbyinference, inferenceprovenance, inferred, false, qualifier(paClassId, paClassName, paSchemeId, paSchemeName), trust); + } + + private Field<String> prepareField(final Document doc, final String xpath, final DataInfo info) { + return field(doc.valueOf(xpath), info); + } + + private List<Field<String>> prepareListFields(final Document doc, final String xpath, final DataInfo info) { + return listFields(info, prepareListString(doc, xpath).toArray(new String[0])); + } + + private List<String> prepareListString(final Document doc, final String xpath) { + final List<String> res = new ArrayList<>(); + for (final Object o : doc.selectNodes(xpath)) { + final String s = ((Node) o).getText().trim(); + if (StringUtils.isNotBlank(s)) { + res.add(s); + } + } + return res; + } + /* + * private StructuredProperty prepareStructProp(final Document doc, final String xpath, final DataInfo dataInfo) { if + * (StringUtils.isBlank(s)) { return null; } final String[] parts = s.split("###"); if (parts.length == 2) { final String value = + * parts[0]; final String[] arr = parts[1].split("@@@"); if (arr.length == 4) { return structuredProperty(value, arr[0], arr[1], arr[2], + * arr[3], dataInfo); } } return null; } + * + * private List prepareListOfStructProps(final 
Document doc, final String xpath, final DataInfo dataInfo) { final + * List res = new ArrayList<>(); if (array != null) { for (final String s : (String[]) array.getArray()) { final + * StructuredProperty sp = prepareStructProp(s, dataInfo); if (sp != null) { res.add(sp); } } } + * + * return res; } + * + * private Journal prepareJournal(final Document doc, final String xpath, final DataInfo info) { if (StringUtils.isNotBlank(sj)) { final + * String[] arr = sj.split("@@@"); if (arr.length == 3) { final String issn = StringUtils.isNotBlank(arr[0]) ? arr[0] : null; final + * String eissn = StringUtils.isNotBlank(arr[1]) ? arr[1] : null;; final String lissn = StringUtils.isNotBlank(arr[2]) ? arr[2] : null;; + * if (issn != null || eissn != null || lissn != null) { return journal(name, issn, eissn, eissn, null, null, null, null, null, null, + * null, info); } } } return null; } + */ + @Override public void close() throws IOException { super.close(); mdstoreClient.close(); } + } diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/migrate_mongo_mstores_parameters.json b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/migrate_mongo_mstores_parameters.json index fb5736dc0..3cd6f39f5 100644 --- a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/migrate_mongo_mstores_parameters.json +++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/migrate_mongo_mstores_parameters.json @@ -46,5 +46,23 @@ "paramLongName": "mdInterpretation", "paramDescription": "metadata interpretation", "paramRequired": true + }, + { + "paramName": "postgresUrl", + "paramLongName": "postgresUrl", + "paramDescription": "postgres url, example: jdbc:postgresql://localhost:5432/testdb", + "paramRequired": true + }, + { + "paramName": "postgresUser", + "paramLongName": "postgresUser", + "paramDescription": "postgres user", + "paramRequired": true + }, + { + "paramName": "postgresPassword", + "paramLongName": "postgresPassword", + "paramDescription": "postgres password", + "paramRequired": true } ] \ No newline at end of file From fbb0fc140b7f8b5e3d16c78d7df8fdbd92e8b5f3 Mon Sep 17 00:00:00 2001 From: Michele Artini Date: Tue, 4 Feb 2020 15:25:47 +0100 Subject: [PATCH 07/17] partial implementation of migration --- ...on.java => AbstractMigrationExecutor.java} | 4 +- .../dhp/migration/AbstractMongoExecutor.java | 369 ++++++++++++++++++ .../MigrateDbEntitiesApplication.java | 2 +- .../MigrateMongoMdstoresApplication.java | 339 +--------------- .../dnetlib/dhp/migration/MigrationUtils.java | 154 -------- .../dhp/migration/OafMigrationExecutor.java | 246 ++++++++++++ 6 files changed, 626 insertions(+), 488 deletions(-) rename dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/{AbstractMigrateApplication.java => AbstractMigrationExecutor.java} (97%) create mode 100644 dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMongoExecutor.java delete mode 100644 dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrationUtils.java create mode 100644 dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/OafMigrationExecutor.java diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMigrateApplication.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMigrationExecutor.java similarity index 97% rename from 
dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMigrateApplication.java rename to dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMigrationExecutor.java index 73ee7f822..389790511 100644 --- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMigrateApplication.java +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMigrationExecutor.java @@ -30,7 +30,7 @@ import eu.dnetlib.dhp.schema.oaf.Qualifier; import eu.dnetlib.dhp.schema.oaf.StructuredProperty; import eu.dnetlib.dhp.utils.DHPUtils; -public class AbstractMigrateApplication implements Closeable { +public class AbstractMigrationExecutor implements Closeable { private final AtomicInteger counter = new AtomicInteger(0); @@ -42,7 +42,7 @@ public class AbstractMigrateApplication implements Closeable { private final SequenceFile.Writer writer; - public AbstractMigrateApplication(final String hdfsPath, final String hdfsNameNode, final String hdfsUser) throws Exception { + public AbstractMigrationExecutor(final String hdfsPath, final String hdfsNameNode, final String hdfsUser) throws Exception { this.writer = SequenceFile.createWriter(getConf(hdfsNameNode, hdfsUser), SequenceFile.Writer.file(new Path(hdfsPath)), SequenceFile.Writer .keyClass(IntWritable.class), SequenceFile.Writer.valueClass(Text.class)); } diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMongoExecutor.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMongoExecutor.java new file mode 100644 index 000000000..51c39824a --- /dev/null +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMongoExecutor.java @@ -0,0 +1,369 @@ +package eu.dnetlib.dhp.migration; + +import java.io.IOException; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import org.apache.commons.lang3.StringUtils; +import org.dom4j.Document; +import org.dom4j.DocumentException; +import org.dom4j.DocumentFactory; +import org.dom4j.DocumentHelper; +import org.dom4j.Node; + +import eu.dnetlib.dhp.schema.oaf.Author; +import eu.dnetlib.dhp.schema.oaf.DataInfo; +import eu.dnetlib.dhp.schema.oaf.Dataset; +import eu.dnetlib.dhp.schema.oaf.Field; +import eu.dnetlib.dhp.schema.oaf.GeoLocation; +import eu.dnetlib.dhp.schema.oaf.Instance; +import eu.dnetlib.dhp.schema.oaf.Journal; +import eu.dnetlib.dhp.schema.oaf.KeyValue; +import eu.dnetlib.dhp.schema.oaf.OAIProvenance; +import eu.dnetlib.dhp.schema.oaf.Oaf; +import eu.dnetlib.dhp.schema.oaf.OtherResearchProduct; +import eu.dnetlib.dhp.schema.oaf.Publication; +import eu.dnetlib.dhp.schema.oaf.Qualifier; +import eu.dnetlib.dhp.schema.oaf.Result; +import eu.dnetlib.dhp.schema.oaf.Software; +import eu.dnetlib.dhp.schema.oaf.StructuredProperty; + +public abstract class AbstractMongoExecutor extends AbstractMigrationExecutor { + + protected final Map code2name = new HashMap<>(); + + protected final MdstoreClient mdstoreClient; + + protected static final Qualifier MAIN_TITLE_QUALIFIER = qualifier("main title", "main title", "dnet:dataCite_title", "dnet:dataCite_title"); + + protected static final Qualifier PUBLICATION_RESULTTYPE_QUALIFIER = + qualifier("publication", "publication", "dnet:result_typologies", "dnet:result_typologies"); + protected static final Qualifier DATASET_RESULTTYPE_QUALIFIER = 
qualifier("dataset", "dataset", "dnet:result_typologies", "dnet:result_typologies"); + protected static final Qualifier SOFTWARE_RESULTTYPE_QUALIFIER = qualifier("software", "software", "dnet:result_typologies", "dnet:result_typologies"); + protected static final Qualifier OTHER_RESULTTYPE_QUALIFIER = qualifier("other", "other", "dnet:result_typologies", "dnet:result_typologies"); + + public AbstractMongoExecutor(final String hdfsPath, final String hdfsNameNode, final String hdfsUser, final String mongoBaseUrl, + final String mongoDb, final String dbUrl, final String dbUser, + final String dbPassword) throws Exception { + + super(hdfsPath, hdfsNameNode, hdfsUser); + + this.mdstoreClient = new MdstoreClient(mongoBaseUrl, mongoDb); + loadClassNames(dbUrl, dbUser, dbPassword); + + final Map nsContext = new HashMap<>(); + + registerNamespaces(nsContext); + nsContext.put("dc", "http://purl.org/dc/elements/1.1/"); + nsContext.put("dr", "http://www.driver-repository.eu/namespace/dr"); + nsContext.put("dri", "http://www.driver-repository.eu/namespace/dri"); + nsContext.put("oaf", "http://namespace.openaire.eu/oaf"); + nsContext.put("oai", "http://www.openarchives.org/OAI/2.0/"); + nsContext.put("prov", "http://www.openarchives.org/OAI/2.0/provenance"); + DocumentFactory.getInstance().setXPathNamespaceURIs(nsContext); + } + + private void loadClassNames(final String dbUrl, final String dbUser, final String dbPassword) throws IOException { + try (DbClient dbClient = new DbClient(dbUrl, dbUser, dbPassword)) { + code2name.clear(); + dbClient.processResults("select code, name from class", rs -> { + try { + code2name.put(rs.getString("code"), rs.getString("name")); + } catch (final SQLException e) { + e.printStackTrace(); + } + }); + } + + } + + public void processMdRecords(final String mdFormat, final String mdLayout, final String mdInterpretation) throws DocumentException { + + for (final Entry entry : mdstoreClient.validCollections(mdFormat, mdLayout, mdInterpretation).entrySet()) { + // final String mdId = entry.getKey(); + final String currentColl = entry.getValue(); + + for (final String xml : mdstoreClient.listRecords(currentColl)) { + final Document doc = DocumentHelper.parseText(xml); + + final String type = doc.valueOf("//dr:CobjCategory/@type"); + final KeyValue collectedFrom = keyValue(doc.valueOf("//oaf:collectedFrom/@id"), doc.valueOf("//oaf:collectedFrom/@name")); + final DataInfo info = prepareDataInfo(doc); + final long lastUpdateTimestamp = new Date().getTime(); + + for (final Oaf oaf : createOafs(doc, type, collectedFrom, info, lastUpdateTimestamp)) { + emitOaf(oaf); + } + } + } + } + + protected abstract void registerNamespaces(Map nsContext); + + protected List createOafs(final Document doc, final String type, final KeyValue collectedFrom, final DataInfo info, final long lastUpdateTimestamp) { + + final List oafs = new ArrayList<>(); + + switch (type.toLowerCase()) { + case "": + case "publication": + final Publication p = new Publication(); + populateResultFields(p, doc, collectedFrom, info, lastUpdateTimestamp); + p.setResulttype(PUBLICATION_RESULTTYPE_QUALIFIER); + p.setJournal(prepareJournal(doc, info)); + oafs.add(p); + break; + case "dataset": + final Dataset d = new Dataset(); + populateResultFields(d, doc, collectedFrom, info, lastUpdateTimestamp); + d.setResulttype(DATASET_RESULTTYPE_QUALIFIER); + d.setStoragedate(prepareDatasetStorageDate(doc, info)); + d.setDevice(prepareDatasetDevice(doc, info)); + d.setSize(prepareDatasetSize(doc, info)); + 
d.setVersion(prepareDatasetVersion(doc, info)); + d.setLastmetadataupdate(prepareDatasetLastMetadataUpdate(doc, info)); + d.setMetadataversionnumber(prepareDatasetMetadataVersionNumber(doc, info)); + d.setGeolocation(prepareDatasetGeoLocations(doc, info)); + oafs.add(d); + break; + case "software": + final Software s = new Software(); + populateResultFields(s, doc, collectedFrom, info, lastUpdateTimestamp); + s.setResulttype(SOFTWARE_RESULTTYPE_QUALIFIER); + s.setDocumentationUrl(prepareSoftwareDocumentationUrls(doc, info)); + s.setLicense(prepareSoftwareLicenses(doc, info)); + s.setCodeRepositoryUrl(prepareSoftwareCodeRepositoryUrl(doc, info)); + s.setProgrammingLanguage(prepareSoftwareProgrammingLanguage(doc, info)); + oafs.add(s); + break; + case "otherresearchproducts": + default: + final OtherResearchProduct o = new OtherResearchProduct(); + populateResultFields(o, doc, collectedFrom, info, lastUpdateTimestamp); + o.setResulttype(OTHER_RESULTTYPE_QUALIFIER); + o.setContactperson(prepareOtherResearchProductContactPersons(doc, info)); + o.setContactgroup(prepareOtherResearchProductContactGroups(doc, info)); + o.setTool(prepareOtherResearchProductTools(doc, info)); + oafs.add(o); + break; + } + + if (!oafs.isEmpty()) { + addRelations(oafs, doc, "TYPE", collectedFrom, info, lastUpdateTimestamp); // TODO + addRelations(oafs, doc, "TYPE", collectedFrom, info, lastUpdateTimestamp); // TODO + addRelations(oafs, doc, "TYPE", collectedFrom, info, lastUpdateTimestamp); // TODO + } + + return oafs; + } + + private void populateResultFields(final Result r, final Document doc, final KeyValue collectedFrom, final DataInfo info, final long lastUpdateTimestamp) { + r.setDataInfo(info); + r.setLastupdatetimestamp(lastUpdateTimestamp); + r.setId(createOpenaireId(50, doc.valueOf("//dri:objIdentifier"))); + r.setOriginalId(Arrays.asList(doc.valueOf("//dri:objIdentifier"))); + r.setCollectedfrom(Arrays.asList(collectedFrom)); + r.setPid(prepareListStructProps(doc, "//oaf:identifier", "@identifierType", "dnet:pid_types", "dnet:pid_types", info)); + r.setDateofcollection(doc.valueOf("//dr:dateOfCollection")); + r.setDateoftransformation(doc.valueOf("//dr:dateOfTransformation")); + r.setExtraInfo(new ArrayList<>()); // NOT PRESENT IN MDSTORES + r.setOaiprovenance(prepareOAIprovenance(doc)); + r.setAuthor(prepareAuthors(doc, info)); + r.setLanguage(prepareLanguages(doc)); + r.setCountry(new ArrayList<>()); // NOT PRESENT IN MDSTORES + r.setSubject(prepareSubjects(doc, info)); + r.setTitle(prepareTitles(doc, info)); + r.setRelevantdate(prepareRelevantDates(doc, info)); + r.setDescription(prepareDescriptions(doc, info)); + r.setDateofacceptance(prepareField(doc, "//oaf:dateAccepted", info)); + r.setPublisher(preparePublisher(doc, info)); + r.setEmbargoenddate(prepareEmbargoEndDate(doc, info)); + r.setSource(prepareSources(doc, info)); + r.setFulltext(null); // NOT PRESENT IN MDSTORES + r.setFormat(prepareFormats(doc, info)); + r.setContributor(prepareContributors(doc, info)); + r.setResourcetype(null); // TODO + r.setCoverage(prepareCoverages(doc, info)); + r.setRefereed(null); // TODO + r.setContext(null); // TODO + r.setExternalReference(null); // TODO + r.setInstance(prepareInstances(doc, info)); + r.setProcessingchargeamount(null); // TODO + r.setProcessingchargecurrency(null); // TODO + } + + protected abstract List prepareInstances(Document doc, DataInfo info); + + protected abstract List> prepareSources(Document doc, DataInfo info); + + protected abstract Field prepareEmbargoEndDate(Document doc, DataInfo 
info); + + protected abstract List prepareRelevantDates(Document doc, DataInfo info); + + protected abstract List> prepareCoverages(Document doc, DataInfo info); + + protected abstract List> prepareContributors(Document doc, DataInfo info); + + protected abstract List> prepareFormats(Document doc, DataInfo info); + + protected abstract Field preparePublisher(Document doc, DataInfo info); + + protected abstract List> prepareDescriptions(Document doc, DataInfo info); + + protected abstract List prepareTitles(Document doc, DataInfo info); + + protected abstract List prepareSubjects(Document doc, DataInfo info); + + protected abstract Qualifier prepareLanguages(Document doc); + + protected abstract List prepareAuthors(Document doc, DataInfo info); + + protected abstract List> prepareOtherResearchProductTools(Document doc, DataInfo info); + + protected abstract List> prepareOtherResearchProductContactGroups(Document doc, DataInfo info); + + protected abstract List> prepareOtherResearchProductContactPersons(Document doc, DataInfo info); + + protected abstract Qualifier prepareSoftwareProgrammingLanguage(Document doc, DataInfo info); + + protected abstract Field prepareSoftwareCodeRepositoryUrl(Document doc, DataInfo info); + + protected abstract List prepareSoftwareLicenses(Document doc, DataInfo info); + + protected abstract List> prepareSoftwareDocumentationUrls(Document doc, DataInfo info); + + protected abstract List prepareDatasetGeoLocations(Document doc, DataInfo info); + + protected abstract Field prepareDatasetMetadataVersionNumber(Document doc, DataInfo info); + + protected abstract Field prepareDatasetLastMetadataUpdate(Document doc, DataInfo info); + + protected abstract Field prepareDatasetVersion(Document doc, DataInfo info); + + protected abstract Field prepareDatasetSize(Document doc, DataInfo info); + + protected abstract Field prepareDatasetDevice(Document doc, DataInfo info); + + protected abstract Field prepareDatasetStorageDate(Document doc, DataInfo info); + + abstract protected void addRelations(final List oafs, + final Document doc, + final String type, + final KeyValue collectedFrom, + final DataInfo info, + final long lastUpdateTimestamp); + + private Journal prepareJournal(final Document doc, final DataInfo info) { + final Node n = doc.selectSingleNode("//oaf:journal"); + if (n != null) { + final String name = n.getText(); + final String issnPrinted = n.valueOf("@issn"); + final String issnOnline = n.valueOf("@eissn"); + final String issnLinking = n.valueOf("@lissn"); + if (StringUtils.isNotBlank(name)) { return journal(name, issnPrinted, issnOnline, issnLinking, null, null, null, null, null, null, null, info); } + } + return null; + } + + protected Qualifier prepareQualifier(final Document doc, final String xpath, final String schemeId, final String schemeName) { + final String classId = doc.valueOf(xpath); + final String className = code2name.get(classId); + return qualifier(classId, className, schemeId, schemeName); + } + + protected List prepareListStructProps(final Document doc, + final String xpath, + final String xpathClassId, + final String schemeId, + final String schemeName, + final DataInfo info) { + final List res = new ArrayList<>(); + for (final Object o : doc.selectNodes(xpath)) { + final Node n = (Node) o; + final String classId = n.valueOf(xpathClassId); + final String className = code2name.get(classId); + res.add(structuredProperty(n.getText(), classId, className, schemeId, schemeName, info)); + } + return res; + } + + protected List 
prepareListStructProps(final Document doc, final String xpath, final Qualifier qualifier, final DataInfo info) { + final List res = new ArrayList<>(); + for (final Object o : doc.selectNodes(xpath)) { + final Node n = (Node) o; + res.add(structuredProperty(n.getText(), qualifier, info)); + } + return res; + } + + protected List prepareListStructProps(final Document doc, final String xpath, final DataInfo info) { + final List res = new ArrayList<>(); + for (final Object o : doc.selectNodes(xpath)) { + final Node n = (Node) o; + res.add(structuredProperty(n.getText(), n.valueOf("@classid"), n.valueOf("@classname"), n.valueOf("@schemeid"), n + .valueOf("@schemename"), info)); + } + return res; + } + + protected OAIProvenance prepareOAIprovenance(final Document doc) { + final Node n = doc.selectSingleNode("//*[local-name()='provenance']/*[local-name()='originDescription']"); + + final String identifier = n.valueOf("./*[local-name()='identifier']"); + final String baseURL = n.valueOf("./*[local-name()='baseURL']");; + final String metadataNamespace = n.valueOf("./*[local-name()='metadataNamespace']");; + final boolean altered = n.valueOf("@altered").equalsIgnoreCase("true"); + final String datestamp = n.valueOf("./*[local-name()='datestamp']");; + final String harvestDate = n.valueOf("@harvestDate");; + + return oaiIProvenance(identifier, baseURL, metadataNamespace, altered, datestamp, harvestDate); + } + + protected DataInfo prepareDataInfo(final Document doc) { + final Node n = doc.selectSingleNode("//oaf:datainfo"); + + final String paClassId = n.valueOf("./oaf:provenanceaction/@classid"); + final String paClassName = n.valueOf("./oaf:provenanceaction/@classname"); + final String paSchemeId = n.valueOf("./oaf:provenanceaction/@schemeid"); + final String paSchemeName = n.valueOf("./oaf:provenanceaction/@schemename"); + + final boolean deletedbyinference = Boolean.parseBoolean(n.valueOf("./oaf:deletedbyinference")); + final String inferenceprovenance = n.valueOf("./oaf:inferenceprovenance"); + final Boolean inferred = Boolean.parseBoolean(n.valueOf("./oaf:inferred")); + final String trust = n.valueOf("./oaf:trust"); + + return dataInfo(deletedbyinference, inferenceprovenance, inferred, false, qualifier(paClassId, paClassName, paSchemeId, paSchemeName), trust); + } + + protected Field prepareField(final Document doc, final String xpath, final DataInfo info) { + return field(doc.valueOf(xpath), info); + } + + protected List> prepareListFields(final Document doc, final String xpath, final DataInfo info) { + return listFields(info, (String[]) prepareListString(doc, xpath).toArray()); + } + + protected List prepareListString(final Document doc, final String xpath) { + final List res = new ArrayList<>(); + for (final Object o : doc.selectNodes(xpath)) { + final String s = ((Node) o).getText().trim(); + if (StringUtils.isNotBlank(s)) { + res.add(s); + } + } + return res; + } + + @Override + public void close() throws IOException { + super.close(); + mdstoreClient.close(); + } + +} diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateDbEntitiesApplication.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateDbEntitiesApplication.java index 0b47c5282..12043709f 100644 --- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateDbEntitiesApplication.java +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateDbEntitiesApplication.java @@ -28,7 +28,7 @@ import 
eu.dnetlib.dhp.schema.oaf.Qualifier; import eu.dnetlib.dhp.schema.oaf.Relation; import eu.dnetlib.dhp.schema.oaf.StructuredProperty; -public class MigrateDbEntitiesApplication extends AbstractMigrateApplication implements Closeable { +public class MigrateDbEntitiesApplication extends AbstractMigrationExecutor implements Closeable { private static final Qualifier ENTITYREGISTRY_PROVENANCE_ACTION = qualifier("sysimport:crosswalk:entityregistry", "sysimport:crosswalk:entityregistry", "dnet:provenance_actions", "dnet:provenance_actions"); diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateMongoMdstoresApplication.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateMongoMdstoresApplication.java index f6dcaf0e8..124a4f3cc 100644 --- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateMongoMdstoresApplication.java +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateMongoMdstoresApplication.java @@ -1,56 +1,10 @@ package eu.dnetlib.dhp.migration; -import java.io.Closeable; -import java.io.IOException; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; - import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.dom4j.Document; -import org.dom4j.DocumentException; -import org.dom4j.DocumentFactory; -import org.dom4j.DocumentHelper; -import org.dom4j.Node; import eu.dnetlib.dhp.application.ArgumentApplicationParser; -import eu.dnetlib.dhp.schema.oaf.DataInfo; -import eu.dnetlib.dhp.schema.oaf.Dataset; -import eu.dnetlib.dhp.schema.oaf.Field; -import eu.dnetlib.dhp.schema.oaf.KeyValue; -import eu.dnetlib.dhp.schema.oaf.OAIProvenance; -import eu.dnetlib.dhp.schema.oaf.Oaf; -import eu.dnetlib.dhp.schema.oaf.OtherResearchProduct; -import eu.dnetlib.dhp.schema.oaf.Publication; -import eu.dnetlib.dhp.schema.oaf.Qualifier; -import eu.dnetlib.dhp.schema.oaf.Relation; -import eu.dnetlib.dhp.schema.oaf.Result; -import eu.dnetlib.dhp.schema.oaf.Software; -import eu.dnetlib.dhp.schema.oaf.StructuredProperty; -public class MigrateMongoMdstoresApplication extends AbstractMigrateApplication implements Closeable { - - private static final Log log = LogFactory.getLog(MigrateMongoMdstoresApplication.class); - - private final Map code2name = new HashMap<>(); - - private final MdstoreClient mdstoreClient; - - private static final Qualifier MAIN_TITLE_QUALIFIER = qualifier("main title", "main title", "dnet:dataCite_title", "dnet:dataCite_title"); - - private static final Qualifier PUBLICATION_RESULTTYPE_QUALIFIER = - qualifier("publication", "publication", "dnet:result_typologies", "dnet:result_typologies"); - private static final Qualifier DATASET_RESULTTYPE_QUALIFIER = qualifier("dataset", "dataset", "dnet:result_typologies", "dnet:result_typologies"); - private static final Qualifier SOFTWARE_RESULTTYPE_QUALIFIER = qualifier("software", "software", "dnet:result_typologies", "dnet:result_typologies"); - private static final Qualifier OTHER_RESULTTYPE_QUALIFIER = qualifier("other", "other", "dnet:result_typologies", "dnet:result_typologies"); +public class MigrateMongoMdstoresApplication { public static void main(final String[] args) throws Exception { final ArgumentApplicationParser parser = new 
ArgumentApplicationParser( @@ -72,294 +26,17 @@ public class MigrateMongoMdstoresApplication extends AbstractMigrateApplication final String dbUser = parser.get("postgresUser"); final String dbPassword = parser.get("postgresPassword"); - try (final MigrateMongoMdstoresApplication mig = - new MigrateMongoMdstoresApplication(hdfsPath, hdfsNameNode, hdfsUser, mongoBaseUrl, mongoDb, dbUrl, dbUser, dbPassword)) { - mig.processMdRecords(mdFormat, mdLayout, mdInterpretation); - } - - } - - public MigrateMongoMdstoresApplication(final String hdfsPath, final String hdfsNameNode, final String hdfsUser, final String mongoBaseUrl, - final String mongoDb, final String dbUrl, final String dbUser, - final String dbPassword) throws Exception { - super(hdfsPath, hdfsNameNode, hdfsUser); - - this.mdstoreClient = new MdstoreClient(mongoBaseUrl, mongoDb); - loadClassNames(dbUrl, dbUser, dbPassword); - - final Map nsContext = new HashMap<>(); - nsContext.put("dc", "http://purl.org/dc/elements/1.1/"); - nsContext.put("dr", "http://www.driver-repository.eu/namespace/dr"); - nsContext.put("dri", "http://www.driver-repository.eu/namespace/dri"); - nsContext.put("oaf", "http://namespace.openaire.eu/oaf"); - nsContext.put("oai", "http://www.openarchives.org/OAI/2.0/"); - nsContext.put("prov", "http://www.openarchives.org/OAI/2.0/provenance"); - DocumentFactory.getInstance().setXPathNamespaceURIs(nsContext); - } - - private void loadClassNames(final String dbUrl, final String dbUser, final String dbPassword) throws IOException { - try (DbClient dbClient = new DbClient(dbUrl, dbUser, dbPassword)) { - code2name.clear(); - dbClient.processResults("select code, name from class", rs -> { - try { - code2name.put(rs.getString("code"), rs.getString("name")); - } catch (final SQLException e) { - e.printStackTrace(); - } - }); - } - - } - - public void processMdRecords(final String mdFormat, final String mdLayout, final String mdInterpretation) throws DocumentException { - - for (final Entry entry : mdstoreClient.validCollections(mdFormat, mdLayout, mdInterpretation).entrySet()) { - // final String mdId = entry.getKey(); - final String currentColl = entry.getValue(); - - for (final String xml : mdstoreClient.listRecords(currentColl)) { - for (final Oaf oaf : createOafs(xml)) { - emitOaf(oaf); - } + if (mdFormat.equalsIgnoreCase("oaf")) { + try (final OafMigrationExecutor mig = + new OafMigrationExecutor(hdfsPath, hdfsNameNode, hdfsUser, mongoBaseUrl, mongoDb, dbUrl, dbUser, dbPassword)) { + mig.processMdRecords(mdFormat, mdLayout, mdInterpretation); } - } - } + } else if (mdFormat.equalsIgnoreCase("oaf")) { - private List createOafs(final String xml) throws DocumentException { - - final Document doc = DocumentHelper.parseText(xml); - - final String type = doc.valueOf("//dr:CobjCategory/@type"); - final KeyValue collectedFrom = keyValue(doc.valueOf("//oaf:collectedFrom/@id"), doc.valueOf("//oaf:collectedFrom/@name")); - final DataInfo info = prepareDataInfo(doc); - final long lastUpdateTimestamp = new Date().getTime(); - - final List oafs = new ArrayList<>(); - - switch (type.toLowerCase()) { - case "": - case "publication": - final Publication p = new Publication(); - populateResultFields(p, doc, collectedFrom, info, lastUpdateTimestamp); - p.setResulttype(PUBLICATION_RESULTTYPE_QUALIFIER); - p.setJournal(null); // TODO - oafs.add(p); - break; - case "dataset": - final Dataset d = new Dataset(); - populateResultFields(d, doc, collectedFrom, info, lastUpdateTimestamp); - d.setResulttype(DATASET_RESULTTYPE_QUALIFIER); - 
d.setStoragedate(null); // TODO - d.setDevice(null); // TODO - d.setSize(null); // TODO - d.setVersion(null); // TODO - d.setLastmetadataupdate(null); // TODO - d.setMetadataversionnumber(null); // TODO - d.setGeolocation(null); // TODO - oafs.add(d); - break; - case "otherresearchproducts": - - case "software": - final Software s = new Software(); - populateResultFields(s, doc, collectedFrom, info, lastUpdateTimestamp); - s.setResulttype(SOFTWARE_RESULTTYPE_QUALIFIER); - s.setDocumentationUrl(null); // TODO - s.setLicense(null); // TODO - s.setCodeRepositoryUrl(null); // TODO - s.setProgrammingLanguage(null); // TODO - oafs.add(s); - break; - default: - final OtherResearchProduct o = new OtherResearchProduct(); - populateResultFields(o, doc, collectedFrom, info, lastUpdateTimestamp); - o.setResulttype(OTHER_RESULTTYPE_QUALIFIER); - o.setContactperson(null); // TODO - o.setContactgroup(null); // TODO - o.setTool(null); // TODO - oafs.add(o); - break; + } else { + throw new RuntimeException("Format not supported: " + mdFormat); } - if (!oafs.isEmpty()) { - addRelations(oafs, doc, "//*", "TYPE", collectedFrom, info, lastUpdateTimestamp); // TODO - addRelations(oafs, doc, "//*", "TYPE", collectedFrom, info, lastUpdateTimestamp); // TODO - addRelations(oafs, doc, "//*", "TYPE", collectedFrom, info, lastUpdateTimestamp); // TODO - } - - return oafs; - } - - private void addRelations(final List oafs, - final Document doc, - final String xpath, - final String type, - final KeyValue collectedFrom, - final DataInfo info, - final long lastUpdateTimestamp) { - for (final Object o : doc.selectNodes(xpath)) { - final Node n = (Node) o; - final Relation r = new Relation(); - r.setRelType(null); // TODO - r.setSubRelType(null); // TODO - r.setRelClass(null); // TODO - r.setSource(null); // TODO - r.setTarget(null); // TODO - r.setCollectedFrom(Arrays.asList(collectedFrom)); - r.setDataInfo(info); - r.setLastupdatetimestamp(lastUpdateTimestamp); - oafs.add(r); - } - - } - - private void populateResultFields(final Result r, final Document doc, final KeyValue collectedFrom, final DataInfo info, final long lastUpdateTimestamp) { - - r.setDataInfo(info); - r.setLastupdatetimestamp(lastUpdateTimestamp); - r.setId(createOpenaireId(50, doc.valueOf("//dri:objIdentifier"))); - r.setOriginalId(Arrays.asList(doc.valueOf("//dri:objIdentifier"))); - r.setCollectedfrom(Arrays.asList(collectedFrom)); - r.setPid(prepareListStructProps(doc, "//oaf:identifier", "@identifierType", "dnet:pid_types", "dnet:pid_types", info)); - r.setDateofcollection(doc.valueOf("//dr:dateOfCollection")); - r.setDateoftransformation(doc.valueOf("//dr:dateOfTransformation")); - r.setExtraInfo(new ArrayList<>()); // NOT PRESENT IN MDSTORES - r.setOaiprovenance(prepareOAIprovenance(doc)); - r.setAuthor(null); // TODO - r.setLanguage(prepareQualifier(doc, "//dc:language", "dnet:languages", "dnet:languages")); - r.setCountry(new ArrayList<>()); // NOT PRESENT IN MDSTORES - r.setSubject(prepareListStructProps(doc, "//dc:subject", info)); - r.setTitle(prepareListStructProps(doc, "//dc:title", MAIN_TITLE_QUALIFIER, info)); - r.setRelevantdate(null); // TODO - r.setDescription(prepareListFields(doc, "//dc:description", info)); - r.setDateofacceptance(prepareField(doc, "//oaf:dateAccepted", info)); - r.setPublisher(prepareField(doc, "//dc:publisher", info)); - r.setEmbargoenddate(null); // TODO - r.setSource(null); // TODO - r.setFulltext(null); // TODO - r.setFormat(prepareListFields(doc, "//dc:format", info)); - r.setContributor(prepareListFields(doc, 
"//dc:contributor", info)); - r.setResourcetype(null); // TODO - r.setCoverage(prepareListFields(doc, "//dc:coverage", info)); - r.setRefereed(null); // TODO - r.setContext(null); // TODO - r.setExternalReference(null); // TODO - r.setInstance(null); // TODO - r.setProcessingchargeamount(null); // TODO - r.setProcessingchargecurrency(null); // TODO - } - - private Qualifier prepareQualifier(final Document doc, final String xpath, final String schemeId, final String schemeName) { - final String classId = doc.valueOf(xpath); - final String className = code2name.get(classId); - return qualifier(classId, className, schemeId, schemeName); - } - - private List prepareListStructProps(final Document doc, - final String xpath, - final String xpathClassId, - final String schemeId, - final String schemeName, - final DataInfo info) { - final List res = new ArrayList<>(); - for (final Object o : doc.selectNodes(xpath)) { - final Node n = (Node) o; - final String classId = n.valueOf(xpathClassId); - final String className = code2name.get(classId); - res.add(structuredProperty(n.getText(), classId, className, schemeId, schemeName, info)); - } - return res; - } - - private List prepareListStructProps(final Document doc, final String xpath, final Qualifier qualifier, final DataInfo info) { - final List res = new ArrayList<>(); - for (final Object o : doc.selectNodes(xpath)) { - final Node n = (Node) o; - res.add(structuredProperty(n.getText(), qualifier, info)); - } - return res; - } - - private List prepareListStructProps(final Document doc, final String xpath, final DataInfo info) { - final List res = new ArrayList<>(); - for (final Object o : doc.selectNodes(xpath)) { - final Node n = (Node) o; - res.add(structuredProperty(n.getText(), n.valueOf("@classid"), n.valueOf("@classname"), n.valueOf("@schemeid"), n - .valueOf("@schemename"), info)); - } - return res; - } - - private OAIProvenance prepareOAIprovenance(final Document doc) { - final Node n = doc.selectSingleNode("//*[local-name()='provenance']/*[local-name()='originDescription']"); - - final String identifier = n.valueOf("./*[local-name()='identifier']"); - final String baseURL = n.valueOf("./*[local-name()='baseURL']");; - final String metadataNamespace = n.valueOf("./*[local-name()='metadataNamespace']");; - final boolean altered = n.valueOf("@altered").equalsIgnoreCase("true"); - final String datestamp = n.valueOf("./*[local-name()='datestamp']");; - final String harvestDate = n.valueOf("@harvestDate");; - - return oaiIProvenance(identifier, baseURL, metadataNamespace, altered, datestamp, harvestDate); - } - - private DataInfo prepareDataInfo(final Document doc) { - final Node n = doc.selectSingleNode("//oaf:datainfo"); - - final String paClassId = n.valueOf("./oaf:provenanceaction/@classid"); - final String paClassName = n.valueOf("./oaf:provenanceaction/@classname"); - final String paSchemeId = n.valueOf("./oaf:provenanceaction/@schemeid"); - final String paSchemeName = n.valueOf("./oaf:provenanceaction/@schemename"); - - final boolean deletedbyinference = Boolean.parseBoolean(n.valueOf("./oaf:deletedbyinference")); - final String inferenceprovenance = n.valueOf("./oaf:inferenceprovenance"); - final Boolean inferred = Boolean.parseBoolean(n.valueOf("./oaf:inferred")); - final String trust = n.valueOf("./oaf:trust"); - - return dataInfo(deletedbyinference, inferenceprovenance, inferred, false, qualifier(paClassId, paClassName, paSchemeId, paSchemeName), trust); - } - - private Field prepareField(final Document doc, final String xpath, final 
DataInfo info) { - return field(doc.valueOf(xpath), info); - } - - private List> prepareListFields(final Document doc, final String xpath, final DataInfo info) { - return listFields(info, (String[]) prepareListString(doc, xpath).toArray()); - } - - private List prepareListString(final Document doc, final String xpath) { - final List res = new ArrayList<>(); - for (final Object o : doc.selectNodes(xpath)) { - final String s = ((Node) o).getText().trim(); - if (StringUtils.isNotBlank(s)) { - res.add(s); - } - } - return res; - } - /* - * private StructuredProperty prepareStructProp(final Document doc, final String xpath, final DataInfo dataInfo) { if - * (StringUtils.isBlank(s)) { return null; } final String[] parts = s.split("###"); if (parts.length == 2) { final String value = - * parts[0]; final String[] arr = parts[1].split("@@@"); if (arr.length == 4) { return structuredProperty(value, arr[0], arr[1], arr[2], - * arr[3], dataInfo); } } return null; } - * - * private List prepareListOfStructProps(final Document doc, final String xpath, final DataInfo dataInfo) { final - * List res = new ArrayList<>(); if (array != null) { for (final String s : (String[]) array.getArray()) { final - * StructuredProperty sp = prepareStructProp(s, dataInfo); if (sp != null) { res.add(sp); } } } - * - * return res; } - * - * private Journal prepareJournal(final Document doc, final String xpath, final DataInfo info) { if (StringUtils.isNotBlank(sj)) { final - * String[] arr = sj.split("@@@"); if (arr.length == 3) { final String issn = StringUtils.isNotBlank(arr[0]) ? arr[0] : null; final - * String eissn = StringUtils.isNotBlank(arr[1]) ? arr[1] : null;; final String lissn = StringUtils.isNotBlank(arr[2]) ? arr[2] : null;; - * if (issn != null || eissn != null || lissn != null) { return journal(name, issn, eissn, eissn, null, null, null, null, null, null, - * null, info); } } } return null; } - */ - - @Override - public void close() throws IOException { - super.close(); - mdstoreClient.close(); } } diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrationUtils.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrationUtils.java deleted file mode 100644 index c58688a79..000000000 --- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrationUtils.java +++ /dev/null @@ -1,154 +0,0 @@ -package eu.dnetlib.dhp.migration; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.stream.Collectors; - -import org.apache.commons.lang3.StringUtils; - -import eu.dnetlib.dhp.schema.oaf.DataInfo; -import eu.dnetlib.dhp.schema.oaf.ExtraInfo; -import eu.dnetlib.dhp.schema.oaf.Field; -import eu.dnetlib.dhp.schema.oaf.Journal; -import eu.dnetlib.dhp.schema.oaf.KeyValue; -import eu.dnetlib.dhp.schema.oaf.OAIProvenance; -import eu.dnetlib.dhp.schema.oaf.OriginDescription; -import eu.dnetlib.dhp.schema.oaf.Qualifier; -import eu.dnetlib.dhp.schema.oaf.StructuredProperty; -import eu.dnetlib.dhp.utils.DHPUtils; - -public class MigrationUtils { - - public static KeyValue keyValue(final String k, final String v) { - final KeyValue kv = new KeyValue(); - kv.setKey(k); - kv.setValue(v); - return kv; - } - - public static List listKeyValues(final String... 
s) { - if (s.length % 2 > 0) { throw new RuntimeException("Invalid number of parameters (k,v,k,v,....)"); } - - final List list = new ArrayList<>(); - for (int i = 0; i < s.length; i += 2) { - list.add(keyValue(s[i], s[i + 1])); - } - return list; - } - - public static Field field(final T value, final DataInfo info) { - final Field field = new Field<>(); - field.setValue(value); - field.setDataInfo(info); - return field; - } - - public static List> listFields(final DataInfo info, final String... values) { - return Arrays.stream(values).map(v -> field(v, info)).collect(Collectors.toList()); - } - - public static Qualifier qualifier(final String classid, final String classname, final String schemeid, final String schemename) { - final Qualifier q = new Qualifier(); - q.setClassid(classid); - q.setClassname(classname); - q.setSchemeid(schemeid); - q.setSchemename(schemename); - return q; - } - - public static StructuredProperty structuredProperty(final String value, - final String classid, - final String classname, - final String schemeid, - final String schemename, - final DataInfo dataInfo) { - final StructuredProperty sp = new StructuredProperty(); - sp.setValue(value); - sp.setQualifier(qualifier(classid, classname, schemeid, schemename)); - sp.setDataInfo(dataInfo); - return sp; - } - - public static ExtraInfo extraInfo(final String name, final String value, final String typology, final String provenance, final String trust) { - final ExtraInfo info = new ExtraInfo(); - info.setName(name); - info.setValue(value); - info.setTypology(typology); - info.setProvenance(provenance); - info.setTrust(trust); - return info; - } - - public static OAIProvenance oaiIProvenance(final String identifier, - final String baseURL, - final String metadataNamespace, - final Boolean altered, - final String datestamp, - final String harvestDate) { - - final OriginDescription desc = new OriginDescription(); - desc.setIdentifier(identifier); - desc.setBaseURL(baseURL); - desc.setMetadataNamespace(metadataNamespace); - desc.setAltered(altered); - desc.setDatestamp(datestamp); - desc.setHarvestDate(harvestDate); - - final OAIProvenance p = new OAIProvenance(); - p.setOriginDescription(desc); - - return p; - } - - public static Journal journal(final String name, - final String issnPrinted, - final String issnOnline, - final String issnLinking, - final String ep, - final String iss, - final String sp, - final String vol, - final String edition, - final String conferenceplace, - final String conferencedate, - final DataInfo dataInfo) { - final Journal j = new Journal(); - j.setName(name); - j.setIssnPrinted(issnPrinted); - j.setIssnOnline(issnOnline); - j.setIssnLinking(issnLinking); - j.setEp(ep); - j.setIss(iss); - j.setSp(sp); - j.setVol(vol); - j.setEdition(edition); - j.setConferenceplace(conferenceplace); - j.setConferencedate(conferencedate); - j.setDataInfo(dataInfo); - return j; - } - - public static DataInfo dataInfo(final Boolean deletedbyinference, - final String inferenceprovenance, - final Boolean inferred, - final Boolean invisible, - final Qualifier provenanceaction, - final String trust) { - final DataInfo d = new DataInfo(); - d.setDeletedbyinference(deletedbyinference); - d.setInferenceprovenance(inferenceprovenance); - d.setInferred(inferred); - d.setInvisible(invisible); - d.setProvenanceaction(provenanceaction); - d.setTrust(trust); - return d; - } - - public static String createOpenaireId(final String prefix, final String originalId) { - final String nsPrefix = 
StringUtils.substringBefore(originalId, "::"); - final String rest = StringUtils.substringAfter(originalId, "::"); - return String.format("%s|%s::%s", prefix, nsPrefix, DHPUtils.md5(rest)); - } - -} diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/OafMigrationExecutor.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/OafMigrationExecutor.java new file mode 100644 index 000000000..4d222f360 --- /dev/null +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/OafMigrationExecutor.java @@ -0,0 +1,246 @@ +package eu.dnetlib.dhp.migration; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.dom4j.Document; +import org.dom4j.Node; + +import eu.dnetlib.dhp.schema.oaf.Author; +import eu.dnetlib.dhp.schema.oaf.DataInfo; +import eu.dnetlib.dhp.schema.oaf.Field; +import eu.dnetlib.dhp.schema.oaf.GeoLocation; +import eu.dnetlib.dhp.schema.oaf.Instance; +import eu.dnetlib.dhp.schema.oaf.KeyValue; +import eu.dnetlib.dhp.schema.oaf.Oaf; +import eu.dnetlib.dhp.schema.oaf.Qualifier; +import eu.dnetlib.dhp.schema.oaf.Relation; +import eu.dnetlib.dhp.schema.oaf.StructuredProperty; + +public class OafMigrationExecutor extends AbstractMongoExecutor { + + public OafMigrationExecutor(final String hdfsPath, final String hdfsNameNode, final String hdfsUser, final String mongoBaseUrl, final String mongoDb, + final String dbUrl, final String dbUser, + final String dbPassword) throws Exception { + super(hdfsPath, hdfsNameNode, hdfsUser, mongoBaseUrl, mongoDb, dbUrl, dbUser, dbPassword); + } + + private static final Log log = LogFactory.getLog(MigrateMongoMdstoresApplication.class); + + @Override + protected void registerNamespaces(final Map nsContext) { + nsContext.put("dc", "http://purl.org/dc/elements/1.1/"); + nsContext.put("dr", "http://www.driver-repository.eu/namespace/dr"); + nsContext.put("dri", "http://www.driver-repository.eu/namespace/dri"); + nsContext.put("oaf", "http://namespace.openaire.eu/oaf"); + nsContext.put("oai", "http://www.openarchives.org/OAI/2.0/"); + nsContext.put("prov", "http://www.openarchives.org/OAI/2.0/provenance"); + } + + @Override + protected void addRelations(final List oafs, + final Document doc, + final String type, + final KeyValue collectedFrom, + final DataInfo info, + final long lastUpdateTimestamp) { + for (final Object o : doc.selectNodes("//")) { // TODO + final Node n = (Node) o; + final Relation r = new Relation(); + r.setRelType(null); // TODO + r.setSubRelType(null); // TODO + r.setRelClass(null); // TODO + r.setSource(null); // TODO + r.setTarget(null); // TODO + r.setCollectedFrom(Arrays.asList(collectedFrom)); + r.setDataInfo(info); + r.setLastupdatetimestamp(lastUpdateTimestamp); + oafs.add(r); + } + + } + + @Override + protected List prepareAuthors(final Document doc, final DataInfo info) { + final List res = new ArrayList<>(); + int pos = 1; + for (final Object o : doc.selectNodes("//dc:creator")) { + final Node n = (Node) o; + final Author author = new Author(); + author.setFullname(n.getText()); + author.setRank(pos++); + } + return res; + } + + @Override + protected Qualifier prepareLanguages(final Document doc) { + return prepareQualifier(doc, "//dc:language", "dnet:languages", "dnet:languages"); + } + + @Override + protected List prepareSubjects(final Document doc, final DataInfo info) { + return prepareListStructProps(doc, 
"//dc:subject", info); + } + + @Override + protected List prepareTitles(final Document doc, final DataInfo info) { + return prepareListStructProps(doc, "//dc:title", MAIN_TITLE_QUALIFIER, info); + } + + @Override + protected List> prepareDescriptions(final Document doc, final DataInfo info) { + return prepareListFields(doc, "//dc:description", info); + } + + @Override + protected Field preparePublisher(final Document doc, final DataInfo info) { + return prepareField(doc, "//dc:publisher", info); + } + + @Override + protected List> prepareFormats(final Document doc, final DataInfo info) { + return prepareListFields(doc, "//dc:format", info); + } + + @Override + protected List> prepareContributors(final Document doc, final DataInfo info) { + return prepareListFields(doc, "//dc:contributor", info); + } + + @Override + protected List> prepareCoverages(final Document doc, final DataInfo info) { + return prepareListFields(doc, "//dc:coverage", info); + } + + @Override + protected List prepareInstances(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected List> prepareSources(final Document doc, final DataInfo info) { + return prepareListFields(doc, "//dc:source", info); + } + + @Override + protected Field prepareEmbargoEndDate(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected List prepareRelevantDates(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected List> prepareOtherResearchProductTools(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected List> prepareOtherResearchProductContactGroups(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected List> prepareOtherResearchProductContactPersons(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected Qualifier prepareSoftwareProgrammingLanguage(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected Field prepareSoftwareCodeRepositoryUrl(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected List prepareSoftwareLicenses(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected List> prepareSoftwareDocumentationUrls(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected List prepareDatasetGeoLocations(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected Field prepareDatasetMetadataVersionNumber(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected Field prepareDatasetLastMetadataUpdate(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected Field prepareDatasetVersion(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected Field prepareDatasetSize(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected Field 
prepareDatasetDevice(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected Field prepareDatasetStorageDate(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + /* + * private StructuredProperty prepareStructProp(final Document doc, final String xpath, final DataInfo dataInfo) { if + * (StringUtils.isBlank(s)) { return null; } final String[] parts = s.split("###"); if (parts.length == 2) { final String value = + * parts[0]; final String[] arr = parts[1].split("@@@"); if (arr.length == 4) { return structuredProperty(value, arr[0], arr[1], arr[2], + * arr[3], dataInfo); } } return null; } + * + * private List prepareListOfStructProps(final Document doc, final String xpath, final DataInfo dataInfo) { final + * List res = new ArrayList<>(); if (array != null) { for (final String s : (String[]) array.getArray()) { final + * StructuredProperty sp = prepareStructProp(s, dataInfo); if (sp != null) { res.add(sp); } } } + * + * return res; } + * + * private Journal prepareJournal(final Document doc, final String xpath, final DataInfo info) { if (StringUtils.isNotBlank(sj)) { final + * String[] arr = sj.split("@@@"); if (arr.length == 3) { final String issn = StringUtils.isNotBlank(arr[0]) ? arr[0] : null; final + * String eissn = StringUtils.isNotBlank(arr[1]) ? arr[1] : null;; final String lissn = StringUtils.isNotBlank(arr[2]) ? arr[2] : null;; + * if (issn != null || eissn != null || lissn != null) { return journal(name, issn, eissn, eissn, null, null, null, null, null, null, + * null, info); } } } return null; } + */ + +} From bb1533a07e0dbaac9e71bbcf29f523e69e9dcdc2 Mon Sep 17 00:00:00 2001 From: Michele Artini Date: Wed, 5 Feb 2020 15:35:40 +0100 Subject: [PATCH 08/17] partial commit --- .../dhp/migration/AbstractMongoExecutor.java | 38 +++- .../dhp/migration/OafMigrationExecutor.java | 82 +++---- .../dhp/migration/OdfMigrationExecutor.java | 209 ++++++++++++++++++ 3 files changed, 279 insertions(+), 50 deletions(-) create mode 100644 dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/OdfMigrationExecutor.java diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMongoExecutor.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMongoExecutor.java index 51c39824a..cf1581b4d 100644 --- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMongoExecutor.java +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMongoExecutor.java @@ -94,10 +94,13 @@ public abstract class AbstractMongoExecutor extends AbstractMigrationExecutor { final String type = doc.valueOf("//dr:CobjCategory/@type"); final KeyValue collectedFrom = keyValue(doc.valueOf("//oaf:collectedFrom/@id"), doc.valueOf("//oaf:collectedFrom/@name")); + final KeyValue hostedBy = StringUtils.isBlank(doc.valueOf("//oaf:hostedBy/@id")) ? 
collectedFrom + : keyValue(doc.valueOf("//oaf:hostedBy/@id"), doc.valueOf("//oaf:hostedBy/@name")); + final DataInfo info = prepareDataInfo(doc); final long lastUpdateTimestamp = new Date().getTime(); - for (final Oaf oaf : createOafs(doc, type, collectedFrom, info, lastUpdateTimestamp)) { + for (final Oaf oaf : createOafs(doc, type, collectedFrom, hostedBy, info, lastUpdateTimestamp)) { emitOaf(oaf); } } @@ -106,7 +109,12 @@ public abstract class AbstractMongoExecutor extends AbstractMigrationExecutor { protected abstract void registerNamespaces(Map nsContext); - protected List createOafs(final Document doc, final String type, final KeyValue collectedFrom, final DataInfo info, final long lastUpdateTimestamp) { + protected List createOafs(final Document doc, + final String type, + final KeyValue collectedFrom, + final KeyValue hostedBy, + final DataInfo info, + final long lastUpdateTimestamp) { final List oafs = new ArrayList<>(); @@ -114,14 +122,14 @@ public abstract class AbstractMongoExecutor extends AbstractMigrationExecutor { case "": case "publication": final Publication p = new Publication(); - populateResultFields(p, doc, collectedFrom, info, lastUpdateTimestamp); + populateResultFields(p, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp); p.setResulttype(PUBLICATION_RESULTTYPE_QUALIFIER); p.setJournal(prepareJournal(doc, info)); oafs.add(p); break; case "dataset": final Dataset d = new Dataset(); - populateResultFields(d, doc, collectedFrom, info, lastUpdateTimestamp); + populateResultFields(d, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp); d.setResulttype(DATASET_RESULTTYPE_QUALIFIER); d.setStoragedate(prepareDatasetStorageDate(doc, info)); d.setDevice(prepareDatasetDevice(doc, info)); @@ -134,7 +142,7 @@ public abstract class AbstractMongoExecutor extends AbstractMigrationExecutor { break; case "software": final Software s = new Software(); - populateResultFields(s, doc, collectedFrom, info, lastUpdateTimestamp); + populateResultFields(s, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp); s.setResulttype(SOFTWARE_RESULTTYPE_QUALIFIER); s.setDocumentationUrl(prepareSoftwareDocumentationUrls(doc, info)); s.setLicense(prepareSoftwareLicenses(doc, info)); @@ -145,7 +153,7 @@ public abstract class AbstractMongoExecutor extends AbstractMigrationExecutor { case "otherresearchproducts": default: final OtherResearchProduct o = new OtherResearchProduct(); - populateResultFields(o, doc, collectedFrom, info, lastUpdateTimestamp); + populateResultFields(o, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp); o.setResulttype(OTHER_RESULTTYPE_QUALIFIER); o.setContactperson(prepareOtherResearchProductContactPersons(doc, info)); o.setContactgroup(prepareOtherResearchProductContactGroups(doc, info)); @@ -163,7 +171,12 @@ public abstract class AbstractMongoExecutor extends AbstractMigrationExecutor { return oafs; } - private void populateResultFields(final Result r, final Document doc, final KeyValue collectedFrom, final DataInfo info, final long lastUpdateTimestamp) { + private void populateResultFields(final Result r, + final Document doc, + final KeyValue collectedFrom, + final KeyValue hostedBy, + final DataInfo info, + final long lastUpdateTimestamp) { r.setDataInfo(info); r.setLastupdatetimestamp(lastUpdateTimestamp); r.setId(createOpenaireId(50, doc.valueOf("//dri:objIdentifier"))); @@ -193,12 +206,12 @@ public abstract class AbstractMongoExecutor extends AbstractMigrationExecutor { r.setRefereed(null); // TODO r.setContext(null); // TODO 
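// prepareInstances (changed just below) now also receives the collectedFrom and
// hostedBy datasources, so each Instance can be stamped with its provenance;
// hostedBy falls back to collectedFrom when //oaf:hostedBy/@id is blank.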
r.setExternalReference(null); // TODO - r.setInstance(prepareInstances(doc, info)); + r.setInstance(prepareInstances(doc, info, collectedFrom, hostedBy)); r.setProcessingchargeamount(null); // TODO r.setProcessingchargecurrency(null); // TODO } - protected abstract List prepareInstances(Document doc, DataInfo info); + protected abstract List prepareInstances(Document doc, DataInfo info, KeyValue collectedfrom, KeyValue hostedby); protected abstract List> prepareSources(Document doc, DataInfo info); @@ -266,7 +279,12 @@ public abstract class AbstractMongoExecutor extends AbstractMigrationExecutor { final String issnPrinted = n.valueOf("@issn"); final String issnOnline = n.valueOf("@eissn"); final String issnLinking = n.valueOf("@lissn"); - if (StringUtils.isNotBlank(name)) { return journal(name, issnPrinted, issnOnline, issnLinking, null, null, null, null, null, null, null, info); } + final String ep = n.valueOf("@ep"); + final String iss = n.valueOf("@iss"); + final String sp = n.valueOf("@sp"); + final String vol = n.valueOf("@vol"); + final String edition = n.valueOf("@edition"); + if (StringUtils.isNotBlank(name)) { return journal(name, issnPrinted, issnOnline, issnLinking, ep, iss, sp, vol, edition, null, null, info); } } return null; } diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/OafMigrationExecutor.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/OafMigrationExecutor.java index 4d222f360..f46b31732 100644 --- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/OafMigrationExecutor.java +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/OafMigrationExecutor.java @@ -23,14 +23,14 @@ import eu.dnetlib.dhp.schema.oaf.StructuredProperty; public class OafMigrationExecutor extends AbstractMongoExecutor { + private static final Log log = LogFactory.getLog(OafMigrationExecutor.class); + public OafMigrationExecutor(final String hdfsPath, final String hdfsNameNode, final String hdfsUser, final String mongoBaseUrl, final String mongoDb, final String dbUrl, final String dbUser, final String dbPassword) throws Exception { super(hdfsPath, hdfsNameNode, hdfsUser, mongoBaseUrl, mongoDb, dbUrl, dbUser, dbPassword); } - private static final Log log = LogFactory.getLog(MigrateMongoMdstoresApplication.class); - @Override protected void registerNamespaces(final Map nsContext) { nsContext.put("dc", "http://purl.org/dc/elements/1.1/"); @@ -73,6 +73,7 @@ public class OafMigrationExecutor extends AbstractMongoExecutor { final Author author = new Author(); author.setFullname(n.getText()); author.setRank(pos++); + res.add(author); } return res; } @@ -118,9 +119,24 @@ public class OafMigrationExecutor extends AbstractMongoExecutor { } @Override - protected List prepareInstances(final Document doc, final DataInfo info) { - // TODO Auto-generated method stub - return null; + protected List prepareInstances(final Document doc, final DataInfo info, final KeyValue collectedfrom, final KeyValue hostedby) { + final List res = new ArrayList<>(); + for (final Object o : doc.selectNodes("//dc:identifier")) { + final String url = ((Node) o).getText().trim(); + if (url.startsWith("http")) { + final Instance instance = new Instance(); + instance.setUrl(url); + instance.setInstancetype(prepareQualifier(doc, "//dr:CobjCategory", "dnet:publication_resource", "dnet:publication_resource")); + instance.setCollectedfrom(collectedfrom); + instance.setHostedby(hostedby); + 
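// the remaining instance attributes are read directly from the record's oaf:*
// elements: acceptance date, distribution location, access rights and license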
instance.setDateofacceptance(field(doc.valueOf("//oaf:dateAccepted"), info)); + instance.setDistributionlocation(doc.valueOf("//oaf:distributionlocation")); + instance.setAccessright(prepareQualifier(doc, "//oaf:accessrights", "dnet:access_modes", "dnet:access_modes")); + instance.setLicense(field(doc.valueOf("//oaf:license"), info)); + res.add(instance); + } + } + return res; } @Override @@ -140,23 +156,7 @@ public class OafMigrationExecutor extends AbstractMongoExecutor { return null; } - @Override - protected List> prepareOtherResearchProductTools(final Document doc, final DataInfo info) { - // TODO Auto-generated method stub - return null; - } - - @Override - protected List> prepareOtherResearchProductContactGroups(final Document doc, final DataInfo info) { - // TODO Auto-generated method stub - return null; - } - - @Override - protected List> prepareOtherResearchProductContactPersons(final Document doc, final DataInfo info) { - // TODO Auto-generated method stub - return null; - } + // SOFTWARES @Override protected Qualifier prepareSoftwareProgrammingLanguage(final Document doc, final DataInfo info) { @@ -182,6 +182,7 @@ public class OafMigrationExecutor extends AbstractMongoExecutor { return null; } + // DATASETS @Override protected List prepareDatasetGeoLocations(final Document doc, final DataInfo info) { // TODO Auto-generated method stub @@ -224,23 +225,24 @@ public class OafMigrationExecutor extends AbstractMongoExecutor { return null; } - /* - * private StructuredProperty prepareStructProp(final Document doc, final String xpath, final DataInfo dataInfo) { if - * (StringUtils.isBlank(s)) { return null; } final String[] parts = s.split("###"); if (parts.length == 2) { final String value = - * parts[0]; final String[] arr = parts[1].split("@@@"); if (arr.length == 4) { return structuredProperty(value, arr[0], arr[1], arr[2], - * arr[3], dataInfo); } } return null; } - * - * private List prepareListOfStructProps(final Document doc, final String xpath, final DataInfo dataInfo) { final - * List res = new ArrayList<>(); if (array != null) { for (final String s : (String[]) array.getArray()) { final - * StructuredProperty sp = prepareStructProp(s, dataInfo); if (sp != null) { res.add(sp); } } } - * - * return res; } - * - * private Journal prepareJournal(final Document doc, final String xpath, final DataInfo info) { if (StringUtils.isNotBlank(sj)) { final - * String[] arr = sj.split("@@@"); if (arr.length == 3) { final String issn = StringUtils.isNotBlank(arr[0]) ? arr[0] : null; final - * String eissn = StringUtils.isNotBlank(arr[1]) ? arr[1] : null;; final String lissn = StringUtils.isNotBlank(arr[2]) ? 
arr[2] : null;; - * if (issn != null || eissn != null || lissn != null) { return journal(name, issn, eissn, eissn, null, null, null, null, null, null, - * null, info); } } } return null; } - */ + // OTHER PRODUCTS + + @Override + protected List> prepareOtherResearchProductTools(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected List> prepareOtherResearchProductContactGroups(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected List> prepareOtherResearchProductContactPersons(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } } diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/OdfMigrationExecutor.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/OdfMigrationExecutor.java new file mode 100644 index 000000000..bb0932883 --- /dev/null +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/OdfMigrationExecutor.java @@ -0,0 +1,209 @@ +package eu.dnetlib.dhp.migration; + +import java.util.List; +import java.util.Map; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.dom4j.Document; + +import eu.dnetlib.dhp.schema.oaf.Author; +import eu.dnetlib.dhp.schema.oaf.DataInfo; +import eu.dnetlib.dhp.schema.oaf.Field; +import eu.dnetlib.dhp.schema.oaf.GeoLocation; +import eu.dnetlib.dhp.schema.oaf.Instance; +import eu.dnetlib.dhp.schema.oaf.KeyValue; +import eu.dnetlib.dhp.schema.oaf.Oaf; +import eu.dnetlib.dhp.schema.oaf.Qualifier; +import eu.dnetlib.dhp.schema.oaf.StructuredProperty; + +public class OdfMigrationExecutor extends AbstractMongoExecutor { + + private static final Log log = LogFactory.getLog(OdfMigrationExecutor.class); + + public OdfMigrationExecutor(final String hdfsPath, final String hdfsNameNode, final String hdfsUser, final String mongoBaseUrl, final String mongoDb, + final String dbUrl, final String dbUser, + final String dbPassword) throws Exception { + super(hdfsPath, hdfsNameNode, hdfsUser, mongoBaseUrl, mongoDb, dbUrl, dbUser, dbPassword); + } + + @Override + protected void registerNamespaces(final Map nsContext) { + // TODO Auto-generated method stub + + } + + @Override + protected List prepareInstances(final Document doc, final DataInfo info, final KeyValue collectedfrom, final KeyValue hostedby) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected List> prepareSources(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected Field prepareEmbargoEndDate(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected List prepareRelevantDates(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected List> prepareCoverages(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected List> prepareContributors(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected List> prepareFormats(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected Field preparePublisher(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + 
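// a Datacite-based mapping would presumably fit here once a "datacite"
// namespace prefix is registered, e.g. (an assumption, not part of this patch):
// return prepareField(doc, "//datacite:publisher", info);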
return null; + } + + @Override + protected List> prepareDescriptions(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected List prepareTitles(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected List prepareSubjects(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected Qualifier prepareLanguages(final Document doc) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected List prepareAuthors(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected List> prepareOtherResearchProductTools(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected List> prepareOtherResearchProductContactGroups(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected List> prepareOtherResearchProductContactPersons(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected Qualifier prepareSoftwareProgrammingLanguage(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected Field prepareSoftwareCodeRepositoryUrl(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected List prepareSoftwareLicenses(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected List> prepareSoftwareDocumentationUrls(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected List prepareDatasetGeoLocations(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected Field prepareDatasetMetadataVersionNumber(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected Field prepareDatasetLastMetadataUpdate(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected Field prepareDatasetVersion(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected Field prepareDatasetSize(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected Field prepareDatasetDevice(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected Field prepareDatasetStorageDate(final Document doc, final DataInfo info) { + // TODO Auto-generated method stub + return null; + } + + @Override + protected void addRelations(final List oafs, + final Document doc, + final String type, + final KeyValue collectedFrom, + final DataInfo info, + final long lastUpdateTimestamp) { + // TODO Auto-generated method stub + + } + +} From 181e8498d4bfa1f848048715c2231bc3b6613907 Mon Sep 17 00:00:00 2001 From: Michele Artini Date: Fri, 7 Feb 2020 16:02:49 +0100 Subject: [PATCH 09/17] ... 
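This patch generalizes the XPath helpers of AbstractMongoExecutor from Document to Node parameters (in dom4j, Document extends Node, so existing call sites keep compiling while per-sub-node mapping becomes possible), moves the shared namespace prefixes into a default registerNamespaces implementation, maps the embargo end date directly with prepareField on //oaf:embargoenddate, and wires the "odf" branch of MigrateMongoMdstoresApplication (which previously re-tested "oaf" and did nothing) to the new OdfMigrationExecutor. A minimal sketch of what the Node-based signatures allow, assuming it runs inside a mapper subclass where doc and info are in scope; the datacite XPath is an illustrative assumption, not something introduced by this patch:

    // a Document still works, because org.dom4j.Document extends org.dom4j.Node
    final Field<String> publisher = prepareField(doc, "//dc:publisher", info);

    // ...and so does any sub-node, e.g. when mapping repeated elements one by one
    for (final Object o : doc.selectNodes("//datacite:date")) { // hypothetical XPath
        final Node n = (Node) o;
        final Field<String> value = prepareField(n, ".", info); // "." = the node's own text
    }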
--- .../dhp/migration/AbstractMongoExecutor.java | 47 ++-- .../MigrateMongoMdstoresApplication.java | 7 +- .../dhp/migration/OafMigrationExecutor.java | 12 +- .../dhp/migration/OdfMigrationExecutor.java | 220 ++++++++++-------- 4 files changed, 151 insertions(+), 135 deletions(-) diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMongoExecutor.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMongoExecutor.java index cf1581b4d..1fa70dded 100644 --- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMongoExecutor.java +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMongoExecutor.java @@ -60,12 +60,7 @@ public abstract class AbstractMongoExecutor extends AbstractMigrationExecutor { final Map nsContext = new HashMap<>(); registerNamespaces(nsContext); - nsContext.put("dc", "http://purl.org/dc/elements/1.1/"); - nsContext.put("dr", "http://www.driver-repository.eu/namespace/dr"); - nsContext.put("dri", "http://www.driver-repository.eu/namespace/dri"); - nsContext.put("oaf", "http://namespace.openaire.eu/oaf"); - nsContext.put("oai", "http://www.openarchives.org/OAI/2.0/"); - nsContext.put("prov", "http://www.openarchives.org/OAI/2.0/provenance"); + DocumentFactory.getInstance().setXPathNamespaceURIs(nsContext); } @@ -107,7 +102,13 @@ public abstract class AbstractMongoExecutor extends AbstractMigrationExecutor { } } - protected abstract void registerNamespaces(Map nsContext); + protected void registerNamespaces(final Map nsContext) { + nsContext.put("dr", "http://www.driver-repository.eu/namespace/dr"); + nsContext.put("dri", "http://www.driver-repository.eu/namespace/dri"); + nsContext.put("oaf", "http://namespace.openaire.eu/oaf"); + nsContext.put("oai", "http://www.openarchives.org/OAI/2.0/"); + nsContext.put("prov", "http://www.openarchives.org/OAI/2.0/provenance"); + } protected List createOafs(final Document doc, final String type, @@ -196,7 +197,7 @@ public abstract class AbstractMongoExecutor extends AbstractMigrationExecutor { r.setDescription(prepareDescriptions(doc, info)); r.setDateofacceptance(prepareField(doc, "//oaf:dateAccepted", info)); r.setPublisher(preparePublisher(doc, info)); - r.setEmbargoenddate(prepareEmbargoEndDate(doc, info)); + r.setEmbargoenddate(prepareField(doc, "//oaf:embargoenddate", info)); r.setSource(prepareSources(doc, info)); r.setFulltext(null); // NOT PRESENT IN MDSTORES r.setFormat(prepareFormats(doc, info)); @@ -215,8 +216,6 @@ public abstract class AbstractMongoExecutor extends AbstractMigrationExecutor { protected abstract List> prepareSources(Document doc, DataInfo info); - protected abstract Field prepareEmbargoEndDate(Document doc, DataInfo info); - protected abstract List prepareRelevantDates(Document doc, DataInfo info); protected abstract List> prepareCoverages(Document doc, DataInfo info); @@ -289,20 +288,20 @@ public abstract class AbstractMongoExecutor extends AbstractMigrationExecutor { return null; } - protected Qualifier prepareQualifier(final Document doc, final String xpath, final String schemeId, final String schemeName) { - final String classId = doc.valueOf(xpath); + protected Qualifier prepareQualifier(final Node node, final String xpath, final String schemeId, final String schemeName) { + final String classId = node.valueOf(xpath); final String className = code2name.get(classId); return qualifier(classId, className, schemeId, schemeName); } - protected List prepareListStructProps(final Document 
doc, + protected List prepareListStructProps(final Node node, final String xpath, final String xpathClassId, final String schemeId, final String schemeName, final DataInfo info) { final List res = new ArrayList<>(); - for (final Object o : doc.selectNodes(xpath)) { + for (final Object o : node.selectNodes(xpath)) { final Node n = (Node) o; final String classId = n.valueOf(xpathClassId); final String className = code2name.get(classId); @@ -311,18 +310,18 @@ public abstract class AbstractMongoExecutor extends AbstractMigrationExecutor { return res; } - protected List prepareListStructProps(final Document doc, final String xpath, final Qualifier qualifier, final DataInfo info) { + protected List prepareListStructProps(final Node node, final String xpath, final Qualifier qualifier, final DataInfo info) { final List res = new ArrayList<>(); - for (final Object o : doc.selectNodes(xpath)) { + for (final Object o : node.selectNodes(xpath)) { final Node n = (Node) o; res.add(structuredProperty(n.getText(), qualifier, info)); } return res; } - protected List prepareListStructProps(final Document doc, final String xpath, final DataInfo info) { + protected List prepareListStructProps(final Node node, final String xpath, final DataInfo info) { final List res = new ArrayList<>(); - for (final Object o : doc.selectNodes(xpath)) { + for (final Object o : node.selectNodes(xpath)) { final Node n = (Node) o; res.add(structuredProperty(n.getText(), n.valueOf("@classid"), n.valueOf("@classname"), n.valueOf("@schemeid"), n .valueOf("@schemename"), info)); @@ -359,17 +358,17 @@ public abstract class AbstractMongoExecutor extends AbstractMigrationExecutor { return dataInfo(deletedbyinference, inferenceprovenance, inferred, false, qualifier(paClassId, paClassName, paSchemeId, paSchemeName), trust); } - protected Field prepareField(final Document doc, final String xpath, final DataInfo info) { - return field(doc.valueOf(xpath), info); + protected Field prepareField(final Node node, final String xpath, final DataInfo info) { + return field(node.valueOf(xpath), info); } - protected List> prepareListFields(final Document doc, final String xpath, final DataInfo info) { - return listFields(info, (String[]) prepareListString(doc, xpath).toArray()); + protected List> prepareListFields(final Node node, final String xpath, final DataInfo info) { + return listFields(info, (String[]) prepareListString(node, xpath).toArray()); } - protected List prepareListString(final Document doc, final String xpath) { + protected List prepareListString(final Node node, final String xpath) { final List res = new ArrayList<>(); - for (final Object o : doc.selectNodes(xpath)) { + for (final Object o : node.selectNodes(xpath)) { final String s = ((Node) o).getText().trim(); if (StringUtils.isNotBlank(s)) { res.add(s); diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateMongoMdstoresApplication.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateMongoMdstoresApplication.java index 124a4f3cc..359fe7596 100644 --- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateMongoMdstoresApplication.java +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateMongoMdstoresApplication.java @@ -31,8 +31,11 @@ public class MigrateMongoMdstoresApplication { new OafMigrationExecutor(hdfsPath, hdfsNameNode, hdfsUser, mongoBaseUrl, mongoDb, dbUrl, dbUser, dbPassword)) { mig.processMdRecords(mdFormat, mdLayout, mdInterpretation); } - } else if 
(mdFormat.equalsIgnoreCase("oaf")) {
-
+		} else if (mdFormat.equalsIgnoreCase("odf")) {
+			try (final OdfMigrationExecutor mig =
+					new OdfMigrationExecutor(hdfsPath, hdfsNameNode, hdfsUser, mongoBaseUrl, mongoDb, dbUrl, dbUser, dbPassword)) {
+				mig.processMdRecords(mdFormat, mdLayout, mdInterpretation);
+			}
 		} else {
 			throw new RuntimeException("Format not supported: " + mdFormat);
 		}
diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/OafMigrationExecutor.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/OafMigrationExecutor.java
index f46b31732..6dcfae71f 100644
--- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/OafMigrationExecutor.java
+++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/OafMigrationExecutor.java
@@ -33,12 +33,8 @@ public class OafMigrationExecutor extends AbstractMongoExecutor {

 	@Override
 	protected void registerNamespaces(final Map<String, String> nsContext) {
+		super.registerNamespaces(nsContext);
 		nsContext.put("dc", "http://purl.org/dc/elements/1.1/");
-		nsContext.put("dr", "http://www.driver-repository.eu/namespace/dr");
-		nsContext.put("dri", "http://www.driver-repository.eu/namespace/dri");
-		nsContext.put("oaf", "http://namespace.openaire.eu/oaf");
-		nsContext.put("oai", "http://www.openarchives.org/OAI/2.0/");
-		nsContext.put("prov", "http://www.openarchives.org/OAI/2.0/provenance");
 	}

@@ -144,12 +140,6 @@ public class OafMigrationExecutor extends AbstractMongoExecutor {
 		return prepareListFields(doc, "//dc:source", info);
 	}

-	@Override
-	protected Field<String> prepareEmbargoEndDate(final Document doc, final DataInfo info) {
-		// TODO Auto-generated method stub
-		return null;
-	}
-
 	@Override
 	protected List<StructuredProperty> prepareRelevantDates(final Document doc, final DataInfo info) {
 		// TODO Auto-generated method stub
diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/OdfMigrationExecutor.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/OdfMigrationExecutor.java
index bb0932883..5e9c70ae5 100644
--- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/OdfMigrationExecutor.java
+++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/OdfMigrationExecutor.java
@@ -1,11 +1,14 @@
 package eu.dnetlib.dhp.migration;

+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;

+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.dom4j.Document;
+import org.dom4j.Node;

 import eu.dnetlib.dhp.schema.oaf.Author;
 import eu.dnetlib.dhp.schema.oaf.DataInfo;
@@ -29,134 +32,160 @@ public class OdfMigrationExecutor extends AbstractMongoExecutor {

 	@Override
 	protected void registerNamespaces(final Map<String, String> nsContext) {
-		// TODO Auto-generated method stub
-
-	}
-
-	@Override
-	protected List<Instance> prepareInstances(final Document doc, final DataInfo info, final KeyValue collectedfrom, final KeyValue hostedby) {
-		// TODO Auto-generated method stub
-		return null;
-	}
-
-	@Override
-	protected List<Field<String>> prepareSources(final Document doc, final DataInfo info) {
-		// TODO Auto-generated method stub
-		return null;
-	}
-
-	@Override
-	protected Field<String> prepareEmbargoEndDate(final Document doc, final DataInfo info) {
-		// TODO Auto-generated method stub
-		return null;
-	}
-
-	@Override
-	protected List<StructuredProperty> prepareRelevantDates(final Document doc, final DataInfo info) {
-		// TODO Auto-generated method stub
-		return null;
-	}
-
-	@Override
-	protected List<Field<String>> prepareCoverages(final Document doc, final DataInfo info) {
-		// TODO Auto-generated method stub
-		return null;
-	}
-
-	@Override
-	protected List<Field<String>> prepareContributors(final Document doc, final DataInfo info) {
-		// TODO Auto-generated method stub
-		return null;
-	}
-
-	@Override
-	protected List<Field<String>> prepareFormats(final Document doc, final DataInfo info) {
-		// TODO Auto-generated method stub
-		return null;
-	}
-
-	@Override
-	protected Field<String> preparePublisher(final Document doc, final DataInfo info) {
-		// TODO Auto-generated method stub
-		return null;
-	}
-
-	@Override
-	protected List<Field<String>> prepareDescriptions(final Document doc, final DataInfo info) {
-		// TODO Auto-generated method stub
-		return null;
+		nsContext.put("dc", "http://datacite.org/schema/kernel-3");
 	}

 	@Override
 	protected List<StructuredProperty> prepareTitles(final Document doc, final DataInfo info) {
-		// TODO Auto-generated method stub
-		return null;
+		return prepareListStructProps(doc, "//dc:title", MAIN_TITLE_QUALIFIER, info);
 	}

 	@Override
-	protected List<StructuredProperty> prepareSubjects(final Document doc, final DataInfo info) {
-		// TODO Auto-generated method stub
-		return null;
-	}
-
-	@Override
-	protected Qualifier prepareLanguages(final Document doc) {
-		// TODO Auto-generated method stub
-		return null;
+	protected List<Author> prepareAuthors(final Document doc, final DataInfo info) {
+		final List<Author> res = new ArrayList<>();
+		int pos = 1;
+		for (final Object o : doc.selectNodes("//dc:creator")) {
+			final Node n = (Node) o;
+			final Author author = new Author();
+			author.setFullname(n.valueOf("./dc:creatorName"));
+			author.setName(n.valueOf("./dc:givenName"));
+			author.setSurname(n.valueOf("./dc:familyName"));
+			author.setAffiliation(prepareListFields(n, "./dc:affiliation", info));
+			author.setPid(preparePids(n, info));
+			author.setRank(pos++);
+			res.add(author);
+		}
+		return res;
+	}
+
+	private List<StructuredProperty> preparePids(final Node n, final DataInfo info) {
+		final List<StructuredProperty> res = new ArrayList<>();
+		for (final Object o : n.selectNodes("./dc:nameIdentifier")) {
+			res.add(structuredProperty(((Node) o).getText(), prepareQualifier((Node) o, "./@nameIdentifierScheme", "dnet:pid_types", "dnet:pid_types"), info));
+		}
+		return res;
+	}
+
+	@Override
+	protected List<Instance> prepareInstances(final Document doc, final DataInfo info, final KeyValue collectedfrom, final KeyValue hostedby) {
+		final List<Instance> res = new ArrayList<>();
+		for (final Object o : doc.selectNodes("//dc:alternateIdentifier[@alternateIdentifierType='URL']")) {
+			final Instance instance = new Instance();
+			instance.setUrl(((Node) o).getText().trim());
+			instance.setInstancetype(prepareQualifier(doc, "//dr:CobjCategory", "dnet:publication_resource", "dnet:publication_resource"));
+			instance.setCollectedfrom(collectedfrom);
+			instance.setHostedby(hostedby);
+			instance.setDateofacceptance(field(doc.valueOf("//oaf:dateAccepted"), info));
+			instance.setDistributionlocation(doc.valueOf("//oaf:distributionlocation"));
+			instance.setAccessright(prepareQualifier(doc, "//oaf:accessrights", "dnet:access_modes", "dnet:access_modes"));
+			instance.setLicense(field(doc.valueOf("//oaf:license"), info));
+			res.add(instance);
+		}
+		return res;
+	}
+
+	@Override
+	protected List<Field<String>> prepareSources(final Document doc, final DataInfo info) {
+		return new ArrayList<>(); // Not present in ODF ???
+	}
+
+	@Override
+	protected List<StructuredProperty> prepareRelevantDates(final Document doc, final DataInfo info) {
+		final List<StructuredProperty> res = new ArrayList<>();
+		for (final Object o : doc.selectNodes("//dc:date")) {
+			final String dateType = ((Node) o).valueOf("@dateType");
+			if (StringUtils.isBlank(dateType) || (!dateType.equalsIgnoreCase("Accepted") && !dateType.equalsIgnoreCase("Issued")
+					&& !dateType.equalsIgnoreCase("Updated") && !dateType.equalsIgnoreCase("Available"))) {
+				res.add(structuredProperty(((Node) o).getText(), "UNKNOWN", "UNKNOWN", "dnet:dataCite_date", "dnet:dataCite_date", info));
+			}
+		}
+		return res;
+	}
+
+	@Override
+	protected List<Field<String>> prepareCoverages(final Document doc, final DataInfo info) {
+		return new ArrayList<>(); // Not present in ODF ???
+	}
+
+	@Override
+	protected List<Field<String>> prepareContributors(final Document doc, final DataInfo info) {
+		return prepareListFields(doc, "//dc:contributorName", info);
+	}
+
+	@Override
+	protected List<Field<String>> prepareFormats(final Document doc, final DataInfo info) {
+		return prepareListFields(doc, "//dc:format", info);
+	}
+
+	@Override
+	protected Field<String> preparePublisher(final Document doc, final DataInfo info) {
+		return prepareField(doc, "//dc:publisher", info);
+	}
+
+	@Override
+	protected List<Field<String>> prepareDescriptions(final Document doc, final DataInfo info) {
+		return prepareListFields(doc, "//dc:description[@descriptionType='Abstract']", info);
+	}
+
+	@Override
+	protected List<StructuredProperty> prepareSubjects(final Document doc, final DataInfo info) {
+		return prepareListStructProps(doc, "//dc:subject", info);
+	}
+
+	@Override
+	protected Qualifier prepareLanguages(final Document doc) {
+		return prepareQualifier(doc, "//dc:language", "dnet:languages", "dnet:languages");
 	}

 	@Override
 	protected List<Field<String>> prepareOtherResearchProductTools(final Document doc, final DataInfo info) {
-		// TODO Auto-generated method stub
-		return null;
+		return new ArrayList<>(); // Not present in ODF ???
 	}

 	@Override
 	protected List<Field<String>> prepareOtherResearchProductContactGroups(final Document doc, final DataInfo info) {
-		// TODO Auto-generated method stub
-		return null;
+		return prepareListFields(doc, "//dc:contributor[@contributorType='ContactGroup']/dc:contributorName", info);
 	}

 	@Override
 	protected List<Field<String>> prepareOtherResearchProductContactPersons(final Document doc, final DataInfo info) {
-		// TODO Auto-generated method stub
-		return null;
+		return prepareListFields(doc, "//dc:contributor[@contributorType='ContactPerson']/dc:contributorName", info);
 	}

 	@Override
 	protected Qualifier prepareSoftwareProgrammingLanguage(final Document doc, final DataInfo info) {
-		// TODO Auto-generated method stub
-		return null;
+		return prepareQualifier(doc, "//dc:format", "dnet:programming_languages", "dnet:programming_languages");
 	}

 	@Override
 	protected Field<String> prepareSoftwareCodeRepositoryUrl(final Document doc, final DataInfo info) {
-		// TODO Auto-generated method stub
-		return null;
+		return null; // Not present in ODF ???
 	}

 	@Override
 	protected List<StructuredProperty> prepareSoftwareLicenses(final Document doc, final DataInfo info) {
-		// TODO Auto-generated method stub
-		return null;
+		return new ArrayList<>(); // Not present in ODF ???
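		// A stand-alone sketch (invented record) of the node-relative XPath pattern used by
		// prepareAuthors above: "./dc:creatorName" is resolved against the <dc:creator> element
		// it is applied to, so per-author values stay per-author, while a document-level
		// "//dc:creatorName" would mix all creators together. Assumes the dom4j imports
		// already used in this file plus java.util.Collections.
		DocumentFactory.getInstance().setXPathNamespaceURIs(
				Collections.singletonMap("dc", "http://datacite.org/schema/kernel-3"));
		final Document d = new SAXReader().read(new StringReader(
				"<resource xmlns:dc='http://datacite.org/schema/kernel-3'>"
						+ "<dc:creator><dc:creatorName>Doe, John</dc:creatorName></dc:creator>"
						+ "<dc:creator><dc:creatorName>Roe, Jane</dc:creatorName></dc:creator>"
						+ "</resource>"));
		for (final Object o : d.selectNodes("//dc:creator")) {
			System.out.println(((Node) o).valueOf("./dc:creatorName")); // one name per creator
		}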
} @Override protected List> prepareSoftwareDocumentationUrls(final Document doc, final DataInfo info) { - // TODO Auto-generated method stub - return null; + return prepareListFields(doc, "//dc:relatedIdentifier[@relatedIdentifierType='URL' and @relationType='IsDocumentedBy']", info); } + // DATASETS + @Override protected List prepareDatasetGeoLocations(final Document doc, final DataInfo info) { - // TODO Auto-generated method stub - return null; + final List res = new ArrayList<>(); + + for (final Object o : doc.selectNodes("//dc:geoLocation")) { + final GeoLocation loc = new GeoLocation(); + loc.setBox(((Node) o).valueOf("./dc:geoLocationBox")); + loc.setPlace(((Node) o).valueOf("./dc:geoLocationPlace")); + loc.setPoint(((Node) o).valueOf("./dc:geoLocationPoint")); + res.add(loc); + } + return res; } @Override @@ -167,32 +196,27 @@ public class OdfMigrationExecutor extends AbstractMongoExecutor { @Override protected Field prepareDatasetLastMetadataUpdate(final Document doc, final DataInfo info) { - // TODO Auto-generated method stub - return null; + return prepareField(doc, "//dc:date[@dateType='Updated']", info); } @Override protected Field prepareDatasetVersion(final Document doc, final DataInfo info) { - // TODO Auto-generated method stub - return null; + return prepareField(doc, "//dc:version", info); } @Override protected Field prepareDatasetSize(final Document doc, final DataInfo info) { - // TODO Auto-generated method stub - return null; + return prepareField(doc, "//dc:size", info); } @Override protected Field prepareDatasetDevice(final Document doc, final DataInfo info) { - // TODO Auto-generated method stub - return null; + return null; // Not present in ODF ??? } @Override protected Field prepareDatasetStorageDate(final Document doc, final DataInfo info) { - // TODO Auto-generated method stub - return null; + return prepareField(doc, "//dc:date[@dateType='Issued']", info); } @Override From 95740767e07cb6360797f48c73a0dc124ec2d903 Mon Sep 17 00:00:00 2001 From: Michele Artini Date: Mon, 10 Feb 2020 16:04:06 +0100 Subject: [PATCH 10/17] Ready for tests --- .../dhp/migration/AbstractMongoExecutor.java | 72 +++++-- .../dhp/migration/OafMigrationExecutor.java | 116 ++++++------ .../dhp/migration/OdfMigrationExecutor.java | 54 +++++- .../dhp/migration/pace/PacePerson.java | 176 ++++++++++++++++++ .../dhp/migration/pace/name_particles.txt | 7 + 5 files changed, 346 insertions(+), 79 deletions(-) create mode 100644 dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/pace/PacePerson.java create mode 100644 dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/pace/name_particles.txt diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMongoExecutor.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMongoExecutor.java index 1fa70dded..b2792e292 100644 --- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMongoExecutor.java +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMongoExecutor.java @@ -30,6 +30,7 @@ import eu.dnetlib.dhp.schema.oaf.Oaf; import eu.dnetlib.dhp.schema.oaf.OtherResearchProduct; import eu.dnetlib.dhp.schema.oaf.Publication; import eu.dnetlib.dhp.schema.oaf.Qualifier; +import eu.dnetlib.dhp.schema.oaf.Relation; import eu.dnetlib.dhp.schema.oaf.Result; import eu.dnetlib.dhp.schema.oaf.Software; import eu.dnetlib.dhp.schema.oaf.StructuredProperty; @@ -164,14 +165,56 @@ public abstract class 
AbstractMongoExecutor extends AbstractMigrationExecutor { } if (!oafs.isEmpty()) { - addRelations(oafs, doc, "TYPE", collectedFrom, info, lastUpdateTimestamp); // TODO - addRelations(oafs, doc, "TYPE", collectedFrom, info, lastUpdateTimestamp); // TODO - addRelations(oafs, doc, "TYPE", collectedFrom, info, lastUpdateTimestamp); // TODO + oafs.addAll(addProjectRels(doc, collectedFrom, info, lastUpdateTimestamp)); + oafs.addAll(addOtherResultRels(doc, collectedFrom, info, lastUpdateTimestamp)); } return oafs; } + private List addProjectRels(final Document doc, + final KeyValue collectedFrom, + final DataInfo info, + final long lastUpdateTimestamp) { + + final List res = new ArrayList<>(); + + final String docId = createOpenaireId(50, doc.valueOf("//dri:objIdentifier")); + + for (final Object o : doc.selectNodes("//oaf:projectid")) { + final String projectId = createOpenaireId(40, ((Node) o).getText()); + + final Relation r1 = new Relation(); + r1.setRelType("resultProject"); + r1.setSubRelType("outcome"); + r1.setRelClass("isProducedBy"); + r1.setSource(docId); + r1.setTarget(projectId); + r1.setCollectedFrom(Arrays.asList(collectedFrom)); + r1.setDataInfo(info); + r1.setLastupdatetimestamp(lastUpdateTimestamp); + res.add(r1); + + final Relation r2 = new Relation(); + r2.setRelType("resultProject"); + r2.setSubRelType("outcome"); + r2.setRelClass("produces"); + r2.setSource(projectId); + r2.setTarget(docId); + r2.setCollectedFrom(Arrays.asList(collectedFrom)); + r2.setDataInfo(info); + r2.setLastupdatetimestamp(lastUpdateTimestamp); + res.add(r2); + } + + return res; + } + + protected abstract List addOtherResultRels(final Document doc, + final KeyValue collectedFrom, + final DataInfo info, + final long lastUpdateTimestamp); + private void populateResultFields(final Result r, final Document doc, final KeyValue collectedFrom, @@ -199,19 +242,21 @@ public abstract class AbstractMongoExecutor extends AbstractMigrationExecutor { r.setPublisher(preparePublisher(doc, info)); r.setEmbargoenddate(prepareField(doc, "//oaf:embargoenddate", info)); r.setSource(prepareSources(doc, info)); - r.setFulltext(null); // NOT PRESENT IN MDSTORES + r.setFulltext(new ArrayList<>()); // NOT PRESENT IN MDSTORES r.setFormat(prepareFormats(doc, info)); r.setContributor(prepareContributors(doc, info)); - r.setResourcetype(null); // TODO + r.setResourcetype(prepareResourceType(doc, info)); r.setCoverage(prepareCoverages(doc, info)); - r.setRefereed(null); // TODO - r.setContext(null); // TODO - r.setExternalReference(null); // TODO + r.setRefereed(null); // NOT PRESENT IN MDSTORES + r.setContext(new ArrayList<>()); // NOT PRESENT IN MDSTORES + r.setExternalReference(new ArrayList<>()); // NOT PRESENT IN MDSTORES r.setInstance(prepareInstances(doc, info, collectedFrom, hostedBy)); - r.setProcessingchargeamount(null); // TODO - r.setProcessingchargecurrency(null); // TODO + r.setProcessingchargeamount(null); // NOT PRESENT IN MDSTORES + r.setProcessingchargecurrency(null); // NOT PRESENT IN MDSTORES } + protected abstract Qualifier prepareResourceType(Document doc, DataInfo info); + protected abstract List prepareInstances(Document doc, DataInfo info, KeyValue collectedfrom, KeyValue hostedby); protected abstract List> prepareSources(Document doc, DataInfo info); @@ -264,13 +309,6 @@ public abstract class AbstractMongoExecutor extends AbstractMigrationExecutor { protected abstract Field prepareDatasetStorageDate(Document doc, DataInfo info); - abstract protected void addRelations(final List oafs, - final Document doc, - 
final String type, - final KeyValue collectedFrom, - final DataInfo info, - final long lastUpdateTimestamp); - private Journal prepareJournal(final Document doc, final DataInfo info) { final Node n = doc.selectSingleNode("//oaf:journal"); if (n != null) { diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/OafMigrationExecutor.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/OafMigrationExecutor.java index 6dcfae71f..75360943c 100644 --- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/OafMigrationExecutor.java +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/OafMigrationExecutor.java @@ -10,6 +10,7 @@ import org.apache.commons.logging.LogFactory; import org.dom4j.Document; import org.dom4j.Node; +import eu.dnetlib.dhp.migration.pace.PacePerson; import eu.dnetlib.dhp.schema.oaf.Author; import eu.dnetlib.dhp.schema.oaf.DataInfo; import eu.dnetlib.dhp.schema.oaf.Field; @@ -37,29 +38,6 @@ public class OafMigrationExecutor extends AbstractMongoExecutor { nsContext.put("dc", "http://purl.org/dc/elements/1.1/"); } - @Override - protected void addRelations(final List oafs, - final Document doc, - final String type, - final KeyValue collectedFrom, - final DataInfo info, - final long lastUpdateTimestamp) { - for (final Object o : doc.selectNodes("//")) { // TODO - final Node n = (Node) o; - final Relation r = new Relation(); - r.setRelType(null); // TODO - r.setSubRelType(null); // TODO - r.setRelClass(null); // TODO - r.setSource(null); // TODO - r.setTarget(null); // TODO - r.setCollectedFrom(Arrays.asList(collectedFrom)); - r.setDataInfo(info); - r.setLastupdatetimestamp(lastUpdateTimestamp); - oafs.add(r); - } - - } - @Override protected List prepareAuthors(final Document doc, final DataInfo info) { final List res = new ArrayList<>(); @@ -69,6 +47,11 @@ public class OafMigrationExecutor extends AbstractMongoExecutor { final Author author = new Author(); author.setFullname(n.getText()); author.setRank(pos++); + final PacePerson p = new PacePerson(n.getText(), false); + if (p.isAccurate()) { + author.setName(p.getNormalisedFirstName()); + author.setSurname(p.getNormalisedSurname()); + } res.add(author); } return res; @@ -142,97 +125,124 @@ public class OafMigrationExecutor extends AbstractMongoExecutor { @Override protected List prepareRelevantDates(final Document doc, final DataInfo info) { - // TODO Auto-generated method stub - return null; + return new ArrayList<>(); // NOT PRESENT IN OAF } // SOFTWARES @Override protected Qualifier prepareSoftwareProgrammingLanguage(final Document doc, final DataInfo info) { - // TODO Auto-generated method stub - return null; + return null; // NOT PRESENT IN OAF } @Override protected Field prepareSoftwareCodeRepositoryUrl(final Document doc, final DataInfo info) { - // TODO Auto-generated method stub - return null; + return null; // NOT PRESENT IN OAF } @Override protected List prepareSoftwareLicenses(final Document doc, final DataInfo info) { - // TODO Auto-generated method stub - return null; + return new ArrayList<>(); // NOT PRESENT IN OAF } @Override protected List> prepareSoftwareDocumentationUrls(final Document doc, final DataInfo info) { - // TODO Auto-generated method stub - return null; + return new ArrayList<>(); // NOT PRESENT IN OAF } // DATASETS @Override protected List prepareDatasetGeoLocations(final Document doc, final DataInfo info) { - // TODO Auto-generated method stub - return null; + return new ArrayList<>(); // NOT PRESENT IN OAF } 
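	// A short usage sketch of the PacePerson normalisation applied in prepareAuthors
	// above; the input string is an invented example, not data from any mdstore.
	private static void pacePersonSketch() {
		final PacePerson p = new PacePerson("ARTINI, michele", false);
		if (p.isAccurate()) {
			System.out.println(p.getNormalisedSurname());   // Artini
			System.out.println(p.getNormalisedFirstName()); // Michele
		}
	}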
@Override protected Field prepareDatasetMetadataVersionNumber(final Document doc, final DataInfo info) { - // TODO Auto-generated method stub - return null; + return null; // NOT PRESENT IN OAF } @Override protected Field prepareDatasetLastMetadataUpdate(final Document doc, final DataInfo info) { - // TODO Auto-generated method stub - return null; + return null; // NOT PRESENT IN OAF } @Override protected Field prepareDatasetVersion(final Document doc, final DataInfo info) { - // TODO Auto-generated method stub - return null; + return null; // NOT PRESENT IN OAF } @Override protected Field prepareDatasetSize(final Document doc, final DataInfo info) { - // TODO Auto-generated method stub - return null; + return null; // NOT PRESENT IN OAF } @Override protected Field prepareDatasetDevice(final Document doc, final DataInfo info) { - // TODO Auto-generated method stub - return null; + return null; // NOT PRESENT IN OAF } @Override protected Field prepareDatasetStorageDate(final Document doc, final DataInfo info) { - // TODO Auto-generated method stub - return null; + return null; // NOT PRESENT IN OAF } // OTHER PRODUCTS @Override protected List> prepareOtherResearchProductTools(final Document doc, final DataInfo info) { - // TODO Auto-generated method stub - return null; + return new ArrayList<>(); // NOT PRESENT IN OAF } @Override protected List> prepareOtherResearchProductContactGroups(final Document doc, final DataInfo info) { - // TODO Auto-generated method stub - return null; + return new ArrayList<>(); // NOT PRESENT IN OAF } @Override protected List> prepareOtherResearchProductContactPersons(final Document doc, final DataInfo info) { - // TODO Auto-generated method stub - return null; + return new ArrayList<>(); // NOT PRESENT IN OAF + } + + @Override + protected List addOtherResultRels(final Document doc, + final KeyValue collectedFrom, + final DataInfo info, + final long lastUpdateTimestamp) { + final String docId = createOpenaireId(50, doc.valueOf("//dri:objIdentifier")); + + final List res = new ArrayList<>(); + + for (final Object o : doc.selectNodes("//*[local-name()='relatedDataset']")) { + final String otherId = createOpenaireId(50, ((Node) o).getText()); + + final Relation r1 = new Relation(); + r1.setRelType("resultResult"); + r1.setSubRelType("publicationDataset"); + r1.setRelClass("isRelatedTo"); + r1.setSource(docId); + r1.setTarget(otherId); + r1.setCollectedFrom(Arrays.asList(collectedFrom)); + r1.setDataInfo(info); + r1.setLastupdatetimestamp(lastUpdateTimestamp); + res.add(r1); + + final Relation r2 = new Relation(); + r2.setRelType("resultResult"); + r2.setSubRelType("publicationDataset"); + r2.setRelClass("isRelatedTo"); + r2.setSource(otherId); + r2.setTarget(docId); + r2.setCollectedFrom(Arrays.asList(collectedFrom)); + r2.setDataInfo(info); + r2.setLastupdatetimestamp(lastUpdateTimestamp); + res.add(r2); + } + return res; + } + + @Override + protected Qualifier prepareResourceType(final Document doc, final DataInfo info) { + return null; // NOT PRESENT IN OAF } } diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/OdfMigrationExecutor.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/OdfMigrationExecutor.java index 5e9c70ae5..b1dbfcdf4 100644 --- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/OdfMigrationExecutor.java +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/OdfMigrationExecutor.java @@ -1,6 +1,7 @@ package eu.dnetlib.dhp.migration; import 
java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.Map; @@ -18,6 +19,7 @@ import eu.dnetlib.dhp.schema.oaf.Instance; import eu.dnetlib.dhp.schema.oaf.KeyValue; import eu.dnetlib.dhp.schema.oaf.Oaf; import eu.dnetlib.dhp.schema.oaf.Qualifier; +import eu.dnetlib.dhp.schema.oaf.Relation; import eu.dnetlib.dhp.schema.oaf.StructuredProperty; public class OdfMigrationExecutor extends AbstractMongoExecutor { @@ -190,8 +192,7 @@ public class OdfMigrationExecutor extends AbstractMongoExecutor { @Override protected Field prepareDatasetMetadataVersionNumber(final Document doc, final DataInfo info) { - // TODO Auto-generated method stub - return null; + return null; // Not present in ODF ??? } @Override @@ -220,14 +221,49 @@ public class OdfMigrationExecutor extends AbstractMongoExecutor { } @Override - protected void addRelations(final List oafs, - final Document doc, - final String type, - final KeyValue collectedFrom, - final DataInfo info, - final long lastUpdateTimestamp) { - // TODO Auto-generated method stub + protected List addOtherResultRels(final Document doc, final KeyValue collectedFrom, final DataInfo info, final long lastUpdateTimestamp) { + final String docId = createOpenaireId(50, doc.valueOf("//dri:objIdentifier")); + + final List res = new ArrayList<>(); + + for (final Object o : doc.selectNodes("//*[local-name() = 'resource']//*[local-name()='relatedIdentifier' and ./@relatedIdentifierType='OPENAIRE']")) { + final String otherId = createOpenaireId(50, ((Node) o).getText()); + final String type = ((Node) o).valueOf("@relationType"); + + if (type.equals("IsSupplementTo")) { + res.add(prepareOtherResultRel(collectedFrom, info, lastUpdateTimestamp, docId, otherId, "supplement", "isSupplementTo")); + res.add(prepareOtherResultRel(collectedFrom, info, lastUpdateTimestamp, otherId, docId, "supplement", "isSupplementedBy")); + } else if (type.equals("IsPartOf")) { + res.add(prepareOtherResultRel(collectedFrom, info, lastUpdateTimestamp, docId, otherId, "part", "IsPartOf")); + res.add(prepareOtherResultRel(collectedFrom, info, lastUpdateTimestamp, otherId, docId, "part", "HasParts")); + } else {} + } + return res; + } + + private Relation prepareOtherResultRel(final KeyValue collectedFrom, + final DataInfo info, + final long lastUpdateTimestamp, + final String source, + final String target, + final String subRelType, + final String relClass) { + final Relation r = new Relation(); + r.setRelType("resultResult"); + r.setSubRelType(subRelType); + r.setRelClass(relClass); + r.setSource(source); + r.setTarget(target); + r.setCollectedFrom(Arrays.asList(collectedFrom)); + r.setDataInfo(info); + r.setLastupdatetimestamp(lastUpdateTimestamp); + return r; + } + + @Override + protected Qualifier prepareResourceType(final Document doc, final DataInfo info) { + return prepareQualifier(doc, "//*[local-name() = 'resource']//*[local-name() = 'resourceType']", "dnet:dataCite_resource", "dnet:dataCite_resource"); } } diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/pace/PacePerson.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/pace/PacePerson.java new file mode 100644 index 000000000..927f5641b --- /dev/null +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/pace/PacePerson.java @@ -0,0 +1,176 @@ +package eu.dnetlib.dhp.migration.pace; + +import java.nio.charset.Charset; +import java.text.Normalizer; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import 
org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.text.WordUtils; + +import com.google.common.base.Joiner; +import com.google.common.base.Splitter; +import com.google.common.collect.Iterables; +import com.google.common.collect.Lists; +import com.google.common.hash.Hashing; + +public class PacePerson { + + private static final String UTF8 = "UTF-8"; + private List name = Lists.newArrayList(); + private List surname = Lists.newArrayList(); + private List fullname = Lists.newArrayList(); + private final String original; + + private static Set particles = null; + + public static final String capitalize(final String s) { + return WordUtils.capitalize(s.toLowerCase(), ' ', '-'); + } + + public static final String dotAbbreviations(final String s) { + return s.length() == 1 ? s + "." : s; + } + + public static Set loadFromClasspath(final String classpath) { + final Set h = new HashSet<>(); + try { + for (final String s : IOUtils.readLines(PacePerson.class.getResourceAsStream(classpath))) { + h.add(s); + } + } catch (final Throwable e) { + return new HashSet<>(); + } + return h; + } + + public PacePerson(String s, final boolean aggressive) { + original = s; + s = Normalizer.normalize(s, Normalizer.Form.NFD); + s = s.replaceAll("\\(.+\\)", ""); + s = s.replaceAll("\\[.+\\]", ""); + s = s.replaceAll("\\{.+\\}", ""); + s = s.replaceAll("\\s+-\\s+", "-"); + s = s.replaceAll("[\\p{Punct}&&[^,-]]", " "); + s = s.replaceAll("\\d", " "); + s = s.replaceAll("\\n", " "); + s = s.replaceAll("\\.", " "); + s = s.replaceAll("\\s+", " "); + + if (aggressive) { + s = s.replaceAll("[\\p{InCombiningDiacriticalMarks}&&[^,-]]", ""); + // s = s.replaceAll("[\\W&&[^,-]]", ""); + } + + if (s.contains(",")) { + final String[] arr = s.split(","); + if (arr.length == 1) { + fullname = splitTerms(arr[0]); + } else if (arr.length > 1) { + surname = splitTerms(arr[0]); + name = splitTerms(arr[1]); + fullname.addAll(surname); + fullname.addAll(name); + } + } else { + fullname = splitTerms(s); + + int lastInitialPosition = fullname.size(); + boolean hasSurnameInUpperCase = false; + + for (int i = 0; i < fullname.size(); i++) { + final String term = fullname.get(i); + if (term.length() == 1) { + lastInitialPosition = i; + } else if (term.equals(term.toUpperCase())) { + hasSurnameInUpperCase = true; + } + } + + if (lastInitialPosition < fullname.size() - 1) { // Case: Michele G. 
Artini + name = fullname.subList(0, lastInitialPosition + 1); + surname = fullname.subList(lastInitialPosition + 1, fullname.size()); + } else if (hasSurnameInUpperCase) { // Case: Michele ARTINI + for (final String term : fullname) { + if (term.length() > 1 && term.equals(term.toUpperCase())) { + surname.add(term); + } else { + name.add(term); + } + } + } + } + } + + private List splitTerms(final String s) { + if (particles == null) { + particles = loadFromClasspath("/eu/dnetlib/dhp/migration/pace/name_particles.txt"); + } + + final List list = Lists.newArrayList(); + for (final String part : Splitter.on(" ").omitEmptyStrings().split(s)) { + if (!particles.contains(part.toLowerCase())) { + list.add(part); + } + } + return list; + } + + public List getName() { + return name; + } + + public String getNameString() { + return Joiner.on(" ").join(getName()); + } + + public List getSurname() { + return surname; + } + + public List getFullname() { + return fullname; + } + + public String getOriginal() { + return original; + } + + public String hash() { + return Hashing.murmur3_128().hashString(getNormalisedFullname(), Charset.forName(UTF8)).toString(); + } + + public String getNormalisedFirstName() { + return Joiner.on(" ").join(getCapitalFirstnames()); + } + + public String getNormalisedSurname() { + return Joiner.on(" ").join(getCapitalSurname()); + } + + public String getSurnameString() { + return Joiner.on(" ").join(getSurname()); + } + + public String getNormalisedFullname() { + return isAccurate() ? getNormalisedSurname() + ", " + getNormalisedFirstName() : Joiner.on(" ").join(fullname); + } + + public List getCapitalFirstnames() { + return Lists.newArrayList(Iterables.transform(getNameWithAbbreviations(), PacePerson::capitalize)); + } + + public List getCapitalSurname() { + return Lists.newArrayList(Iterables.transform(surname, PacePerson::capitalize)); + } + + public List getNameWithAbbreviations() { + return Lists.newArrayList(Iterables.transform(name, PacePerson::dotAbbreviations)); + } + + public boolean isAccurate() { + return name != null && surname != null && !name.isEmpty() && !surname.isEmpty(); + } + +} diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/pace/name_particles.txt b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/pace/name_particles.txt new file mode 100644 index 000000000..dae37c9dc --- /dev/null +++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/pace/name_particles.txt @@ -0,0 +1,7 @@ +van +der +de +dell +sig +mr +mrs From 5fc09b179cf68d169070b43f47ec8828e3351e49 Mon Sep 17 00:00:00 2001 From: Michele Artini Date: Tue, 11 Feb 2020 12:48:03 +0100 Subject: [PATCH 11/17] bug fixing --- dhp-workflows/dhp-aggregation/pom.xml | 6 ++++++ .../dnetlib/dhp/migration/AbstractMigrationExecutor.java | 7 +++++++ .../src/main/java/eu/dnetlib/dhp/migration/DbClient.java | 7 +++++-- .../dhp/migration/migrate_db_entities_parameters.json | 4 ++-- 4 files changed, 20 insertions(+), 4 deletions(-) diff --git a/dhp-workflows/dhp-aggregation/pom.xml b/dhp-workflows/dhp-aggregation/pom.xml index d031c0308..d523945ea 100644 --- a/dhp-workflows/dhp-aggregation/pom.xml +++ b/dhp-workflows/dhp-aggregation/pom.xml @@ -55,6 +55,12 @@ org.mongodb mongo-java-driver + + + org.postgresql + postgresql + 42.2.10 + org.mockito diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMigrationExecutor.java 
b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMigrationExecutor.java index 389790511..bf877dcf3 100644 --- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMigrationExecutor.java +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMigrationExecutor.java @@ -10,6 +10,8 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; import org.apache.commons.lang3.StringUtils; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -42,7 +44,12 @@ public class AbstractMigrationExecutor implements Closeable { private final SequenceFile.Writer writer; + private static final Log log = LogFactory.getLog(AbstractMigrationExecutor.class); + public AbstractMigrationExecutor(final String hdfsPath, final String hdfsNameNode, final String hdfsUser) throws Exception { + + log.info(String.format("Creating SequenceFile Writer, hdfsPath=%s, nameNode=%s, user=%s", hdfsPath, hdfsNameNode, hdfsUser)); + this.writer = SequenceFile.createWriter(getConf(hdfsNameNode, hdfsUser), SequenceFile.Writer.file(new Path(hdfsPath)), SequenceFile.Writer .keyClass(IntWritable.class), SequenceFile.Writer.valueClass(Text.class)); } diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/DbClient.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/DbClient.java index e9fee63b9..246dae474 100644 --- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/DbClient.java +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/DbClient.java @@ -9,6 +9,7 @@ import java.sql.SQLException; import java.sql.Statement; import java.util.function.Consumer; +import org.apache.commons.lang3.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -22,7 +23,9 @@ public class DbClient implements Closeable { try { Class.forName("org.postgresql.Driver"); - this.connection = DriverManager.getConnection(address, login, password); + + this.connection = + StringUtils.isNoneBlank(login, password) ? 
DriverManager.getConnection(address, login, password) : DriverManager.getConnection(address); this.connection.setAutoCommit(false); } catch (final Exception e) { log.error(e.getClass().getName() + ": " + e.getMessage()); @@ -34,7 +37,7 @@ public class DbClient implements Closeable { public void processResults(final String sql, final Consumer consumer) { try (final Statement stmt = connection.createStatement()) { - try (final ResultSet rs = stmt.executeQuery("SELECT * FROM COMPANY;")) { + try (final ResultSet rs = stmt.executeQuery(sql)) { while (rs.next()) { consumer.accept(rs); } diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/migrate_db_entities_parameters.json b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/migrate_db_entities_parameters.json index 861d297ba..5e9f378f5 100644 --- a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/migrate_db_entities_parameters.json +++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/migrate_db_entities_parameters.json @@ -27,12 +27,12 @@ "paramName": "dbuser", "paramLongName": "postgresUser", "paramDescription": "postgres user", - "paramRequired": true + "paramRequired": false }, { "paramName": "dbpasswd", "paramLongName": "postgresPassword", "paramDescription": "postgres password", - "paramRequired": true + "paramRequired": false } ] \ No newline at end of file From 06c2fd6df90d1113f06b641f296ecce63740c104 Mon Sep 17 00:00:00 2001 From: Michele Artini Date: Tue, 11 Feb 2020 15:29:50 +0100 Subject: [PATCH 12/17] bug fixing --- .../migration/AbstractMigrationExecutor.java | 6 +++- .../eu/dnetlib/dhp/migration/DbClient.java | 2 ++ .../MigrateDbEntitiesApplication.java | 36 +++++++++++++------ .../dhp/migration/sql/queryProjects.sql | 9 +++-- .../src/main/resources/log4j.properties | 9 +++++ 5 files changed, 47 insertions(+), 15 deletions(-) create mode 100644 dhp-workflows/dhp-aggregation/src/main/resources/log4j.properties diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMigrationExecutor.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMigrationExecutor.java index bf877dcf3..3367399c6 100644 --- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMigrationExecutor.java +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMigrationExecutor.java @@ -71,12 +71,13 @@ public class AbstractMigrationExecutor implements Closeable { value.set(objectMapper.writeValueAsString(oaf)); writer.append(key, value); } catch (final Exception e) { - e.printStackTrace(); + throw new RuntimeException(e); } } @Override public void close() throws IOException { + writer.hflush(); writer.close(); } @@ -216,4 +217,7 @@ public class AbstractMigrationExecutor implements Closeable { } + public static String asString(final Object o) { + return o == null ? 
"" : o.toString(); + } } diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/DbClient.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/DbClient.java index 246dae474..9ac0089d2 100644 --- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/DbClient.java +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/DbClient.java @@ -37,6 +37,8 @@ public class DbClient implements Closeable { public void processResults(final String sql, final Consumer consumer) { try (final Statement stmt = connection.createStatement()) { + stmt.setFetchSize(100); + try (final ResultSet rs = stmt.executeQuery(sql)) { while (rs.next()) { consumer.accept(rs); diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateDbEntitiesApplication.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateDbEntitiesApplication.java index 12043709f..d22e8e5b3 100644 --- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateDbEntitiesApplication.java +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MigrateDbEntitiesApplication.java @@ -54,11 +54,22 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationExecutor impl final String hdfsUser = parser.get("hdfsUser"); try (final MigrateDbEntitiesApplication smdbe = new MigrateDbEntitiesApplication(hdfsPath, hdfsNameNode, hdfsUser, dbUrl, dbUser, dbPassword)) { + log.info("Processing datasources..."); smdbe.execute("queryDatasources.sql", smdbe::processDatasource); + + log.info("Processing projects..."); smdbe.execute("queryProjects.sql", smdbe::processProject); + + log.info("Processing orgs..."); smdbe.execute("queryOrganizations.sql", smdbe::processOrganization); + + log.info("Processing relations ds <-> orgs ..."); smdbe.execute("queryDatasourceOrganization.sql", smdbe::processDatasourceOrganization); + + log.info("Processing projects <-> orgs ..."); smdbe.execute("queryProjectOrganization.sql", smdbe::processProjectOrganization); + + log.info("All done."); } } @@ -75,6 +86,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationExecutor impl } public void processDatasource(final ResultSet rs) { + try { final DataInfo info = prepareDataInfo(rs); @@ -85,7 +97,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationExecutor impl ds.setOriginalId(Arrays.asList(rs.getString("datasourceid"))); ds.setCollectedfrom(listKeyValues(rs.getString("collectedfromid"), rs.getString("collectedfromname"))); ds.setPid(new ArrayList<>()); - ds.setDateofcollection(rs.getDate("dateofcollection").toString()); + ds.setDateofcollection(asString(rs.getDate("dateofcollection"))); ds.setDateoftransformation(null); // Value not returned by the SQL query ds.setExtraInfo(new ArrayList<>()); // Values not present in the DB ds.setOaiprovenance(null); // Values not present in the DB @@ -99,17 +111,17 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationExecutor impl ds.setNamespaceprefix(field(rs.getString("namespaceprefix"), info)); ds.setLatitude(field(Double.toString(rs.getDouble("latitude")), info)); ds.setLongitude(field(Double.toString(rs.getDouble("longitude")), info)); - ds.setDateofvalidation(field(rs.getDate("dateofvalidation").toString(), info)); + ds.setDateofvalidation(field(asString(rs.getDate("dateofvalidation")), info)); ds.setDescription(field(rs.getString("description"), info)); 
ds.setSubjects(prepareListOfStructProps(rs.getArray("subjects"), info)); ds.setOdnumberofitems(field(Double.toString(rs.getInt("odnumberofitems")), info)); - ds.setOdnumberofitemsdate(field(rs.getDate("odnumberofitemsdate").toString(), info)); + ds.setOdnumberofitemsdate(field(asString(rs.getDate("odnumberofitemsdate")), info)); ds.setOdpolicies(field(rs.getString("odpolicies"), info)); ds.setOdlanguages(prepareListFields(rs.getArray("odlanguages"), info)); ds.setOdcontenttypes(prepareListFields(rs.getArray("odcontenttypes"), info)); ds.setAccessinfopackage(prepareListFields(rs.getArray("accessinfopackage"), info)); - ds.setReleasestartdate(field(rs.getDate("releasestartdate").toString(), info)); - ds.setReleaseenddate(field(rs.getDate("releaseenddate").toString(), info)); + ds.setReleasestartdate(field(asString(rs.getDate("releasestartdate")), info)); + ds.setReleaseenddate(field(asString(rs.getDate("releaseenddate")), info)); ds.setMissionstatementurl(field(rs.getString("missionstatementurl"), info)); ds.setDataprovider(field(rs.getBoolean("dataprovider"), info)); ds.setServiceprovider(field(rs.getBoolean("serviceprovider"), info)); @@ -192,16 +204,16 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationExecutor impl p.setOriginalId(Arrays.asList(rs.getString("projectid"))); p.setCollectedfrom(listKeyValues(rs.getString("collectedfromid"), rs.getString("collectedfromname"))); p.setPid(new ArrayList<>()); - p.setDateofcollection(rs.getDate("dateofcollection").toString()); - p.setDateoftransformation(rs.getDate("dateoftransformation").toString()); + p.setDateofcollection(asString(rs.getDate("dateofcollection"))); + p.setDateoftransformation(asString(rs.getDate("dateoftransformation"))); p.setExtraInfo(new ArrayList<>()); // Values not present in the DB p.setOaiprovenance(null); // Values not present in the DB p.setWebsiteurl(field(rs.getString("websiteurl"), info)); p.setCode(field(rs.getString("code"), info)); p.setAcronym(field(rs.getString("acronym"), info)); p.setTitle(field(rs.getString("title"), info)); - p.setStartdate(field(rs.getDate("startdate").toString(), info)); - p.setEnddate(field(rs.getDate("enddate").toString(), info)); + p.setStartdate(field(asString(rs.getDate("startdate")), info)); + p.setEnddate(field(asString(rs.getDate("enddate")), info)); p.setCallidentifier(field(rs.getString("callidentifier"), info)); p.setKeywords(field(rs.getString("keywords"), info)); p.setDuration(field(Integer.toString(rs.getInt("duration")), info)); @@ -271,6 +283,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationExecutor impl } public void processOrganization(final ResultSet rs) { + try { final DataInfo info = prepareDataInfo(rs); @@ -281,8 +294,8 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationExecutor impl o.setOriginalId(Arrays.asList(rs.getString("organizationid"))); o.setCollectedfrom(listKeyValues(rs.getString("collectedfromid"), rs.getString("collectedfromname"))); o.setPid(new ArrayList<>()); - o.setDateofcollection(rs.getDate("dateofcollection").toString()); - o.setDateoftransformation(rs.getDate("dateoftransformation").toString()); + o.setDateofcollection(asString(rs.getDate("dateofcollection"))); + o.setDateoftransformation(asString(rs.getDate("dateoftransformation"))); o.setExtraInfo(new ArrayList<>()); // Values not present in the DB o.setOaiprovenance(null); // Values not present in the DB o.setLegalshortname(field("legalshortname", info)); @@ -387,6 +400,7 @@ public class MigrateDbEntitiesApplication extends 
AbstractMigrationExecutor impl } public void processProjectOrganization(final ResultSet rs) { + try { final DataInfo info = prepareDataInfo(rs); final String orgId = createOpenaireId(20, rs.getString("resporganization")); diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryProjects.sql b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryProjects.sql index f04f1f03b..6cff18875 100644 --- a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryProjects.sql +++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/sql/queryProjects.sql @@ -31,7 +31,7 @@ SELECT p.fundedamount AS fundedamount, dc.id AS collectedfromid, dc.officialname AS collectedfromname, - p.contracttype || '@@@' || p.contracttypename || '@@@' || p.contracttypescheme || '@@@' || p.contracttypescheme AS contracttype, + ctc.code || '@@@' || ctc.name || '@@@' || cts.code || '@@@' || cts.name AS contracttype, pac.code || '@@@' || pac.name || '@@@' || pas.code || '@@@' || pas.name AS provenanceaction, array_agg(DISTINCT i.pid || '###' || i.issuertype) AS pid, array_agg(DISTINCT s.name || '###' || sc.code || '@@@' || sc.name || '@@@' || ss.code || '@@@' || ss.name) AS subjects, @@ -54,6 +54,9 @@ SELECT LEFT OUTER JOIN class sc ON (sc.code = s.semanticclass) LEFT OUTER JOIN scheme ss ON (ss.code = s.semanticscheme) + LEFT OUTER JOIN class ctc ON (ctc.code = p.contracttypeclass) + LEFT OUTER JOIN scheme cts ON (cts.code = p.contracttypescheme) + GROUP BY p.id, p.code, @@ -77,11 +80,11 @@ SELECT p.contactfax, p.contactphone, p.contactemail, - p.contracttype, p.summary, p.currency, p.totalcost, p.fundedamount, dc.id, dc.officialname, - pac.code, pac.name, pas.code, pas.name; \ No newline at end of file + pac.code, pac.name, pas.code, pas.name, + ctc.code, ctc.name, cts.code, cts.name; \ No newline at end of file diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/log4j.properties b/dhp-workflows/dhp-aggregation/src/main/resources/log4j.properties new file mode 100644 index 000000000..63cba917e --- /dev/null +++ b/dhp-workflows/dhp-aggregation/src/main/resources/log4j.properties @@ -0,0 +1,9 @@ +# Set root logger level to DEBUG and its only appender to A1. +log4j.rootLogger=INFO, A1 + +# A1 is set to be a ConsoleAppender. +log4j.appender.A1=org.apache.log4j.ConsoleAppender + +# A1 uses PatternLayout. 
+log4j.appender.A1.layout=org.apache.log4j.PatternLayout +log4j.appender.A1.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n From 69336195d3aaefe95a557f592893de4d8f3b79b1 Mon Sep 17 00:00:00 2001 From: Michele Artini Date: Wed, 12 Feb 2020 11:12:38 +0100 Subject: [PATCH 13/17] simplifications --- .../migration/AbstractMigrationExecutor.java | 47 +++++++++++-------- 1 file changed, 28 insertions(+), 19 deletions(-) diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMigrationExecutor.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMigrationExecutor.java index 3367399c6..11c1fb6ae 100644 --- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMigrationExecutor.java +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMigrationExecutor.java @@ -6,6 +6,7 @@ import java.net.URI; import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.Objects; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; @@ -15,7 +16,6 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.SequenceFile; import org.apache.hadoop.io.Text; import org.codehaus.jackson.map.ObjectMapper; @@ -36,7 +36,7 @@ public class AbstractMigrationExecutor implements Closeable { private final AtomicInteger counter = new AtomicInteger(0); - private final IntWritable key = new IntWritable(counter.get()); + private final Text key = new Text(); private final Text value = new Text(); @@ -51,7 +51,7 @@ public class AbstractMigrationExecutor implements Closeable { log.info(String.format("Creating SequenceFile Writer, hdfsPath=%s, nameNode=%s, user=%s", hdfsPath, hdfsNameNode, hdfsUser)); this.writer = SequenceFile.createWriter(getConf(hdfsNameNode, hdfsUser), SequenceFile.Writer.file(new Path(hdfsPath)), SequenceFile.Writer - .keyClass(IntWritable.class), SequenceFile.Writer.valueClass(Text.class)); + .keyClass(Text.class), SequenceFile.Writer.valueClass(Text.class)); } private Configuration getConf(final String hdfsNameNode, final String hdfsUser) throws IOException { @@ -67,7 +67,7 @@ public class AbstractMigrationExecutor implements Closeable { protected void emitOaf(final Oaf oaf) { try { - key.set(counter.getAndIncrement()); + key.set(counter.getAndIncrement() + ":" + oaf.getClass().getSimpleName().toLowerCase()); value.set(objectMapper.writeValueAsString(oaf)); writer.append(key, value); } catch (final Exception e) { @@ -99,6 +99,8 @@ public class AbstractMigrationExecutor implements Closeable { } public static Field field(final T value, final DataInfo info) { + if (value == null || StringUtils.isBlank(value.toString())) { return null; } + final Field field = new Field<>(); field.setValue(value); field.setDataInfo(info); @@ -106,7 +108,7 @@ public class AbstractMigrationExecutor implements Closeable { } public static List> listFields(final DataInfo info, final String... 
values) { - return Arrays.stream(values).map(v -> field(v, info)).collect(Collectors.toList()); + return Arrays.stream(values).map(v -> field(v, info)).filter(Objects::nonNull).collect(Collectors.toList()); } public static Qualifier qualifier(final String classid, final String classname, final String schemeid, final String schemename) { @@ -124,10 +126,12 @@ public class AbstractMigrationExecutor implements Closeable { final String schemeid, final String schemename, final DataInfo dataInfo) { + return structuredProperty(value, qualifier(classid, classname, schemeid, schemename), dataInfo); } public static StructuredProperty structuredProperty(final String value, final Qualifier qualifier, final DataInfo dataInfo) { + if (value == null) { return null; } final StructuredProperty sp = new StructuredProperty(); sp.setValue(value); sp.setQualifier(qualifier); @@ -178,20 +182,25 @@ public class AbstractMigrationExecutor implements Closeable { final String conferenceplace, final String conferencedate, final DataInfo dataInfo) { - final Journal j = new Journal(); - j.setName(name); - j.setIssnPrinted(issnPrinted); - j.setIssnOnline(issnOnline); - j.setIssnLinking(issnLinking); - j.setEp(ep); - j.setIss(iss); - j.setSp(sp); - j.setVol(vol); - j.setEdition(edition); - j.setConferenceplace(conferenceplace); - j.setConferencedate(conferencedate); - j.setDataInfo(dataInfo); - return j; + + if (StringUtils.isNotBlank(name) || StringUtils.isNotBlank(issnPrinted) || StringUtils.isNotBlank(issnOnline) || StringUtils.isNotBlank(issnLinking)) { + final Journal j = new Journal(); + j.setName(name); + j.setIssnPrinted(issnPrinted); + j.setIssnOnline(issnOnline); + j.setIssnLinking(issnLinking); + j.setEp(ep); + j.setIss(iss); + j.setSp(sp); + j.setVol(vol); + j.setEdition(edition); + j.setConferenceplace(conferenceplace); + j.setConferencedate(conferencedate); + j.setDataInfo(dataInfo); + return j; + } else { + return null; + } } public static DataInfo dataInfo(final Boolean deletedbyinference, From cdea0dae75abff4ea83c4ab4d40e01d6ab93c749 Mon Sep 17 00:00:00 2001 From: Michele Artini Date: Wed, 12 Feb 2020 16:34:00 +0100 Subject: [PATCH 14/17] bug fixing --- .../dhp/migration/AbstractMongoExecutor.java | 19 +++++++++++++++++-- .../migrate_mongo_mstores_parameters.json | 10 +++++----- 2 files changed, 22 insertions(+), 7 deletions(-) diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMongoExecutor.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMongoExecutor.java index b2792e292..83e05c59f 100644 --- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMongoExecutor.java +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMongoExecutor.java @@ -11,6 +11,8 @@ import java.util.Map; import java.util.Map.Entry; import org.apache.commons.lang3.StringUtils; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.dom4j.Document; import org.dom4j.DocumentException; import org.dom4j.DocumentFactory; @@ -49,6 +51,8 @@ public abstract class AbstractMongoExecutor extends AbstractMigrationExecutor { protected static final Qualifier SOFTWARE_RESULTTYPE_QUALIFIER = qualifier("software", "software", "dnet:result_typologies", "dnet:result_typologies"); protected static final Qualifier OTHER_RESULTTYPE_QUALIFIER = qualifier("other", "other", "dnet:result_typologies", "dnet:result_typologies"); + private static final Log log = 
From cdea0dae75abff4ea83c4ab4d40e01d6ab93c749 Mon Sep 17 00:00:00 2001 From: Michele Artini Date: Wed, 12 Feb 2020 16:34:00 +0100 Subject: [PATCH 14/17] bug fixing --- .../dhp/migration/AbstractMongoExecutor.java | 19 +++++++++++++++++-- .../migrate_mongo_mstores_parameters.json | 10 +++++----- 2 files changed, 22 insertions(+), 7 deletions(-) diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMongoExecutor.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMongoExecutor.java index b2792e292..83e05c59f 100644 --- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMongoExecutor.java +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMongoExecutor.java @@ -11,6 +11,8 @@ import java.util.Map; import java.util.Map.Entry; import org.apache.commons.lang3.StringUtils; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.dom4j.Document; import org.dom4j.DocumentException; import org.dom4j.DocumentFactory; @@ -49,6 +51,8 @@ public abstract class AbstractMongoExecutor extends AbstractMigrationExecutor { protected static final Qualifier SOFTWARE_RESULTTYPE_QUALIFIER = qualifier("software", "software", "dnet:result_typologies", "dnet:result_typologies"); protected static final Qualifier OTHER_RESULTTYPE_QUALIFIER = qualifier("other", "other", "dnet:result_typologies", "dnet:result_typologies"); + private static final Log log = LogFactory.getLog(AbstractMongoExecutor.class); + public AbstractMongoExecutor(final String hdfsPath, final String hdfsNameNode, final String hdfsUser, final String mongoBaseUrl, final String mongoDb, final String dbUrl, final String dbUser, final String dbPassword) throws Exception { @@ -66,6 +70,9 @@ } private void loadClassNames(final String dbUrl, final String dbUser, final String dbPassword) throws IOException { + + log.info("Loading vocabulary terms from db..."); + try (DbClient dbClient = new DbClient(dbUrl, dbUser, dbPassword)) { code2name.clear(); dbClient.processResults("select code, name from class", rs -> { @@ -77,12 +84,19 @@ }); } + log.info("Found " + code2name.size() + " terms."); + } public void processMdRecords(final String mdFormat, final String mdLayout, final String mdInterpretation) throws DocumentException { - for (final Entry<String, String> entry : mdstoreClient.validCollections(mdFormat, mdLayout, mdInterpretation).entrySet()) { - // final String mdId = entry.getKey(); + log.info(String.format("Searching mdstores (format: %s, layout: %s, interpretation: %s)", mdFormat, mdLayout, mdInterpretation)); + + final Map<String, String> colls = mdstoreClient.validCollections(mdFormat, mdLayout, mdInterpretation); + log.info("Found " + colls.size() + " mdstores"); + + for (final Entry<String, String> entry : colls.entrySet()) { + log.info("Processing mdstore " + entry.getKey() + " (collection: " + entry.getValue() + ")"); final String currentColl = entry.getValue(); for (final String xml : mdstoreClient.listRecords(currentColl)) { @@ -101,6 +115,7 @@ } } } + log.info("All Done."); } protected void registerNamespaces(final Map<String, String> nsContext) { diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/migrate_mongo_mstores_parameters.json b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/migrate_mongo_mstores_parameters.json index 3cd6f39f5..5738daa76 100644 --- a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/migrate_mongo_mstores_parameters.json +++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/migrate_mongo_mstores_parameters.json @@ -48,21 +48,21 @@ "paramRequired": true }, { - "paramName": "postgresUrl", + "paramName": "pgurl", "paramLongName": "postgresUrl", "paramDescription": "postgres url, example: jdbc:postgresql://localhost:5432/testdb", "paramRequired": true }, { - "paramName": "postgresUser", + "paramName": "pguser", "paramLongName": "postgresUser", "paramDescription": "postgres user", - "paramRequired": true + "paramRequired": false }, { - "paramName": "postgresPassword", + "paramName": "pgpasswd", "paramLongName": "postgresPassword", "paramDescription": "postgres password", - "paramRequired": true + "paramRequired": false } ] \ No newline at end of file
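The vocabulary loading above follows a simple pattern: one DbClient, one SQL query, one java.util.function.Consumer<ResultSet>. A minimal sketch of a standalone term loader built on the same pattern (connection coordinates are placeholders; assumes DbClient.processResults invokes the consumer once per result row, as loadClassNames relies on):

    import java.util.HashMap;
    import java.util.Map;

    public class VocabularyLoaderExample {

        public static void main(final String[] args) throws Exception {
            final Map<String, String> code2name = new HashMap<>();

            // same pattern as loadClassNames(...)
            try (DbClient dbClient = new DbClient("jdbc:postgresql://localhost:5432/dnet", "dnet", "***")) {
                dbClient.processResults("select code, name from class", rs -> {
                    try {
                        code2name.put(rs.getString("code"), rs.getString("name"));
                    } catch (final Exception e) {
                        throw new RuntimeException(e);
                    }
                });
            }

            System.out.println("Loaded " + code2name.size() + " terms");
        }
    }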
From 80cb52593f80c1287498747b69286eaf730db943 Mon Sep 17 00:00:00 2001 From: Michele Artini Date: Thu, 13 Feb 2020 15:34:13 +0100 Subject: [PATCH 15/17] bug fixing --- .../migration/AbstractMigrationExecutor.java | 4 +++ .../dhp/migration/AbstractMongoExecutor.java | 5 +++- .../dnetlib/dhp/migration/MdstoreClient.java | 25 ++++++++++++------- .../dhp/migration/OdfMigrationExecutor.java | 1 + 4 files changed, 25 insertions(+), 10 deletions(-) diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMigrationExecutor.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMigrationExecutor.java index 11c1fb6ae..e91a53045 100644 --- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMigrationExecutor.java +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMigrationExecutor.java @@ -111,6 +111,10 @@ return Arrays.stream(values).map(v -> field(v, info)).filter(Objects::nonNull).collect(Collectors.toList()); } + public static List<Field<String>> listFields(final DataInfo info, final List<String> values) { + return values.stream().map(v -> field(v, info)).filter(Objects::nonNull).collect(Collectors.toList()); + } + public static Qualifier qualifier(final String classid, final String classname, final String schemeid, final String schemename) { final Qualifier q = new Qualifier(); q.setClassid(classid); diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMongoExecutor.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMongoExecutor.java index 83e05c59f..d1b618c7a 100644 --- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMongoExecutor.java +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/AbstractMongoExecutor.java @@ -385,6 +385,8 @@ protected OAIProvenance prepareOAIprovenance(final Document doc) { final Node n = doc.selectSingleNode("//*[local-name()='provenance']/*[local-name()='originDescription']"); + if (n == null) { return null; } + final String identifier = n.valueOf("./*[local-name()='identifier']"); final String baseURL = n.valueOf("./*[local-name()='baseURL']"); final String metadataNamespace = n.valueOf("./*[local-name()='metadataNamespace']"); @@ -393,6 +395,7 @@ final String harvestDate = n.valueOf("@harvestDate"); return oaiIProvenance(identifier, baseURL, metadataNamespace, altered, datestamp, harvestDate); + } protected DataInfo prepareDataInfo(final Document doc) { @@ -416,7 +419,7 @@ } protected List<Field<String>> prepareListFields(final Node node, final String xpath, final DataInfo info) { - return listFields(info, (String[]) prepareListString(node, xpath).toArray()); + return listFields(info, prepareListString(node, xpath)); } protected List<String> prepareListString(final Node node, final String xpath) {
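The prepareListFields fix addresses a real runtime trap: List.toArray() with no argument returns Object[], so the old (String[]) cast failed with a ClassCastException on the first record carrying such a field. A tiny sketch of the difference (plain Java, no project dependencies):

    import java.util.Arrays;
    import java.util.List;

    public class ToArrayExample {

        public static void main(final String[] args) {
            final List<String> values = Arrays.asList("a", "b");

            // toArray() returns Object[]; this cast throws ClassCastException:
            // final String[] broken = (String[]) values.toArray();

            // the typed variant works...
            final String[] ok = values.toArray(new String[0]);
            System.out.println(ok.length); // 2

            // ...but the patch avoids the conversion entirely by adding a
            // listFields(DataInfo, List<String>) overload
        }
    }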
diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MdstoreClient.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MdstoreClient.java index 971d7f165..87dadfc7a 100644 --- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MdstoreClient.java +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/MdstoreClient.java @@ -2,6 +2,7 @@ package eu.dnetlib.dhp.migration; import java.io.Closeable; import java.io.IOException; +import java.util.ArrayList; import java.util.HashMap; import java.util.Map; import java.util.stream.StreamSupport; @@ -35,7 +36,7 @@ public class MdstoreClient implements Closeable { public Map<String, String> validCollections(final String mdFormat, final String mdLayout, final String mdInterpretation) { final Map<String, String> transactions = new HashMap<>(); - for (final Document entry : getColl(db, COLL_METADATA_MANAGER).find()) { + for (final Document entry : getColl(db, COLL_METADATA_MANAGER, true).find()) { final String mdId = entry.getString("mdId"); final String currentId = entry.getString("currentId"); if (StringUtils.isNoneBlank(mdId, currentId)) { @@ -44,7 +45,7 @@ } final Map<String, String> res = new HashMap<>(); - for (final Document entry : getColl(db, COLL_METADATA).find()) { + for (final Document entry : getColl(db, COLL_METADATA, true).find()) { if (entry.getString("format").equals(mdFormat) && entry.getString("layout").equals(mdLayout) && entry.getString("interpretation").equals(mdInterpretation) && transactions.containsKey(entry.getString("mdId"))) { res.put(entry.getString("mdId"), transactions.get(entry.getString("mdId"))); @@ -63,20 +64,26 @@ return client.getDatabase(dbName); } - private MongoCollection<Document> getColl(final MongoDatabase db, final String collName) { + private MongoCollection<Document> getColl(final MongoDatabase db, final String collName, final boolean abortIfMissing) { if (!Iterables.contains(db.listCollectionNames(), collName)) { final String err = String.format("Missing collection '%s' in database '%s'", collName, db.getName()); log.warn(err); - throw new RuntimeException(err); + if (abortIfMissing) { + throw new RuntimeException(err); + } else { + return null; + } } return db.getCollection(collName); } - public Iterable<String> listRecords(final String coll) { - return () -> StreamSupport.stream(getColl(db, coll).find().spliterator(), false) .filter(e -> e.containsKey("body")) .map(e -> e.getString("body")) .iterator(); + public Iterable<String> listRecords(final String collName) { + final MongoCollection<Document> coll = getColl(db, collName, false); + return coll == null ? new ArrayList<>() : () -> StreamSupport.stream(coll.find().spliterator(), false) .filter(e -> e.containsKey("body")) .map(e -> e.getString("body")) .iterator(); } @Override diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/OdfMigrationExecutor.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/OdfMigrationExecutor.java index b1dbfcdf4..54636b3bf 100644 --- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/OdfMigrationExecutor.java +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/OdfMigrationExecutor.java @@ -34,6 +34,7 @@ public class OdfMigrationExecutor extends AbstractMongoExecutor { @Override protected void registerNamespaces(final Map<String, String> nsContext) { + super.registerNamespaces(nsContext); nsContext.put("dc", "http://datacite.org/schema/kernel-3"); } From c460e2d28126535ab085c104fbce3997eb781a2b Mon Sep 17 00:00:00 2001 From: Claudio Atzori Date: Mon, 17 Feb 2020 11:54:48 +0100 Subject: [PATCH 16/17] Update 'dhp-workflows/docs/oozie-installer.markdown' --- dhp-workflows/docs/oozie-installer.markdown | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/dhp-workflows/docs/oozie-installer.markdown b/dhp-workflows/docs/oozie-installer.markdown index b1953a54e..d2de80dcc 100644 --- a/dhp-workflows/docs/oozie-installer.markdown +++ b/dhp-workflows/docs/oozie-installer.markdown @@ -10,9 +10,8 @@ This module is automatically executed when running: on module having set: - <groupId>eu.dnetlib</groupId> - <artifactId>dhp-wf</artifactId> - <version>1.0.0-SNAPSHOT</version> + <groupId>eu.dnetlib.dhp</groupId> + <artifactId>dhp-workflows</artifactId> in `pom.xml` file.
`oozie-package` profile initializes oozie workflow packaging, `workflow.source.dir` property points to a workflow (notice: this is not a relative path but a classpath to directory usually holding `oozie_app` subdirectory). From 76ee85141a10fead8a67c807416d6cb3b88a167d Mon Sep 17 00:00:00 2001 From: "sandro.labruzzo" Date: Mon, 17 Feb 2020 12:31:44 +0100 Subject: [PATCH 17/17] added oozie job for DNET migration and implemented Spark job for extracting entities --- .../migration/ExtractEntitiesFromHDFSJob.java | 56 ++++ ...extract_entities_from_hdfs_parameters.json | 26 ++ .../migration/oozie_app/config-default.xml | 22 ++ .../dhp/migration/oozie_app/workflow.xml | 282 ++++++++++++++++++ .../dnetlib/dhp/graph/GraphMappingUtils.java | 12 +- pom.xml | 2 +- 6 files changed, 393 insertions(+), 7 deletions(-) create mode 100644 dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/ExtractEntitiesFromHDFSJob.java create mode 100644 dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/extract_entities_from_hdfs_parameters.json create mode 100644 dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/oozie_app/config-default.xml create mode 100644 dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/oozie_app/workflow.xml diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/ExtractEntitiesFromHDFSJob.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/ExtractEntitiesFromHDFSJob.java new file mode 100644 index 000000000..f2d9caebf --- /dev/null +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/ExtractEntitiesFromHDFSJob.java @@ -0,0 +1,56 @@ +package eu.dnetlib.dhp.migration; + +import eu.dnetlib.dhp.application.ArgumentApplicationParser; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.hadoop.io.Text; +import org.apache.spark.api.java.JavaRDD; +import org.apache.spark.api.java.JavaSparkContext; +import org.apache.spark.sql.SparkSession; +import scala.Tuple2; + +import java.util.Arrays; +import java.util.List; + +public class ExtractEntitiesFromHDFSJob { + + private static List<String> folderNames = Arrays.asList("db_entities", "oaf_entities", "odf_entities"); + + public static void main(String[] args) throws Exception { + final ArgumentApplicationParser parser = new ArgumentApplicationParser( IOUtils.toString(MigrateMongoMdstoresApplication.class.getResourceAsStream("/eu/dnetlib/dhp/migration/extract_entities_from_hdfs_parameters.json"))); + parser.parseArgument(args); + + final SparkSession spark = SparkSession .builder() .appName(ExtractEntitiesFromHDFSJob.class.getSimpleName()) .master(parser.get("master")) .getOrCreate(); + + final String sourcePath = parser.get("sourcePath"); + final String targetPath = parser.get("graphRawPath"); + final String entity = parser.get("entity"); + + final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext()); + + JavaRDD<String> inputRdd = sc.emptyRDD(); + + // union(...) returns a new RDD, so the result must be reassigned: + // invoking it inside a forEach would leave inputRdd empty + for (final String p : folderNames) { + inputRdd = inputRdd.union(sc.sequenceFile(sourcePath + "/" + p, Text.class, Text.class) + .map(k -> new Tuple2<>(k._1().toString(), k._2().toString())) + .filter(k -> isEntityType(k._1(), entity)) + .map(Tuple2::_2)); + } + + inputRdd.saveAsTextFile(targetPath + "/" + entity); + } + + private static boolean isEntityType(final String item, final String entity) { + return StringUtils.substringAfter(item, ":").equalsIgnoreCase(entity); + } +}
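This is where the Text key format introduced in patch 13 pays off: each record is keyed as "<counter>:<simple class name, lower case>", so the extractor can route records by entity type with a plain string comparison instead of deserializing the JSON payload. A quick illustration of the matching logic (hypothetical keys):

    import org.apache.commons.lang3.StringUtils;

    public class KeyMatchingExample {

        public static void main(final String[] args) {
            // keys as written by emitOaf(...)
            final String[] keys = { "0:publication", "1:dataset", "2:publication", "3:relation" };

            for (final String key : keys) {
                // the same test used by ExtractEntitiesFromHDFSJob.isEntityType(...)
                if (StringUtils.substringAfter(key, ":").equalsIgnoreCase("publication")) {
                    System.out.println("matched: " + key); // 0:publication, 2:publication
                }
            }
        }
    }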
diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/extract_entities_from_hdfs_parameters.json b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/extract_entities_from_hdfs_parameters.json new file mode 100644 index 000000000..f179ee0f8 --- /dev/null +++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/extract_entities_from_hdfs_parameters.json @@ -0,0 +1,26 @@ +[ + { + "paramName": "s", + "paramLongName": "sourcePath", + "paramDescription": "the HDFS source path which contains the sequential file", + "paramRequired": true + }, + { + "paramName": "mt", + "paramLongName": "master", + "paramDescription": "should be local or yarn", + "paramRequired": true + }, + { + "paramName": "g", + "paramLongName": "graphRawPath", + "paramDescription": "the path of the graph Raw in hdfs", + "paramRequired": true + }, + { + "paramName": "e", + "paramLongName": "entity", + "paramDescription": "The entity to extract", + "paramRequired": true + } +] \ No newline at end of file diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/oozie_app/config-default.xml b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/oozie_app/config-default.xml new file mode 100644 index 000000000..51e48d8f7 --- /dev/null +++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/oozie_app/config-default.xml @@ -0,0 +1,22 @@ +<configuration> + <property> + <name>jobTracker</name> + <value>yarnRM</value> + </property> + <property> + <name>nameNode</name> + <value>hdfs://nameservice1</value> + </property> + <property> + <name>oozie.use.system.libpath</name> + <value>true</value> + </property> + <property> + <name>oozie.action.sharelib.for.spark</name> + <value>spark2</value> + </property> + <property> + <name>hdfsUser</name> + <value>dnet</value> + </property> +</configuration> \ No newline at end of file diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/oozie_app/workflow.xml b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/oozie_app/workflow.xml new file mode 100644 index 000000000..309a6d90f --- /dev/null +++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/oozie_app/workflow.xml @@ -0,0 +1,282 @@ + + + workingPath + the base path to store hdfs file + + + graphRawPath + the graph Raw base path + + + + postgresURL + the postgres URL to access to the database + + + postgresUser + the user postgres + + + postgresPassword + the password postgres + + + mongourl + mongoDB url, example: mongodb://[username:password@]host[:port] + + + mongoDb + mongo database + + + sparkDriverMemory + memory for driver process + + + sparkExecutorMemory + memory for individual executor + + + sparkExecutorCores + number of cores used by single executor + + + + + + + + Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}] + + + + + + + + + + + + + + ${jobTracker} + ${nameNode} + eu.dnetlib.dhp.migration.MigrateDbEntitiesApplication + -p${workingPath}/db_entities + -n${nameNode} + -u${hdfsUser} + -dburl${postgresURL} + -dbuser${postgresUser} + -dbpasswd${postgresPassword} + + + + + + + + ${jobTracker} + ${nameNode} + eu.dnetlib.dhp.migration.MigrateMongoMdstoresApplication + -p${workingPath}/odf_entities + -n${nameNode} + -u${hdfsUser} + -mongourl${mongourl} + -db${mongoDb} + -fODF + -lstore + -icleaned + -pgurl${postgresURL} + -pguser${postgresUser} + -pgpasswd${postgresPassword} + + + + + + + + ${jobTracker} + ${nameNode} + eu.dnetlib.dhp.migration.MigrateMongoMdstoresApplication + -p${workingPath}/oaf_entities + -n${nameNode} + -u${hdfsUser} + -mongourl${mongourl} + -db${mongoDb} + -fOAF + -lstore + -icleaned + -pgurl${postgresURL} + -pguser${postgresUser} +
-pgpasswd${postgresPassword} + + + + + + + + + ${jobTracker} + ${nameNode} + yarn-cluster + cluster + ExtractEntities: publication + eu.dnetlib.dhp.migration.ExtractEntitiesFromHDFSJob + dhp-aggregation-${projectVersion}.jar + --executor-memory ${sparkExecutorMemory} --executor-cores ${sparkExecutorCores} --driver-memory=${sparkDriverMemory} --conf spark.extraListeners="com.cloudera.spark.lineage.NavigatorAppListener" --conf spark.sql.queryExecutionListeners="com.cloudera.spark.lineage.NavigatorQueryListener" --conf spark.sql.warehouse.dir="/user/hive/warehouse" + -mt yarn-cluster + --sourcePath${workingPath} + -g${graphRawPath}/publication + -epublication + + + + + + + + + ${jobTracker} + ${nameNode} + yarn-cluster + cluster + ExtractEntities: dataset + eu.dnetlib.dhp.migration.ExtractEntitiesFromHDFSJob + dhp-aggregation-${projectVersion}.jar + --executor-memory ${sparkExecutorMemory} --executor-cores ${sparkExecutorCores} --driver-memory=${sparkDriverMemory} --conf spark.extraListeners="com.cloudera.spark.lineage.NavigatorAppListener" --conf spark.sql.queryExecutionListeners="com.cloudera.spark.lineage.NavigatorQueryListener" --conf spark.sql.warehouse.dir="/user/hive/warehouse" + -mt yarn-cluster + --sourcePath${workingPath} + -g${graphRawPath}/dataset + -edataset + + + + + + + + + ${jobTracker} + ${nameNode} + yarn-cluster + cluster + ExtractEntities: software + eu.dnetlib.dhp.migration.ExtractEntitiesFromHDFSJob + dhp-aggregation-${projectVersion}.jar + --executor-memory ${sparkExecutorMemory} --executor-cores ${sparkExecutorCores} --driver-memory=${sparkDriverMemory} --conf spark.extraListeners="com.cloudera.spark.lineage.NavigatorAppListener" --conf spark.sql.queryExecutionListeners="com.cloudera.spark.lineage.NavigatorQueryListener" --conf spark.sql.warehouse.dir="/user/hive/warehouse" + -mt yarn-cluster + --sourcePath${workingPath} + -g${graphRawPath}/software + -esoftware + + + + + + + + + ${jobTracker} + ${nameNode} + yarn-cluster + cluster + ExtractEntities: otherresearchproduct + eu.dnetlib.dhp.migration.ExtractEntitiesFromHDFSJob + dhp-aggregation-${projectVersion}.jar + --executor-memory ${sparkExecutorMemory} --executor-cores ${sparkExecutorCores} --driver-memory=${sparkDriverMemory} --conf spark.extraListeners="com.cloudera.spark.lineage.NavigatorAppListener" --conf spark.sql.queryExecutionListeners="com.cloudera.spark.lineage.NavigatorQueryListener" --conf spark.sql.warehouse.dir="/user/hive/warehouse" + -mt yarn-cluster + --sourcePath${workingPath} + -g${graphRawPath}/otherresearchproduct + -eotherresearchproduct + + + + + + + + + ${jobTracker} + ${nameNode} + yarn-cluster + cluster + ExtractEntities: datasource + eu.dnetlib.dhp.migration.ExtractEntitiesFromHDFSJob + dhp-aggregation-${projectVersion}.jar + --executor-memory ${sparkExecutorMemory} --executor-cores ${sparkExecutorCores} --driver-memory=${sparkDriverMemory} --conf spark.extraListeners="com.cloudera.spark.lineage.NavigatorAppListener" --conf spark.sql.queryExecutionListeners="com.cloudera.spark.lineage.NavigatorQueryListener" --conf spark.sql.warehouse.dir="/user/hive/warehouse" + -mt yarn-cluster + --sourcePath${workingPath} + -g${graphRawPath}/datasource + -edatasource + + + + + + + + + ${jobTracker} + ${nameNode} + yarn-cluster + cluster + ExtractEntities: organization + eu.dnetlib.dhp.migration.ExtractEntitiesFromHDFSJob + dhp-aggregation-${projectVersion}.jar + --executor-memory ${sparkExecutorMemory} --executor-cores ${sparkExecutorCores} --driver-memory=${sparkDriverMemory} --conf 
spark.extraListeners="com.cloudera.spark.lineage.NavigatorAppListener" --conf spark.sql.queryExecutionListeners="com.cloudera.spark.lineage.NavigatorQueryListener" --conf spark.sql.warehouse.dir="/user/hive/warehouse" + -mt yarn-cluster + --sourcePath${workingPath} + -g${graphRawPath}/organization + -eorganization + + + + + + + + + ${jobTracker} + ${nameNode} + yarn-cluster + cluster + ExtractEntities: project + eu.dnetlib.dhp.migration.ExtractEntitiesFromHDFSJob + dhp-aggregation-${projectVersion}.jar + --executor-memory ${sparkExecutorMemory} --executor-cores ${sparkExecutorCores} --driver-memory=${sparkDriverMemory} --conf spark.extraListeners="com.cloudera.spark.lineage.NavigatorAppListener" --conf spark.sql.queryExecutionListeners="com.cloudera.spark.lineage.NavigatorQueryListener" --conf spark.sql.warehouse.dir="/user/hive/warehouse" + -mt yarn-cluster + --sourcePath${workingPath} + -g${graphRawPath}/project + -eproject + + + + + + + + + ${jobTracker} + ${nameNode} + yarn-cluster + cluster + ExtractEntities: relation + eu.dnetlib.dhp.migration.ExtractEntitiesFromHDFSJob + dhp-aggregation-${projectVersion}.jar + --executor-memory ${sparkExecutorMemory} --executor-cores ${sparkExecutorCores} --driver-memory=${sparkDriverMemory} --conf spark.extraListeners="com.cloudera.spark.lineage.NavigatorAppListener" --conf spark.sql.queryExecutionListeners="com.cloudera.spark.lineage.NavigatorQueryListener" --conf spark.sql.warehouse.dir="/user/hive/warehouse" + -mt yarn-cluster + --sourcePath${workingPath} + -g${graphRawPath}/relation + -erelation + + + + + + + + \ No newline at end of file diff --git a/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/graph/GraphMappingUtils.java b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/graph/GraphMappingUtils.java index 0291be47e..7c0967b2e 100644 --- a/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/graph/GraphMappingUtils.java +++ b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/graph/GraphMappingUtils.java @@ -18,13 +18,13 @@ public class GraphMappingUtils { public final static Map<String, Class> types = Maps.newHashMap(); static { - types.put("datasource", Datasource.class); - types.put("organization", Organization.class); + types.put("datasource", Datasource.class); + types.put("organization", Organization.class); types.put("project", Project.class); - types.put("dataset", Dataset.class); - types.put("otherresearchproduct", OtherResearchProduct.class); - types.put("software", Software.class); - types.put("publication", Publication.class); + types.put("dataset", Dataset.class); + types.put("otherresearchproduct", OtherResearchProduct.class); + types.put("software", Software.class); + types.put("publication", Publication.class); types.put("relation", Relation.class); } diff --git a/pom.xml b/pom.xml index 6f85886c0..658d8285f 100644 --- a/pom.xml +++ b/pom.xml @@ -202,7 +202,7 @@ <groupId>eu.dnetlib</groupId> <artifactId>dnet-pace-core</artifactId> - <version>4.0.0-SNAPSHOT</version> + <version>4.0.0</version>
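The types map gives downstream jobs a single lookup from the entity names used by the extraction step to the corresponding OAF model classes. A short sketch of how one of the extracted JSON lines could be turned back into a typed object (the ObjectMapper usage mirrors emitOaf; the inlined record and the assumption that Publication extends Oaf come from dhp-schemas, the entity name is a placeholder):

    import org.codehaus.jackson.map.ObjectMapper;

    import eu.dnetlib.dhp.graph.GraphMappingUtils;
    import eu.dnetlib.dhp.schema.oaf.Oaf;

    public class TypedReadExample {

        public static void main(final String[] args) throws Exception {
            final ObjectMapper mapper = new ObjectMapper();

            // one line of ${graphRawPath}/publication, inlined here for brevity
            final String json = "{}";

            // resolve the concrete OAF class from the entity name
            final Class<?> clazz = GraphMappingUtils.types.get("publication");

            final Oaf entity = (Oaf) mapper.readValue(json, clazz);
            System.out.println(entity.getClass().getSimpleName()); // Publication
        }
    }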