diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/step1/MigrateDbEntitiesApplication.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/step1/MigrateDbEntitiesApplication.java
index 2d5c425a1..5e54c2b86 100644
--- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/step1/MigrateDbEntitiesApplication.java
+++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/step1/MigrateDbEntitiesApplication.java
@@ -68,12 +68,10 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication i
final String dbPassword = parser.get("postgresPassword");
final String hdfsPath = parser.get("hdfsPath");
- final String hdfsNameNode = parser.get("namenode");
- final String hdfsUser = parser.get("hdfsUser");
final boolean processClaims = parser.get("action") != null && parser.get("action").equalsIgnoreCase("claims");
- try (final MigrateDbEntitiesApplication smdbe = new MigrateDbEntitiesApplication(hdfsPath, hdfsNameNode, hdfsUser, dbUrl, dbUser, dbPassword)) {
+ try (final MigrateDbEntitiesApplication smdbe = new MigrateDbEntitiesApplication(hdfsPath, dbUrl, dbUser, dbPassword)) {
if (processClaims) {
log.info("Processing claims...");
smdbe.execute("queryClaims.sql", smdbe::processClaims);
@@ -97,9 +95,9 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication i
}
}
- public MigrateDbEntitiesApplication(final String hdfsPath, final String hdfsNameNode, final String hdfsUser, final String dbUrl, final String dbUser,
+ public MigrateDbEntitiesApplication(final String hdfsPath, final String dbUrl, final String dbUser,
final String dbPassword) throws Exception {
- super(hdfsPath, hdfsNameNode, hdfsUser);
+ super(hdfsPath);
this.dbClient = new DbClient(dbUrl, dbUser, dbPassword);
this.lastUpdateTimestamp = new Date().getTime();
}
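
Note: with the `-n`/`-u` flags gone, the entry point is driven by the output path and the Postgres coordinates alone. A minimal sketch of the trimmed invocation, with hypothetical argument values (the flag names match the workflow definitions further down):

```java
// Hedged sketch: hypothetical values, flag names taken from the workflows below.
public class MigrateDbEntitiesInvocationSketch {
    public static void main(String[] unused) throws Exception {
        eu.dnetlib.dhp.migration.step1.MigrateDbEntitiesApplication.main(new String[] {
                "-p", "/tmp/migration/db_records",            // HDFS path of the sequence file
                "-pgurl", "jdbc:postgresql://host:5432/dnet", // hypothetical Postgres URL
                "-pguser", "dnet",                            // hypothetical credentials
                "-pgpasswd", "***"
        });
    }
}
```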
diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/step1/MigrateMongoMdstoresApplication.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/step1/MigrateMongoMdstoresApplication.java
index dad1278e9..b1de31326 100644
--- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/step1/MigrateMongoMdstoresApplication.java
+++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/step1/MigrateMongoMdstoresApplication.java
@@ -32,18 +32,15 @@ public class MigrateMongoMdstoresApplication extends AbstractMigrationApplicatio
final String mdInterpretation = parser.get("mdInterpretation");
final String hdfsPath = parser.get("hdfsPath");
- final String hdfsNameNode = parser.get("namenode");
- final String hdfsUser = parser.get("hdfsUser");
- try (MigrateMongoMdstoresApplication app = new MigrateMongoMdstoresApplication(hdfsPath, hdfsNameNode, hdfsUser, mongoBaseUrl, mongoDb)) {
+ try (MigrateMongoMdstoresApplication app = new MigrateMongoMdstoresApplication(hdfsPath, mongoBaseUrl, mongoDb)) {
app.execute(mdFormat, mdLayout, mdInterpretation);
}
}
- public MigrateMongoMdstoresApplication(final String hdfsPath, final String hdfsNameNode, final String hdfsUser, final String mongoBaseUrl,
- final String mongoDb) throws Exception {
- super(hdfsPath, hdfsNameNode, hdfsUser);
+ public MigrateMongoMdstoresApplication(final String hdfsPath, final String mongoBaseUrl, final String mongoDb) throws Exception {
+ super(hdfsPath);
this.mdstoreClient = new MdstoreClient(mongoBaseUrl, mongoDb);
}
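
For programmatic use, the Mongo migration now takes only three constructor arguments, and the try-with-resources pattern from `main` still applies. A sketch under assumed values; the `"store"`/`"cleaned"` layout and interpretation strings are hypothetical:

```java
// Hedged sketch of direct (non-CLI) use after the change; all literals are hypothetical.
import eu.dnetlib.dhp.migration.step1.MigrateMongoMdstoresApplication;

public class MigrateMongoSketch {
    public static void main(String[] args) throws Exception {
        try (MigrateMongoMdstoresApplication app = new MigrateMongoMdstoresApplication(
                "/tmp/migration/odf_records", // HDFS output path
                "mongodb://localhost:27017",  // Mongo base URL
                "mdstore")) {                 // Mongo database name
            app.execute("ODF", "store", "cleaned"); // format, layout, interpretation
        }
    }
}
```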
diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/utils/AbstractMigrationApplication.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/utils/AbstractMigrationApplication.java
index 41f9f8145..8eb444562 100644
--- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/utils/AbstractMigrationApplication.java
+++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/migration/utils/AbstractMigrationApplication.java
@@ -2,13 +2,11 @@ package eu.dnetlib.dhp.migration.utils;
import java.io.Closeable;
import java.io.IOException;
-import java.net.URI;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
@@ -30,22 +28,21 @@ public class AbstractMigrationApplication implements Closeable {
private static final Log log = LogFactory.getLog(AbstractMigrationApplication.class);
- public AbstractMigrationApplication(final String hdfsPath, final String hdfsNameNode, final String hdfsUser) throws Exception {
+ public AbstractMigrationApplication(final String hdfsPath) throws Exception {
- log.info(String.format("Creating SequenceFile Writer, hdfsPath=%s, nameNode=%s, user=%s", hdfsPath, hdfsNameNode, hdfsUser));
+ log.info(String.format("Creating SequenceFile Writer, hdfsPath=%s", hdfsPath));
- this.writer = SequenceFile.createWriter(getConf(hdfsNameNode, hdfsUser), SequenceFile.Writer.file(new Path(hdfsPath)), SequenceFile.Writer
+ this.writer = SequenceFile.createWriter(getConf(), SequenceFile.Writer.file(new Path(hdfsPath)), SequenceFile.Writer
.keyClass(Text.class), SequenceFile.Writer.valueClass(Text.class));
}
- private Configuration getConf(final String hdfsNameNode, final String hdfsUser) throws IOException {
+ private Configuration getConf() throws IOException {
final Configuration conf = new Configuration();
- conf.set("fs.defaultFS", hdfsNameNode);
- conf.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
- conf.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
- System.setProperty("HADOOP_USER_NAME", hdfsUser);
- System.setProperty("hadoop.home.dir", "/");
- FileSystem.get(URI.create(hdfsNameNode), conf);
+ /*
+ * conf.set("fs.defaultFS", hdfsNameNode); conf.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
+ * conf.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName()); System.setProperty("HADOOP_USER_NAME", hdfsUser);
+ * System.setProperty("hadoop.home.dir", "/"); FileSystem.get(URI.create(hdfsNameNode), conf);
+ */
return conf;
}
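
The deleted wiring (`fs.defaultFS`, `HADOOP_USER_NAME`, the eager `FileSystem.get`) is exactly what a bare `new Configuration()` already provides when the JVM is launched by an Oozie `<java>` action, since the cluster's `core-site.xml`/`hdfs-site.xml` sit on the action's classpath. A small probe illustrating that assumption:

```java
// Hedged sketch: on a configured cluster node, a bare Configuration resolves
// the default filesystem without any explicit namenode URI or user property.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

public class DefaultFsProbe {
    public static void main(String[] args) throws Exception {
        final Configuration conf = new Configuration();    // loads *-site.xml from the classpath
        System.out.println(conf.get("fs.defaultFS"));      // e.g. hdfs://nn-host:8020 on a cluster
        System.out.println(FileSystem.get(conf).getUri()); // the filesystem actually resolved
    }
}
```

The dropped `FileSystem.get(URI.create(hdfsNameNode), conf)` call appears to have existed only for its side effect of eagerly validating the namenode URI; `SequenceFile.createWriter` surfaces the same failure at first use.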
diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/migrate_db_entities_parameters.json b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/migrate_db_entities_parameters.json
index 39e0dd5ac..cb13ff024 100644
--- a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/migrate_db_entities_parameters.json
+++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/migrate_db_entities_parameters.json
@@ -5,18 +5,6 @@
"paramDescription": "the path where storing the sequential file",
"paramRequired": true
},
- {
- "paramName": "n",
- "paramLongName": "namenode",
- "paramDescription": "the Name Node URI",
- "paramRequired": true
- },
- {
- "paramName": "u",
- "paramLongName": "hdfsUser",
- "paramDescription": "the user wich create the hdfs seq file",
- "paramRequired": true
- },
{
"paramName": "pgurl",
"paramLongName": "postgresUrl",
diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/migrate_mongo_mstores_parameters.json b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/migrate_mongo_mstores_parameters.json
index fc900e97d..ee1a6ac4e 100644
--- a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/migrate_mongo_mstores_parameters.json
+++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/migrate_mongo_mstores_parameters.json
@@ -5,18 +5,6 @@
"paramDescription": "the path where storing the sequential file",
"paramRequired": true
},
- {
- "paramName": "n",
- "paramLongName": "namenode",
- "paramDescription": "the Name Node URI",
- "paramRequired": true
- },
- {
- "paramName": "u",
- "paramLongName": "hdfsUser",
- "paramDescription": "the user wich create the hdfs seq file",
- "paramRequired": true
- },
{
"paramName": "mongourl",
"paramLongName": "mongoBaseUrl",
diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/wfs/claims/oozie_app/config-default.xml b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/wfs/claims/oozie_app/config-default.xml
index 51e48d8f7..2e0ed9aee 100644
--- a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/wfs/claims/oozie_app/config-default.xml
+++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/wfs/claims/oozie_app/config-default.xml
@@ -15,8 +15,4 @@
<name>oozie.action.sharelib.for.spark</name>
<value>spark2</value>
</property>
- <property>
- <name>hdfsUser</name>
- <value>dnet</value>
- </property>
</configuration>
\ No newline at end of file
diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/wfs/claims/oozie_app/workflow.xml b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/wfs/claims/oozie_app/workflow.xml
index 7a1ee2cae..1ac456976 100644
--- a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/wfs/claims/oozie_app/workflow.xml
+++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/wfs/claims/oozie_app/workflow.xml
@@ -67,8 +67,6 @@
<name-node>${nameNode}</name-node>
<main-class>eu.dnetlib.dhp.migration.step1.MigrateDbEntitiesApplication</main-class>
<arg>-p</arg><arg>${migrationClaimsPathStep1}/db_claims</arg>
- <arg>-n</arg><arg>${nameNode}</arg>
- <arg>-u</arg><arg>${hdfsUser}</arg>
<arg>-pgurl</arg><arg>${postgresURL}</arg>
<arg>-pguser</arg><arg>${postgresUser}</arg>
<arg>-pgpasswd</arg><arg>${postgresPassword}</arg>
@@ -84,8 +82,6 @@
<name-node>${nameNode}</name-node>
<main-class>eu.dnetlib.dhp.migration.step1.MigrateMongoMdstoresApplication</main-class>
<arg>-p</arg><arg>${migrationClaimsPathStep1}/odf_claims</arg>
- <arg>-n</arg><arg>${nameNode}</arg>
- <arg>-u</arg><arg>${hdfsUser}</arg>
<arg>-mongourl</arg><arg>${mongoURL}</arg>
<arg>-mongodb</arg><arg>${mongoDb}</arg>
<arg>-f</arg><arg>ODF</arg>
@@ -102,8 +98,6 @@
<name-node>${nameNode}</name-node>
<main-class>eu.dnetlib.dhp.migration.step1.MigrateMongoMdstoresApplication</main-class>
<arg>-p</arg><arg>${migrationClaimsPathStep1}/oaf_claims</arg>
- <arg>-n</arg><arg>${nameNode}</arg>
- <arg>-u</arg><arg>${hdfsUser}</arg>
<arg>-mongourl</arg><arg>${mongoURL}</arg>
<arg>-mongodb</arg><arg>${mongoDb}</arg>
<arg>-f</arg><arg>OAF</arg>
diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/wfs/regular_all_steps/oozie_app/config-default.xml b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/wfs/regular_all_steps/oozie_app/config-default.xml
index 51e48d8f7..2e0ed9aee 100644
--- a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/wfs/regular_all_steps/oozie_app/config-default.xml
+++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/wfs/regular_all_steps/oozie_app/config-default.xml
@@ -15,8 +15,4 @@
<name>oozie.action.sharelib.for.spark</name>
<value>spark2</value>
</property>
- <property>
- <name>hdfsUser</name>
- <value>dnet</value>
- </property>
</configuration>
\ No newline at end of file
diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/wfs/regular_all_steps/oozie_app/workflow.xml b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/wfs/regular_all_steps/oozie_app/workflow.xml
index e27372240..39807dd36 100644
--- a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/wfs/regular_all_steps/oozie_app/workflow.xml
+++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/wfs/regular_all_steps/oozie_app/workflow.xml
@@ -67,8 +67,6 @@
<name-node>${nameNode}</name-node>
<main-class>eu.dnetlib.dhp.migration.step1.MigrateDbEntitiesApplication</main-class>
<arg>-p</arg><arg>${migrationPathStep1}/db_records</arg>
- <arg>-n</arg><arg>${nameNode}</arg>
- <arg>-u</arg><arg>${hdfsUser}</arg>
<arg>-pgurl</arg><arg>${postgresURL}</arg>
<arg>-pguser</arg><arg>${postgresUser}</arg>
<arg>-pgpasswd</arg><arg>${postgresPassword}</arg>
@@ -83,8 +81,6 @@
<name-node>${nameNode}</name-node>
<main-class>eu.dnetlib.dhp.migration.step1.MigrateMongoMdstoresApplication</main-class>
<arg>-p</arg><arg>${migrationPathStep1}/odf_records</arg>
- <arg>-n</arg><arg>${nameNode}</arg>
- <arg>-u</arg><arg>${hdfsUser}</arg>
<arg>-mongourl</arg><arg>${mongoURL}</arg>
<arg>-mongodb</arg><arg>${mongoDb}</arg>
<arg>-f</arg><arg>ODF</arg>
@@ -101,8 +97,6 @@
<name-node>${nameNode}</name-node>
<main-class>eu.dnetlib.dhp.migration.step1.MigrateMongoMdstoresApplication</main-class>
<arg>-p</arg><arg>${migrationPathStep1}/oaf_records</arg>
- <arg>-n</arg><arg>${nameNode}</arg>
- <arg>-u</arg><arg>${hdfsUser}</arg>
<arg>-mongourl</arg><arg>${mongoURL}</arg>
<arg>-mongodb</arg><arg>${mongoDb}</arg>
<arg>-f</arg><arg>OAF</arg>
diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/wfs/regular_step1/oozie_app/config-default.xml b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/wfs/regular_step1/oozie_app/config-default.xml
index 51e48d8f7..2e0ed9aee 100644
--- a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/wfs/regular_step1/oozie_app/config-default.xml
+++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/wfs/regular_step1/oozie_app/config-default.xml
@@ -15,8 +15,4 @@
<name>oozie.action.sharelib.for.spark</name>
<value>spark2</value>
</property>
- <property>
- <name>hdfsUser</name>
- <value>dnet</value>
- </property>
</configuration>
\ No newline at end of file
diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/wfs/regular_step1/oozie_app/workflow.xml b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/wfs/regular_step1/oozie_app/workflow.xml
index 7b3c5a746..f16e22f95 100644
--- a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/wfs/regular_step1/oozie_app/workflow.xml
+++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/wfs/regular_step1/oozie_app/workflow.xml
@@ -59,8 +59,6 @@
<name-node>${nameNode}</name-node>
<main-class>eu.dnetlib.dhp.migration.step1.MigrateDbEntitiesApplication</main-class>
<arg>-p</arg><arg>${migrationPathStep1}/db_records</arg>
- <arg>-n</arg><arg>${nameNode}</arg>
- <arg>-u</arg><arg>${hdfsUser}</arg>
<arg>-pgurl</arg><arg>${postgresURL}</arg>
<arg>-pguser</arg><arg>${postgresUser}</arg>
<arg>-pgpasswd</arg><arg>${postgresPassword}</arg>
@@ -75,8 +73,6 @@
<name-node>${nameNode}</name-node>
<main-class>eu.dnetlib.dhp.migration.step1.MigrateMongoMdstoresApplication</main-class>
<arg>-p</arg><arg>${migrationPathStep1}/odf_records</arg>
- <arg>-n</arg><arg>${nameNode}</arg>
- <arg>-u</arg><arg>${hdfsUser}</arg>
<arg>-mongourl</arg><arg>${mongoURL}</arg>
<arg>-mongodb</arg><arg>${mongoDb}</arg>
<arg>-f</arg><arg>ODF</arg>
@@ -93,8 +89,6 @@
<name-node>${nameNode}</name-node>
<main-class>eu.dnetlib.dhp.migration.step1.MigrateMongoMdstoresApplication</main-class>
<arg>-p</arg><arg>${migrationPathStep1}/oaf_records</arg>
- <arg>-n</arg><arg>${nameNode}</arg>
- <arg>-u</arg><arg>${hdfsUser}</arg>
<arg>-mongourl</arg><arg>${mongoURL}</arg>
<arg>-mongodb</arg><arg>${mongoDb}</arg>
<arg>-f</arg><arg>OAF</arg>
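
Across all three workflows the pattern is identical: the `<name-node>` element of each `<java>` action already tells Oozie which filesystem the child JVM sees, so repeating it as `-n ${nameNode}` was redundant, and the writing identity now comes from whoever runs the workflow instead of a hard-coded `hdfsUser=dnet`. If in doubt about which identity that is, Hadoop can report it:

```java
// Hedged sketch: prints the effective Hadoop user, i.e. the identity that now
// owns the written sequence files in place of the removed hdfsUser setting.
import org.apache.hadoop.security.UserGroupInformation;

public class EffectiveUserProbe {
    public static void main(String[] args) throws Exception {
        System.out.println(UserGroupInformation.getCurrentUser().getUserName());
    }
}
```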
diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/wfs/regular_step2/oozie_app/config-default.xml b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/wfs/regular_step2/oozie_app/config-default.xml
index 51e48d8f7..2e0ed9aee 100644
--- a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/wfs/regular_step2/oozie_app/config-default.xml
+++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/wfs/regular_step2/oozie_app/config-default.xml
@@ -15,8 +15,4 @@
<name>oozie.action.sharelib.for.spark</name>
<value>spark2</value>
</property>
- <property>
- <name>hdfsUser</name>
- <value>dnet</value>
- </property>
</configuration>
\ No newline at end of file
diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/wfs/regular_step3/oozie_app/config-default.xml b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/wfs/regular_step3/oozie_app/config-default.xml
index 51e48d8f7..2e0ed9aee 100644
--- a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/wfs/regular_step3/oozie_app/config-default.xml
+++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/wfs/regular_step3/oozie_app/config-default.xml
@@ -15,8 +15,4 @@
<name>oozie.action.sharelib.for.spark</name>
<value>spark2</value>
</property>
- <property>
- <name>hdfsUser</name>
- <value>dnet</value>
- </property>
</configuration>
\ No newline at end of file
diff --git a/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/collection/CollectionJobTest.java b/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/collection/CollectionJobTest.java
index 848fbe17d..b367491e5 100644
--- a/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/collection/CollectionJobTest.java
+++ b/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/collection/CollectionJobTest.java
@@ -1,79 +1,87 @@
package eu.dnetlib.dhp.collection;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import eu.dnetlib.dhp.model.mdstore.MetadataRecord;
-import eu.dnetlib.dhp.model.mdstore.Provenance;
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.io.IOUtils;
-import org.junit.*;
-
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import eu.dnetlib.dhp.model.mdstore.MetadataRecord;
+import eu.dnetlib.dhp.model.mdstore.Provenance;
+
public class CollectionJobTest {
- private Path testDir;
- @Before
- public void setup() throws IOException {
- testDir = Files.createTempDirectory("dhp-collection");
- }
+ private Path testDir;
- @After
- public void teadDown() throws IOException {
- FileUtils.deleteDirectory(testDir.toFile());
- }
+ @Before
+ public void setup() throws IOException {
+ testDir = Files.createTempDirectory("dhp-collection");
+ }
- @Test
- public void tesCollection() throws Exception {
- Provenance provenance = new Provenance("pippo", "puppa", "ns_prefix");
- GenerateNativeStoreSparkJob.main(new String[] {
- "-mt", "local",
- "-w", "wid",
- "-e", "XML",
- "-d", ""+System.currentTimeMillis(),
- "-p", new ObjectMapper().writeValueAsString(provenance),
- "-x", "./*[local-name()='record']/*[local-name()='header']/*[local-name()='identifier']",
- "-i", this.getClass().getResource("/eu/dnetlib/dhp/collection/native.seq").toString(),
- "-o", testDir.toString()+"/store",
- "-t", "true",
- "-ru", "",
- "-rp", "",
- "-rh", "",
- "-ro", "",
- "-rr", ""});
- System.out.println(new ObjectMapper().writeValueAsString(provenance));
- }
+ @After
+ public void teadDown() throws IOException {
+ FileUtils.deleteDirectory(testDir.toFile());
+ }
+ @Test
+ public void tesCollection() throws Exception {
+ final Provenance provenance = new Provenance("pippo", "puppa", "ns_prefix");
+ GenerateNativeStoreSparkJob.main(new String[] {
+ "-mt", "local",
+ "-w", "wid",
+ "-e", "XML",
+ "-d", "" + System.currentTimeMillis(),
+ "-p", new ObjectMapper().writeValueAsString(provenance),
+ "-x", "./*[local-name()='record']/*[local-name()='header']/*[local-name()='identifier']",
+ "-i", this.getClass().getResource("/eu/dnetlib/dhp/collection/native.seq").toString(),
+ "-o", testDir.toString() + "/store",
+ "-t", "true",
+ "-ru", "",
+ "-rp", "",
+ "-rh", "",
+ "-ro", "",
+ "-rr", "" });
+ System.out.println(new ObjectMapper().writeValueAsString(provenance));
+ }
+ @Test
+ public void testGenerationMetadataRecord() throws Exception {
- @Test
- public void testGenerationMetadataRecord() throws Exception {
+ final String xml = IOUtils.toString(this.getClass().getResourceAsStream("./record.xml"));
- final String xml = IOUtils.toString(this.getClass().getResourceAsStream("./record.xml"));
+ final MetadataRecord record = GenerateNativeStoreSparkJob
+ .parseRecord(xml, "./*[local-name()='record']/*[local-name()='header']/*[local-name()='identifier']", "XML", new Provenance("foo", "bar",
+ "ns_prefix"), System.currentTimeMillis(), null, null);
- MetadataRecord record = GenerateNativeStoreSparkJob.parseRecord(xml, "./*[local-name()='record']/*[local-name()='header']/*[local-name()='identifier']", "XML", new Provenance("foo", "bar", "ns_prefix"), System.currentTimeMillis(), null,null);
+ assert record != null;
+ System.out.println(record.getId());
+ System.out.println(record.getOriginalId());
- assert record != null;
- System.out.println(record.getId());
- System.out.println(record.getOriginalId());
+ }
+ @Test
+ public void TestEquals() throws IOException {
- }
+ final String xml = IOUtils.toString(this.getClass().getResourceAsStream("./record.xml"));
+ final MetadataRecord record = GenerateNativeStoreSparkJob
+ .parseRecord(xml, "./*[local-name()='record']/*[local-name()='header']/*[local-name()='identifier']", "XML", new Provenance("foo", "bar",
+ "ns_prefix"), System.currentTimeMillis(), null, null);
+ final MetadataRecord record1 = GenerateNativeStoreSparkJob
+ .parseRecord(xml, "./*[local-name()='record']/*[local-name()='header']/*[local-name()='identifier']", "XML", new Provenance("foo", "bar",
+ "ns_prefix"), System.currentTimeMillis(), null, null);
+ assert record != null;
+ record.setBody("ciao");
+ assert record1 != null;
+ record1.setBody("mondo");
+ Assert.assertEquals(record, record1);
-
- @Test
- public void TestEquals () throws IOException {
-
- final String xml = IOUtils.toString(this.getClass().getResourceAsStream("./record.xml"));
- MetadataRecord record = GenerateNativeStoreSparkJob.parseRecord(xml, "./*[local-name()='record']/*[local-name()='header']/*[local-name()='identifier']", "XML", new Provenance("foo", "bar", "ns_prefix"), System.currentTimeMillis(), null,null);
- MetadataRecord record1 = GenerateNativeStoreSparkJob.parseRecord(xml, "./*[local-name()='record']/*[local-name()='header']/*[local-name()='identifier']", "XML", new Provenance("foo", "bar", "ns_prefix"), System.currentTimeMillis(), null,null);
- assert record != null;
- record.setBody("ciao");
- assert record1 != null;
- record1.setBody("mondo");
- Assert.assertEquals(record, record1);
-
- }
+ }
}
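
The reformatted `TestEquals` case only passes if `MetadataRecord` equality ignores the body. A hypothetical minimal model of that contract (not the real `eu.dnetlib.dhp.model.mdstore.MetadataRecord`):

```java
// Hedged sketch: id-only equality, the behaviour TestEquals appears to rely on.
import java.util.Objects;

public class IdOnlyRecord {

    private final String id;
    private String body;

    public IdOnlyRecord(final String id) { this.id = id; }

    public void setBody(final String body) { this.body = body; }

    @Override
    public boolean equals(final Object o) {
        return o instanceof IdOnlyRecord && Objects.equals(id, ((IdOnlyRecord) o).id);
    }

    @Override
    public int hashCode() { return Objects.hash(id); }

    public static void main(String[] args) {
        final IdOnlyRecord a = new IdOnlyRecord("oai:1");
        final IdOnlyRecord b = new IdOnlyRecord("oai:1");
        a.setBody("ciao");
        b.setBody("mondo");
        System.out.println(a.equals(b)); // true: the body is ignored, mirroring the test
    }
}
```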