fix problem in parameters
This commit is contained in:
parent 5868ff8a86
commit e0a777028a
@@ -1,66 +0,0 @@
-package eu.dnetlib.dhp.graph;
-
-import eu.dnetlib.dhp.graph.model.EntityRelEntity;
-import eu.dnetlib.dhp.graph.model.RelatedEntity;
-import eu.dnetlib.dhp.graph.utils.GraphMappingUtils;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.io.IOException;
-import java.io.InputStreamReader;
-
-public class MappingUtilsTest {
-
-    private GraphMappingUtils utils;
-
-    @Before
-    public void setUp() {
-        utils = new GraphMappingUtils();
-    }
-
-    @Test
-    public void testOafMappingDatasource() throws IOException {
-
-        final InputStreamReader in = new InputStreamReader(getClass().getResourceAsStream("datasource.json"));
-        final EntityRelEntity e = new ObjectMapper().readValue(in, EntityRelEntity.class);
-        e.getSource().setType("datasource");
-
-        final EntityRelEntity out = utils.asRelatedEntity(e);
-        System.out.println(out);
-
-    }
-
-    //@Test
-    public void testOafMappingResult() throws IOException {
-
-        final InputStreamReader in = new InputStreamReader(getClass().getResourceAsStream("result.json"));
-        final EntityRelEntity e = new ObjectMapper().readValue(in, EntityRelEntity.class);
-
-        final EntityRelEntity out = utils.asRelatedEntity(e);
-        System.out.println(out);
-
-    }
-
-    @Test
-    public void testOafMappingSoftware() throws IOException {
-
-        final InputStreamReader in = new InputStreamReader(getClass().getResourceAsStream("software.json"));
-        final EntityRelEntity e = new ObjectMapper().readValue(in, EntityRelEntity.class);
-
-        final EntityRelEntity out = utils.asRelatedEntity(e);
-        System.out.println(out);
-
-    }
-
-
-    @Test
-    public void testParseRelatedEntity() throws IOException {
-
-        final InputStreamReader in = new InputStreamReader(getClass().getResourceAsStream("related_entity.json"));
-        final RelatedEntity e = new ObjectMapper().readValue(in, RelatedEntity.class);
-
-        System.out.println(e);
-
-    }
-}
@@ -1,55 +0,0 @@
-package eu.dnetlib.dhp.graph;
-
-import eu.dnetlib.dhp.graph.utils.ContextMapper;
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.spark.sql.SparkSession;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.io.File;
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-
-public class XmlRecordFactoryTest {
-
-    private static final Log log = LogFactory.getLog(XmlRecordFactoryTest.class);
-
-    private Path testDir;
-
-    @Before
-    public void setup() throws IOException {
-        testDir = Files.createTempDirectory(getClass().getSimpleName());
-        log.info("created test directory " + testDir.toString());
-    }
-
-    @After
-    public void tearDown() throws IOException {
-        FileUtils.deleteDirectory(testDir.toFile());
-        log.info("deleted test directory " + testDir.toString());
-    }
-
-    @Test
-    public void testXmlSerialization() throws Exception {
-
-        final SparkSession spark = SparkSession
-            .builder()
-            .appName(SparkXmlRecordBuilderJob.class.getSimpleName())
-            .master("local[*]")
-            .getOrCreate();
-
-        final String inputDir = testDir.toString() + "/3_joined_entities";
-        FileUtils.forceMkdir(new File(inputDir));
-        FileUtils.copyFile(new File("/Users/claudio/Downloads/joined_entities-part-00000"), new File(inputDir + "/joined_entities-part-00000"));
-
-        final ContextMapper ctx = ContextMapper.fromIS("https://dev-openaire.d4science.org:443/is/services/isLookUp");
-
-        final GraphJoiner g = new GraphJoiner(spark, ctx, inputDir, testDir.toString());
-
-        g.asXML();
-    }
-
-}
@@ -0,0 +1,4 @@
+package eu.dnetlib.dhp;
+
+public class PropagationConstant {
+}
@@ -3,13 +3,13 @@
     "paramName":"mt",
     "paramLongName":"master",
     "paramDescription": "should be local or yarn",
-    "paramRequired": true,
+    "paramRequired": true
   },
   {
     "paramName":"s",
     "paramLongName":"sourcePath",
     "paramDescription": "the path of the sequencial file to read",
-    "paramRequired": true,
+    "paramRequired": true
   },
   {
     "paramName":"wl",
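The problem this hunk fixes: each parameter object carried a trailing comma after "paramRequired": true, making the JSON invalid right before the closing brace. A minimal, self-contained sketch (my own code, not part of the commit) showing that Jackson's default parser rejects the old form unless leniency is explicitly enabled:

    import com.fasterxml.jackson.core.JsonParser;
    import com.fasterxml.jackson.databind.ObjectMapper;

    public class TrailingCommaDemo {

        public static void main(String[] args) throws Exception {
            // The old, broken shape: trailing comma before the closing brace.
            final String broken = "{ \"paramName\": \"mt\", \"paramRequired\": true, }";

            final ObjectMapper mapper = new ObjectMapper();
            try {
                mapper.readTree(broken); // default settings: throws on the trailing comma
            } catch (Exception e) {
                System.out.println("rejected: " + e.getMessage());
            }

            // Only an explicitly lenient parser accepts the old form.
            mapper.enable(JsonParser.Feature.ALLOW_TRAILING_COMMA);
            System.out.println(mapper.readTree(broken));
        }
    }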
@@ -49,7 +49,7 @@
         </spark-opts>
         <arg>-mt</arg> <arg>yarn-cluster</arg>
         <arg>--sourcePath</arg><arg>${sourcePath}</arg>
-        <arg>--withelist</arg><arg>${whitelist}</arg>
+        <arg>--whitelist</arg><arg>${whitelist}</arg>
         <arg>--allowedtypes</arg><arg>${allowedtypes}</arg>
     </spark>
     <ok to="End"/>
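This hunk fixes a second, independent bug: the workflow passed --withelist, which matches no parameter long name, so the required whitelist value could never be bound. A tiny illustrative long-option lookup (my own code, not the project's actual parser) makes the failure mode visible:

    import java.util.HashMap;
    import java.util.Map;

    public class ArgLookupDemo {

        // Minimal long-option parser: each "--name" is followed by its value.
        static Map<String, String> parse(String... args) {
            final Map<String, String> values = new HashMap<>();
            for (int i = 0; i + 1 < args.length; i += 2) {
                values.put(args[i].replaceFirst("^--?", ""), args[i + 1]);
            }
            return values;
        }

        public static void main(String[] args) {
            // Before the fix: the value sits under the misspelled key,
            // so a lookup of the real parameter name finds nothing.
            System.out.println(parse("--withelist", "id1;id2").get("whitelist")); // null

            // After the fix: the required parameter is bound as expected.
            System.out.println(parse("--whitelist", "id1;id2").get("whitelist")); // id1;id2
        }
    }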
@@ -0,0 +1,26 @@
+[
+  {
+    "paramName":"mt",
+    "paramLongName":"master",
+    "paramDescription": "should be local or yarn",
+    "paramRequired": true
+  },
+  {
+    "paramName":"s",
+    "paramLongName":"sourcePath",
+    "paramDescription": "the path of the sequencial file to read",
+    "paramRequired": true
+  },
+  {
+    "paramName":"wl",
+    "paramLongName":"whitelist",
+    "paramDescription": "datasource id that will be considered even if not in the allowed typology list. Split by ;",
+    "paramRequired": true
+  },
+  {
+    "paramName":"at",
+    "paramLongName":"allowedtypes",
+    "paramDescription": "the types of the allowed datasources. Split by ;",
+    "paramRequired": true
+  }
+]
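For reference, a sketch of how such a descriptor can be bound to Java objects with Jackson; the Param POJO below is illustrative (its fields simply mirror the JSON keys), not a class from this commit:

    import com.fasterxml.jackson.databind.ObjectMapper;
    import java.util.Arrays;
    import java.util.List;

    public class ParamDescriptorDemo {

        // Illustrative POJO: public fields named after the JSON keys so
        // Jackson's default binding picks them up without annotations.
        public static class Param {
            public String paramName;
            public String paramLongName;
            public String paramDescription;
            public boolean paramRequired;
        }

        public static void main(String[] args) throws Exception {
            final String json = "[{\"paramName\": \"wl\", \"paramLongName\": \"whitelist\","
                    + " \"paramDescription\": \"ids, split by ;\", \"paramRequired\": true}]";
            final List<Param> params = Arrays.asList(new ObjectMapper().readValue(json, Param[].class));
            System.out.println(params.get(0).paramLongName + " required=" + params.get(0).paramRequired);
        }
    }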
@@ -0,0 +1,18 @@
+<configuration>
+    <property>
+        <name>jobTracker</name>
+        <value>yarnRM</value>
+    </property>
+    <property>
+        <name>nameNode</name>
+        <value>hdfs://nameservice1</value>
+    </property>
+    <property>
+        <name>oozie.use.system.libpath</name>
+        <value>true</value>
+    </property>
+    <property>
+        <name>oozie.action.sharelib.for.spark</name>
+        <value>spark2</value>
+    </property>
+</configuration>
@@ -0,0 +1,60 @@
+<workflow-app name="country_propagation" xmlns="uri:oozie:workflow:0.5">
+    <parameters>
+        <property>
+            <name>sourcePath</name>
+            <description>the source path</description>
+        </property>
+        <property>
+            <name>whitelist</name>
+            <description>the white list</description>
+        </property>
+        <property>
+            <name>allowedtypes</name>
+            <description>the allowed types</description>
+        </property>
+        <property>
+            <name>sparkDriverMemory</name>
+            <description>memory for driver process</description>
+        </property>
+        <property>
+            <name>sparkExecutorMemory</name>
+            <description>memory for individual executor</description>
+        </property>
+        <property>
+            <name>sparkExecutorCores</name>
+            <description>number of cores used by single executor</description>
+        </property>
+    </parameters>
+
+    <start to="CountryPropagation"/>
+
+    <kill name="Kill">
+        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
+    </kill>
+
+    <action name="CountryPropagation">
+        <spark xmlns="uri:oozie:spark-action:0.2">
+            <job-tracker>${jobTracker}</job-tracker>
+            <name-node>${nameNode}</name-node>
+            <master>yarn-cluster</master>
+            <mode>cluster</mode>
+            <name>CountryPropagation</name>
+            <class>eu.dnetlib.dhp.countrypropagation.SparkCountryPropagationJob</class>
+            <jar>dhp-propagation-${projectVersion}.jar</jar>
+            <spark-opts>--executor-memory ${sparkExecutorMemory}
+                --executor-cores ${sparkExecutorCores}
+                --driver-memory=${sparkDriverMemory}
+                --conf spark.extraListeners="com.cloudera.spark.lineage.NavigatorAppListener"
+                --conf spark.sql.queryExecutionListeners="com.cloudera.spark.lineage.NavigatorQueryListener"
+            </spark-opts>
+            <arg>-mt</arg> <arg>yarn-cluster</arg>
+            <arg>--sourcePath</arg><arg>${sourcePath}</arg>
+            <arg>--whitelist</arg><arg>${whitelist}</arg>
+            <arg>--allowedtypes</arg><arg>${allowedtypes}</arg>
+        </spark>
+        <ok to="End"/>
+        <error to="Kill"/>
+    </action>
+
+    <end name="End"/>
+</workflow-app>
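To close the loop, a hypothetical skeleton (not the actual eu.dnetlib.dhp.countrypropagation.SparkCountryPropagationJob, whose source is not part of this diff) showing how the four arg values the action passes could be consumed, applying the "Split by ;" convention stated in the parameter descriptions:

    import java.util.Arrays;
    import java.util.List;

    // Hypothetical skeleton only: it mirrors the four workflow arguments
    // (-mt, --sourcePath, --whitelist, --allowedtypes) and applies the
    // "Split by ;" rule from the parameter descriptor.
    public class CountryPropagationArgsSketch {

        public static void main(String[] args) {
            String master = null;
            String sourcePath = null;
            List<String> whitelist = null;
            List<String> allowedtypes = null;

            for (int i = 0; i + 1 < args.length; i += 2) {
                switch (args[i]) {
                    case "-mt":            master = args[i + 1]; break;
                    case "--sourcePath":   sourcePath = args[i + 1]; break;
                    case "--whitelist":    whitelist = Arrays.asList(args[i + 1].split(";")); break;
                    case "--allowedtypes": allowedtypes = Arrays.asList(args[i + 1].split(";")); break;
                }
            }

            System.out.println("master=" + master + ", sourcePath=" + sourcePath);
            System.out.println("whitelist=" + whitelist + ", allowedtypes=" + allowedtypes);
        }
    }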