
use oozie prepare statement to clean up working directories

Claudio Atzori 2020-03-30 19:48:41 +02:00
parent 3af2b8d700
commit 0fbec69b82
2 changed files with 4 additions and 5 deletions


@@ -29,14 +29,9 @@ public class SparkXmlRecordBuilderJob {
 		final String otherDsTypeId = parser.get("otherDsTypeId");
-		final FileSystem fs = FileSystem.get(spark.sparkContext().hadoopConfiguration());
-		if (fs.exists(new Path(outputPath))) {
-			fs.delete(new Path(outputPath), true);
-			fs.mkdirs(new Path(outputPath));
-		}
 		new GraphJoiner(spark, ContextMapper.fromIS(isLookupUrl), otherDsTypeId, inputPath, outputPath)
 			.adjacencyLists();
 			//.asXML();
 	}
 }
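
For reference, the programmatic cleanup dropped from the job above follows the standard HDFS FileSystem pattern. Below is a minimal, self-contained sketch of that pattern; the OutputPathCleanup class name is illustrative and not part of this repository. With this commit the same effect is obtained declaratively through the Oozie prepare block, so the job code no longer needs it.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Illustrative helper, not part of this repository: shows the programmatic
// HDFS cleanup pattern that the Oozie <prepare> block now replaces.
public class OutputPathCleanup {

	// Delete the output directory if it already exists, then recreate it,
	// so the Spark job always starts from an empty working directory.
	public static void reset(final Configuration conf, final String outputPath) throws IOException {
		final FileSystem fs = FileSystem.get(conf);
		final Path path = new Path(outputPath);
		if (fs.exists(path)) {
			fs.delete(path, true); // recursive delete
		}
		fs.mkdirs(path);
	}
}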


@@ -58,6 +58,10 @@
     <action name="adjancency_lists">
         <spark xmlns="uri:oozie:spark-action:0.2">
+            <prepare>
+                <delete path="${outputPath}"/>
+                <mkdir path="${outputPath}"/>
+            </prepare>
             <master>yarn</master>
             <mode>cluster</mode>
             <name>build_adjacency_lists</name>
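
Design note: Oozie runs the prepare delete/mkdir steps against HDFS before launching the Spark action, so the job can assume ${outputPath} is freshly created when it starts. This removes the need for each job to reimplement the FileSystem cleanup shown in the sketch above, and should also keep retried action attempts starting from a clean directory.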