OpenAIRE graph dump #51

MakeTar.java

@@ -4,7 +4,6 @@ package eu.dnetlib.dhp.oa.graph.dump;
 import java.io.*;
 import java.util.Optional;
 
-import eu.dnetlib.dhp.common.MakeTarArchive;
 import org.apache.commons.compress.archivers.ar.ArArchiveEntry;
 import org.apache.commons.compress.archivers.ar.ArArchiveOutputStream;
 import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
@@ -16,6 +15,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import eu.dnetlib.dhp.common.MakeTarArchive;
 import eu.dnetlib.dhp.oa.graph.dump.community.CommunityMap;
 
 public class MakeTar implements Serializable {
@@ -41,9 +41,10 @@ public class MakeTar implements Serializable {
         final String inputPath = parser.get("sourcePath");
         log.info("input path : {}", inputPath);
 
-        final int gBperSplit = Optional.ofNullable(parser.get("splitSize"))
-            .map(Integer::valueOf)
-            .orElse(10);
+        final int gBperSplit = Optional
+            .ofNullable(parser.get("splitSize"))
+            .map(Integer::valueOf)
+            .orElse(10);
 
         Configuration conf = new Configuration();
         conf.set("fs.defaultFS", hdfsNameNode);
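
Note: the reflowed Optional chain keeps the same behaviour: splitSize remains optional and falls back to 10 GB per split when the argument is absent. A minimal standalone sketch of the same fallback pattern (reading from args here is a stand-in for illustration, not the ArgumentApplicationParser API):

import java.util.Optional;

public class SplitSizeDefaultSketch {
    public static void main(String[] args) {
        // parser.get("splitSize") returns null when the argument is missing;
        // a nullable String simulates both cases.
        String raw = args.length > 0 ? args[0] : null;

        int gBperSplit = Optional
            .ofNullable(raw)          // empty Optional when no value was passed
            .map(Integer::valueOf)    // parse only when a value is present
            .orElse(10);              // default: 10 GB per split

        System.out.println("GB per split: " + gBperSplit);
    }
}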
@@ -54,7 +55,8 @@ public class MakeTar implements Serializable {
 
     }
 
-    public static void makeTArArchive(FileSystem fileSystem, String inputPath, String outputPath, int gBperSplit) throws IOException {
+    public static void makeTArArchive(FileSystem fileSystem, String inputPath, String outputPath, int gBperSplit)
+        throws IOException {
 
         RemoteIterator<LocatedFileStatus> dir_iterator = fileSystem.listLocatedStatus(new Path(inputPath));
 
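
Note: the widened signature threads gBperSplit into the archive writer; the splitting itself happens in MakeTarArchive, which this diff does not show. As a sketch only, assuming the writer rolls over to a new .tar part once a byte budget is exceeded (tarByBudget and newPart are hypothetical names, not the MakeTarArchive API):

import java.io.*;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;

public class TarSplitSketch {
    // Write files into prefix_0.tar, prefix_1.tar, ..., starting a new part
    // once the accumulated payload would exceed maxBytes.
    public static void tarByBudget(File[] files, String outPrefix, long maxBytes) throws IOException {
        int part = 0;
        long written = 0;
        TarArchiveOutputStream tar = newPart(outPrefix, part);
        for (File f : files) {
            if (written > 0 && written + f.length() > maxBytes) {
                tar.close();
                tar = newPart(outPrefix, ++part); // roll to the next split
                written = 0;
            }
            tar.putArchiveEntry(new TarArchiveEntry(f, f.getName()));
            try (InputStream in = new BufferedInputStream(new FileInputStream(f))) {
                byte[] buf = new byte[8192];
                int n;
                while ((n = in.read(buf)) != -1) {
                    tar.write(buf, 0, n);
                }
            }
            tar.closeArchiveEntry();
            written += f.length();
        }
        tar.close();
    }

    private static TarArchiveOutputStream newPart(String prefix, int part) throws IOException {
        return new TarArchiveOutputStream(
            new BufferedOutputStream(new FileOutputStream(prefix + "_" + part + ".tar")));
    }
}

Counting written += f.length() approximates the payload and ignores tar header overhead, which should be acceptable at gigabyte-scale budgets.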
@@ -70,5 +72,4 @@ public class MakeTar implements Serializable {
 
     }
 
-
 }
SendToZenodoHDFS.java

@@ -98,8 +98,10 @@ public class SendToZenodoHDFS implements Serializable {
             }
 
         }
+        if (!metadata.equals("")) {
+            zenodoApiClient.sendMretadata(metadata);
+        }
 
-        zenodoApiClient.sendMretadata(metadata);
         if (publish)
             zenodoApiClient.publish();
 
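Note: the new guard stops the run from posting empty metadata to Zenodo when the workflow leaves ${metadata} blank (sendMretadata, spelling included, is the client method name this code already uses). A minimal sketch of the guard against a hypothetical stand-in for the client; checking null as well would be slightly more defensive than equals(""):

public class MetadataGuardSketch {
    // Hypothetical stand-in for the Zenodo client; only the call shape matters.
    interface MetadataClient {
        void sendMretadata(String metadata);
    }

    static void maybeSendMetadata(MetadataClient client, String metadata) {
        // Mirrors the patch: only push metadata when the workflow supplied some.
        if (metadata != null && !metadata.isEmpty()) {
            client.sendMretadata(metadata);
        }
    }

    public static void main(String[] args) {
        maybeSendMetadata(m -> System.out.println("would send: " + m), "");   // skipped
        maybeSendMetadata(m -> System.out.println("would send: " + m), "{}"); // sent
    }
}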
Utils.java

@@ -5,7 +5,6 @@ import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStreamReader;
 
-
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.spark.api.java.function.MapFunction;
@@ -13,14 +12,12 @@ import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Encoders;
 import org.apache.spark.sql.SparkSession;
 
-
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.gson.Gson;
 
 import eu.dnetlib.dhp.common.HdfsSupport;
 import eu.dnetlib.dhp.oa.graph.dump.community.CommunityMap;
 import eu.dnetlib.dhp.oa.graph.dump.graph.Constants;
-
 import eu.dnetlib.dhp.utils.DHPUtils;
 import eu.dnetlib.dhp.utils.ISLookupClientFactory;
 import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
@@ -74,5 +71,4 @@ public class Utils {
         return new Gson().fromJson(sb.toString(), CommunityMap.class);
     }
 
-
 }
workflow.xml

@@ -418,7 +418,7 @@
             <arg>--metadata</arg><arg>${metadata}</arg>
             <arg>--communityMapPath</arg><arg>${workingDir}/communityMap</arg>
             <arg>--conceptRecordId</arg><arg>${conceptRecordId}</arg>
-            <arg>--newDeposition</arg><arg>${newDeposition}</arg>
+            <arg>--depositionType</arg><arg>${depositionType}</arg>
         </java>
         <ok to="End"/>
         <error to="Kill"/>
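
Note: replacing the boolean-style --newDeposition with --depositionType suggests SendToZenodoHDFS now distinguishes more than two deposition modes. A sketch of such a dispatch; the accepted values ("new", "version", "update") are assumptions, since the consuming code is not part of this diff:

public class DepositionTypeSketch {
    public static void main(String[] args) {
        // Hypothetical handling of the new workflow parameter.
        String depositionType = args.length > 0 ? args[0] : "new";
        switch (depositionType) {
            case "new":
                System.out.println("open a brand-new deposition");
                break;
            case "version":
                System.out.println("create a new version of an existing concept record");
                break;
            case "update":
                System.out.println("upload into an already-open deposition");
                break;
            default:
                throw new IllegalArgumentException("unknown depositionType: " + depositionType);
        }
    }
}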