package eu.dnetlib.dhp.oa.graph.dump.complete;

import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.function.Consumer;
import java.util.function.Function;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.oa.graph.dump.UtilCommunityAPI;
import eu.dnetlib.dhp.oa.graph.dump.Utils;
import eu.dnetlib.dhp.oa.graph.dump.exceptions.MyRuntimeException;
import eu.dnetlib.dhp.oa.graph.dump.subset.MasterDuplicate;
import eu.dnetlib.dhp.oa.model.graph.*;

/**
 * Writes the set of new Relations between the contexts and the datasources. At the moment the relations between the
 * contexts and the projects are not created because of the low coverage of OpenAIRE project ids in the context
 * profiles.
 */
public class CreateContextRelation implements Serializable {

	private static final Logger log = LoggerFactory.getLogger(CreateContextRelation.class);

	private final transient Configuration conf;
	private final transient BufferedWriter writer;

	public static void main(String[] args) throws Exception {
		String jsonConfiguration = IOUtils
			.toString(
				Objects
					.requireNonNull(
						CreateContextRelation.class
							.getResourceAsStream(
								"/eu/dnetlib/dhp/oa/graph/dump/input_entity_parameter.json")));

		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
		parser.parseArgument(args);

		Boolean isSparkSessionManaged = Optional
			.ofNullable(parser.get("isSparkSessionManaged"))
			.map(Boolean::valueOf)
			.orElse(Boolean.TRUE);
		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

		final String hdfsPath = parser.get("hdfsPath");
		log.info("hdfsPath: {}", hdfsPath);

		final String hdfsNameNode = parser.get("nameNode");
		log.info("hdfsNameNode: {}", hdfsNameNode);

		final CreateContextRelation cce = new CreateContextRelation(hdfsPath, hdfsNameNode);

		log.info("Creating relation for datasources and projects...");
		cce
			.execute(
				Process::getRelation);

		cce.close();
	}

	private void close() throws IOException {
		writer.close();
	}

	public CreateContextRelation(String hdfsPath, String hdfsNameNode) throws IOException {
		this.conf = new Configuration();
		this.conf.set("fs.defaultFS", hdfsNameNode);

		// Open the output file on HDFS, appending if it already exists.
		FileSystem fileSystem = FileSystem.get(this.conf);
		Path hdfsWritePath = new Path(hdfsPath);
		FSDataOutputStream fsDataOutputStream = null;
		if (fileSystem.exists(hdfsWritePath)) {
			fsDataOutputStream = fileSystem.append(hdfsWritePath);
		} else {
			fsDataOutputStream = fileSystem.create(hdfsWritePath);
		}

		this.writer = new BufferedWriter(new OutputStreamWriter(fsDataOutputStream, StandardCharsets.UTF_8));
	}

	public void execute(final Function<ContextInfo, List<Relation>> producer) throws IOException {
		// For each context returned by the community API, write the relations produced by the given function.
		final Consumer<ContextInfo> consumer = ci -> producer.apply(ci).forEach(this::writeEntity);

		UtilCommunityAPI queryCommunityAPI = new UtilCommunityAPI();

		queryCommunityAPI.getContextRelation().forEach(ci -> consumer.accept(ci));
	}

	protected void writeEntity(final Relation r) {
		try {
			// Serialize each Relation as one JSON line.
			writer.write(Utils.OBJECT_MAPPER.writeValueAsString(r));
			writer.newLine();
		} catch (final Exception e) {
			throw new MyRuntimeException(e);
		}
	}

}
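
/*
 * Minimal usage sketch: main() builds the instance from the "hdfsPath" and "nameNode" arguments and then runs the
 * equivalent of the snippet below; the HDFS path and name node shown here are illustrative placeholders, not values
 * taken from any configuration.
 *
 *   new CreateContextRelation("/tmp/dump/relation/context", "hdfs://nameservice1")
 *       .execute(Process::getRelation);
 *
 * This writes one JSON-serialized Relation per line to the target HDFS file. close() is private and is invoked from
 * main(), so callers going through main() do not need to manage the writer themselves.
 */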