
changes in the blacklist and workflow definition

Miriam Baglioni 2020-04-30 10:26:50 +02:00
parent 564e5d6279
commit 354f0162be
2 changed files with 109 additions and 99 deletions

eu/dnetlib/dhp/blacklist/ReadBlacklistFromDB.java

@@ -1,13 +1,5 @@
 package eu.dnetlib.dhp.blacklist;
 
-import eu.dnetlib.dhp.application.ArgumentApplicationParser;
-import eu.dnetlib.dhp.oa.graph.raw.common.DbClient;
-import eu.dnetlib.dhp.schema.common.ModelSupport;
-import eu.dnetlib.dhp.schema.common.RelationInverse;
-import eu.dnetlib.dhp.schema.oaf.Relation;
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 
 import java.io.BufferedWriter;
 import java.io.Closeable;
@@ -19,120 +11,125 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.function.Consumer;
 import java.util.function.Function;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import eu.dnetlib.dhp.oa.graph.raw.common.DbClient;
+import eu.dnetlib.dhp.schema.common.ModelSupport;
+import eu.dnetlib.dhp.schema.common.RelationInverse;
+import eu.dnetlib.dhp.schema.oaf.Relation;
 
 public class ReadBlacklistFromDB implements Closeable {
 
 	private final DbClient dbClient;
 	private static final Log log = LogFactory.getLog(ReadBlacklistFromDB.class);
 	private final Configuration conf;
 	private final BufferedWriter writer;
 
 	private final static String query = "SELECT source_type, unnest(original_source_objects) as source, " +
 		"target_type, unnest(original_target_objects) as target, " +
 		"relationship FROM blacklist WHERE status = 'ACCEPTED'";
 
 	public static void main(final String[] args) throws Exception {
-		final ArgumentApplicationParser parser =
-				new ArgumentApplicationParser(
-						IOUtils.toString(
-								ReadBlacklistFromDB.class.getResourceAsStream(
-										"/eu/dnetlib/dhp/blacklist/blacklist_parameters.json")));
+		final ArgumentApplicationParser parser = new ArgumentApplicationParser(
+			IOUtils
+				.toString(
+					ReadBlacklistFromDB.class
						.getResourceAsStream(
+							"/eu/dnetlib/dhp/blacklist/blacklist_parameters.json")));
 
 		parser.parseArgument(args);
 
 		final String dbUrl = parser.get("postgresUrl");
 		final String dbUser = parser.get("postgresUser");
 		final String dbPassword = parser.get("postgresPassword");
 		final String hdfsPath = parser.get("hdfsPath");
 		final String hdfsNameNode = parser.get("hdfsNameNode");
 
-		try (final ReadBlacklistFromDB rbl =
-				new ReadBlacklistFromDB(hdfsPath, hdfsNameNode, dbUrl, dbUser, dbPassword)) {
+		try (final ReadBlacklistFromDB rbl = new ReadBlacklistFromDB(hdfsPath, hdfsNameNode, dbUrl, dbUser,
+			dbPassword)) {
 
 			log.info("Processing blacklist...");
 			rbl.execute(query, rbl::processBlacklistEntry);
 
 		}
 	}
 
 	public void execute(final String sql, final Function<ResultSet, List<Relation>> producer)
 		throws Exception {
 
 		final Consumer<ResultSet> consumer = rs -> producer.apply(rs).forEach(r -> writeRelation(r));
 
 		dbClient.processResults(sql, consumer);
 	}
 
-	public List<Relation> processBlacklistEntry(ResultSet rs){
+	public List<Relation> processBlacklistEntry(ResultSet rs) {
 		try {
 			Relation direct = new Relation();
 			Relation inverse = new Relation();
 
 			String source_prefix = ModelSupport.entityIdPrefix.get(rs.getString("source_type"));
 			String target_prefix = ModelSupport.entityIdPrefix.get(rs.getString("target_type"));
 
 			String source_direct = source_prefix + "|" + rs.getString("source");
 			direct.setSource(source_direct);
 			inverse.setTarget(source_direct);
 
 			String target_direct = target_prefix + "|" + rs.getString("target");
 			direct.setTarget(target_direct);
 			inverse.setSource(target_direct);
 
 			String encoding = rs.getString("relationship");
 			RelationInverse ri = ModelSupport.relationInverseMap.get(encoding);
 			direct.setRelClass(ri.getRelation());
 			inverse.setRelClass(ri.getInverse());
 			direct.setRelType(ri.getRelType());
 			inverse.setRelType(ri.getRelType());
 			direct.setSubRelType(ri.getSubReltype());
 			inverse.setSubRelType(ri.getSubReltype());
 
 			return Arrays.asList(direct, inverse);
 
 		} catch (final Exception e) {
 			throw new RuntimeException(e);
 		}
 	}
 
 	@Override
 	public void close() throws IOException {
 		dbClient.close();
 		writer.close();
 	}
 
 	public ReadBlacklistFromDB(
 		final String hdfsPath, String hdfsNameNode, final String dbUrl, final String dbUser, final String dbPassword)
 		throws Exception {
 		this.dbClient = new DbClient(dbUrl, dbUser, dbPassword);
 		this.conf = new Configuration();
 		this.conf.set("fs.defaultFS", hdfsNameNode);
 		FileSystem fileSystem = FileSystem.get(this.conf);
 		Path hdfsWritePath = new Path(hdfsPath);
 		FSDataOutputStream fsDataOutputStream = fileSystem.append(hdfsWritePath);
 		this.writer = new BufferedWriter(new OutputStreamWriter(fsDataOutputStream, StandardCharsets.UTF_8));
 	}
 
 	protected void writeRelation(final Relation r) {
 		try {
 			writer.write(new ObjectMapper().writeValueAsString(r));
 			writer.newLine();
 		} catch (final Exception e) {
 			throw new RuntimeException(e);
 		}
 	}
 }
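Read as a whole, the class streams every ACCEPTED row of the blacklist table out of PostgreSQL, builds a direct/inverse pair of Relation objects per row (prefixing the raw identifiers via ModelSupport.entityIdPrefix and resolving the relation semantics via ModelSupport.relationInverseMap), and appends each relation as one JSON line to a file on HDFS. A minimal launcher sketch follows; the flag names are exactly those the workflow below passes to the <java> action, while every value is a placeholder rather than a real endpoint:

public class ReadBlacklistFromDBExample {

	public static void main(String[] args) throws Exception {
		// Same flags the Oozie <java> action passes; all values are placeholders.
		ReadBlacklistFromDB.main(new String[] {
			"--hdfsPath", "/tmp/working_dir/blacklist", // placeholder: HDFS file the relations are appended to
			"--hdfsNameNode", "hdfs://localhost:8020", // placeholder name node URI
			"--postgresUrl", "jdbc:postgresql://localhost:5432/dnet", // placeholder JDBC URL
			"--postgresUser", "dnet", // placeholder credentials
			"--postgresPassword", "secret"
		});
	}
}

Note that the constructor opens the output via FileSystem.append(hdfsWritePath), so it presumes the target file is already present on HDFS when the tool starts.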

workflow.xml

@@ -1,5 +1,18 @@
-<workflow-app name="blacklsting" xmlns="uri:oozie:workflow:0.5">
+<workflow-app name="blacklisting" xmlns="uri:oozie:workflow:0.5">
+    <parameters>
+        <property>
+            <name>postgresURL</name>
+            <description>the url of the postgres server to query</description>
+        </property>
+        <property>
+            <name>postgresUser</name>
+            <description>the username to access the postgres db</description>
+        </property>
+        <property>
+            <name>postgresPassword</name>
+            <description>the postgres password</description>
+        </property>
+    </parameters>
 
     <start to="reset-outputpath"/>
 
     <kill name="Kill">
@@ -8,7 +21,7 @@
 
     <action name="reset-outputpath">
         <fs>
-            <delete path='${hdfsPath}'/>
+            <delete path='${workingDir}/blacklist'/>
         </fs>
         <ok to="read_blacklist"/>
         <error to="Kill"/>
@@ -21,7 +34,7 @@
             <main-class>eu.dnetlib.dhp.blacklist.ReadBlacklistFromDB</main-class>
             <arg>--hdfsPath</arg><arg>${workingDir}/blacklist</arg>
             <arg>--hdfsNameNode</arg><arg>${nameNode}</arg>
-            <arg>--postgresUrl</arg><arg>${postgresUrl}</arg>
+            <arg>--postgresUrl</arg><arg>${postgresURL}</arg>
             <arg>--postgresUser</arg><arg>${postgresUser}</arg>
             <arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
         </java>