forked from D-Net/dnet-hadoop
reads the blacklist from the blacklist db and writes it as a set of relations on hdfs
parent f7695e833c
commit b85ad7012a
RelationInverse.java
@@ -0,0 +1,4 @@
package eu.dnetlib.dhp.schema.common;

public class RelationInverse {
}
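The hunk above only shows the skeleton of RelationInverse; ReadBlacklistFromDB below calls getRelation(), getInverse(), getRelType() and getSubReltype() on it, so the bean presumably carries those four properties. A minimal sketch of what the class likely contains (field names and setters are inferred from the getters used below, not part of this commit):

// Sketch only — would live in eu.dnetlib.dhp.schema.common, as in the stub above.
public class RelationInverse {

    private String relation;   // relClass of the direct relation
    private String inverse;    // relClass of the inverse relation
    private String relType;
    private String subReltype;

    public String getRelation() { return relation; }
    public void setRelation(final String relation) { this.relation = relation; }

    public String getInverse() { return inverse; }
    public void setInverse(final String inverse) { this.inverse = inverse; }

    public String getRelType() { return relType; }
    public void setRelType(final String relType) { this.relType = relType; }

    public String getSubReltype() { return subReltype; }
    public void setSubReltype(final String subReltype) { this.subReltype = subReltype; }
}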
dhp-blacklist/pom.xml
@@ -0,0 +1,15 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>dhp-workflows</artifactId>
        <groupId>eu.dnetlib.dhp</groupId>
        <version>1.1.7-SNAPSHOT</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>

    <artifactId>dhp-blacklist</artifactId>

</project>
ReadBlacklistFromDB.java
@@ -0,0 +1,138 @@
package eu.dnetlib.dhp.blacklist;

import java.io.BufferedWriter;
import java.io.Closeable;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.sql.ResultSet;
import java.util.Arrays;
import java.util.List;
import java.util.function.Consumer;
import java.util.function.Function;

import org.apache.commons.io.IOUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.oa.graph.raw.common.DbClient;
import eu.dnetlib.dhp.schema.common.ModelSupport;
import eu.dnetlib.dhp.schema.common.RelationInverse;
import eu.dnetlib.dhp.schema.oaf.Relation;

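/**
 * Reads the accepted entries of the blacklist database and writes each of them, together
 * with its inverse, to HDFS as a JSON-serialized {@link Relation}.
 */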
public class ReadBlacklistFromDB implements Closeable {

    private final DbClient dbClient;
    private static final Log log = LogFactory.getLog(ReadBlacklistFromDB.class);
    private final Configuration conf;
    private final BufferedWriter writer;

    private static final String query = "SELECT source_type, unnest(original_source_objects) as source, " +
            "target_type, unnest(original_target_objects) as target, " +
            "relationship FROM blacklist WHERE status = 'ACCEPTED'";

    public static void main(final String[] args) throws Exception {
        final ArgumentApplicationParser parser =
                new ArgumentApplicationParser(
                        IOUtils.toString(
                                ReadBlacklistFromDB.class.getResourceAsStream(
                                        "/eu/dnetlib/dhp/blacklist/blacklist_parameters.json")));

        parser.parseArgument(args);

        final String dbUrl = parser.get("postgresUrl");
        final String dbUser = parser.get("postgresUser");
        final String dbPassword = parser.get("postgresPassword");
        final String hdfsPath = parser.get("hdfsPath");
        final String hdfsNameNode = parser.get("hdfsNameNode");

        try (final ReadBlacklistFromDB rbl =
                new ReadBlacklistFromDB(hdfsPath, hdfsNameNode, dbUrl, dbUser, dbPassword)) {

            log.info("Processing blacklist...");
            rbl.execute(query, rbl::processBlacklistEntry);
        }
    }

    public void execute(final String sql, final Function<ResultSet, List<Relation>> producer)
            throws Exception {

        final Consumer<ResultSet> consumer = rs -> producer.apply(rs).forEach(r -> writeRelation(r));
        dbClient.processResults(sql, consumer);
    }

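    // Maps one blacklist row onto two Relation objects: the direct relation and its inverse.
    // relClass/relType/subRelType are resolved through ModelSupport.relationInverseMap, and the
    // source/target identifiers are prefixed according to their entity type via ModelSupport.entityIdPrefix.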
    public List<Relation> processBlacklistEntry(ResultSet rs) {
        try {
            Relation direct = new Relation();
            Relation inverse = new Relation();

            String source_prefix = ModelSupport.entityIdPrefix.get(rs.getString("source_type"));
            String target_prefix = ModelSupport.entityIdPrefix.get(rs.getString("target_type"));

            String source_direct = source_prefix + "|" + rs.getString("source");
            direct.setSource(source_direct);
            inverse.setTarget(source_direct);

            String target_direct = target_prefix + "|" + rs.getString("target");
            direct.setTarget(target_direct);
            inverse.setSource(target_direct);

            String encoding = rs.getString("relationship");
            RelationInverse ri = ModelSupport.relationInverseMap.get(encoding);
            direct.setRelClass(ri.getRelation());
            inverse.setRelClass(ri.getInverse());
            direct.setRelType(ri.getRelType());
            inverse.setRelType(ri.getRelType());
            direct.setSubRelType(ri.getSubReltype());
            inverse.setSubRelType(ri.getSubReltype());

            return Arrays.asList(direct, inverse);

        } catch (final Exception e) {
            throw new RuntimeException(e);
        }
    }

    @Override
    public void close() throws IOException {
        dbClient.close();
        writer.close();
    }

    public ReadBlacklistFromDB(
            final String hdfsPath, String hdfsNameNode, final String dbUrl, final String dbUser, final String dbPassword)
            throws Exception {

        this.dbClient = new DbClient(dbUrl, dbUser, dbPassword);
        this.conf = new Configuration();
        this.conf.set("fs.defaultFS", hdfsNameNode);

        FileSystem fileSystem = FileSystem.get(this.conf);
        Path hdfsWritePath = new Path(hdfsPath);
        // FileSystem.append() fails when the target file does not exist yet, so create it in that case.
        FSDataOutputStream fsDataOutputStream = fileSystem.exists(hdfsWritePath)
                ? fileSystem.append(hdfsWritePath)
                : fileSystem.create(hdfsWritePath);
        this.writer = new BufferedWriter(new OutputStreamWriter(fsDataOutputStream, StandardCharsets.UTF_8));
    }

    protected void writeRelation(final Relation r) {
        try {
            writer.write(new ObjectMapper().writeValueAsString(r));
            writer.newLine();
        } catch (final Exception e) {
            throw new RuntimeException(e);
        }
    }
}
blacklist_parameters.json
@@ -0,0 +1,32 @@
[
  {
    "paramName": "p",
    "paramLongName": "hdfsPath",
    "paramDescription": "the path where the sequential file is stored",
    "paramRequired": true
  },
  {
    "paramName": "pgurl",
    "paramLongName": "postgresUrl",
    "paramDescription": "postgres url, example: jdbc:postgresql://localhost:5432/testdb",
    "paramRequired": true
  },
  {
    "paramName": "pguser",
    "paramLongName": "postgresUser",
    "paramDescription": "postgres user",
    "paramRequired": false
  },
  {
    "paramName": "pgpasswd",
    "paramLongName": "postgresPassword",
    "paramDescription": "postgres password",
    "paramRequired": false
  },
  {
    "paramName": "a",
    "paramLongName": "action",
    "paramDescription": "process claims",
    "paramRequired": false
  }
]
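For reference, a minimal, hypothetical invocation of the reader using the long parameter names defined above; all values are placeholders. Note that main() also looks up hdfsNameNode, which the parameter file above does not declare, so an extra entry for it would be needed before this could run:

// Hypothetical usage sketch; class name and argument values are placeholders, not real endpoints.
import eu.dnetlib.dhp.blacklist.ReadBlacklistFromDB;

public class ReadBlacklistFromDBExample {
    public static void main(String[] args) throws Exception {
        ReadBlacklistFromDB.main(new String[] {
                "--hdfsPath", "/tmp/blacklist/relations",
                "--hdfsNameNode", "hdfs://namenode:8020",   // assumes an hdfsNameNode entry is added to the json above
                "--postgresUrl", "jdbc:postgresql://localhost:5432/testdb",
                "--postgresUser", "dnet",
                "--postgresPassword", "***"
        });
    }
}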