Create dedup records for "merged" pivots
Do not create dedup records for groups that have more than 20 different acceptance dates
parent 10e135db1e
commit 3c66e3bd7b
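Before the full diff, a minimal framework-free sketch of the second rule (the 20-date cap). Class and variable names here are illustrative; only MAX_ACCEPTANCE_DATE and the "< cap / >= cap" checks mirror the code below, which applies the same rule inside a Spark reduceGroups/flatMap pipeline and, for the "merged" pivots, additionally re-emits the merged record under each alias id.

import java.util.*;

// Simplified illustration of the acceptance-date cap; not the Spark implementation.
public class AcceptanceDateCapSketch {

    static final int MAX_ACCEPTANCE_DATE = 20; // same threshold as in the diff below

    public static void main(String[] args) {
        // (dedupId, dateofacceptance) pairs standing in for the merged entities
        String[][] merged = {
            { "dedup_1", "2020-01-01" }, { "dedup_1", "2020-01-01" },
            { "dedup_2", "2019-05-05" }, { "dedup_2", "2021-07-07" }
        };

        // "reduce" step: accumulate the distinct acceptance dates seen per dedup group
        Map<String, Set<String>> datesPerGroup = new HashMap<>();
        for (String[] m : merged) {
            datesPerGroup.computeIfAbsent(m[0], k -> new HashSet<>()).add(m[1]);
        }

        // "flatMap" step: a dedup record is emitted only while the group stays under the cap
        for (Map.Entry<String, Set<String>> e : datesPerGroup.entrySet()) {
            if (e.getValue().size() >= MAX_ACCEPTANCE_DATE) {
                continue; // too many different acceptance dates: skip the group
            }
            System.out.println("emit dedup record for " + e.getKey());
        }
    }
}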
DedupRecordFactory.java

@@ -1,130 +1,187 @@
 package eu.dnetlib.dhp.oa.dedup;

-import java.lang.reflect.InvocationTargetException;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-
-import org.apache.spark.api.java.function.MapFunction;
-import org.apache.spark.api.java.function.ReduceFunction;
-import org.apache.spark.sql.Dataset;
-import org.apache.spark.sql.Encoders;
-import org.apache.spark.sql.Row;
-import org.apache.spark.sql.SparkSession;
-
-import com.fasterxml.jackson.databind.DeserializationFeature;
-import com.fasterxml.jackson.databind.ObjectMapper;
-
 import eu.dnetlib.dhp.oa.dedup.model.Identifier;
 import eu.dnetlib.dhp.oa.merge.AuthorMerger;
 import eu.dnetlib.dhp.schema.common.ModelSupport;
-import eu.dnetlib.dhp.schema.oaf.*;
+import eu.dnetlib.dhp.schema.oaf.Author;
+import eu.dnetlib.dhp.schema.oaf.DataInfo;
+import eu.dnetlib.dhp.schema.oaf.OafEntity;
+import eu.dnetlib.dhp.schema.oaf.Result;
+import org.apache.commons.beanutils.BeanUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.spark.api.java.function.FlatMapFunction;
+import org.apache.spark.api.java.function.MapFunction;
+import org.apache.spark.api.java.function.ReduceFunction;
+import org.apache.spark.sql.*;
 import scala.Tuple2;
+import scala.Tuple3;
+import scala.collection.JavaConversions;
+
+import java.util.*;
+import java.util.stream.Stream;

 public class DedupRecordFactory {

-    protected static final ObjectMapper OBJECT_MAPPER = new ObjectMapper()
-        .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
-
-    private DedupRecordFactory() {
-    }
-
-    public static <T extends OafEntity> Dataset<T> createDedupRecord(
-        final SparkSession spark,
-        final DataInfo dataInfo,
-        final String mergeRelsInputPath,
-        final String entitiesInputPath,
-        final Class<T> clazz) {
-
-        long ts = System.currentTimeMillis();
-
-        // <id, json_entity>
-        Dataset<Row> entities = spark
-            .read()
-            .schema(Encoders.bean(clazz).schema())
-            .json(entitiesInputPath)
-            .as(Encoders.bean(clazz))
-            .map(
-                (MapFunction<T, Tuple2<String, T>>) entity -> {
-                    return new Tuple2<>(entity.getId(), entity);
-                },
-                Encoders.tuple(Encoders.STRING(), Encoders.kryo(clazz)))
-            .selectExpr("_1 AS id", "_2 AS kryoObject");
-
-        // <source, target>: source is the dedup_id, target is the id of the mergedIn
-        Dataset<Row> mergeRels = spark
-            .read()
-            .load(mergeRelsInputPath)
-            .where("relClass == 'merges'")
-            .selectExpr("source as dedupId", "target as id");
-
-        return mergeRels
-            .join(entities, "id")
-            .select("dedupId", "kryoObject")
-            .as(Encoders.tuple(Encoders.STRING(), Encoders.kryo(clazz)))
-            .groupByKey((MapFunction<Tuple2<String, T>, String>) Tuple2::_1, Encoders.STRING())
-            .reduceGroups(
-                (ReduceFunction<Tuple2<String, T>>) (t1, t2) -> new Tuple2<>(t1._1(),
-                    reduceEntity(t1._1(), t1._2(), t2._2(), clazz)))
-            .map(
-                (MapFunction<Tuple2<String, Tuple2<String, T>>, T>) t -> {
-                    T res = t._2()._2();
-                    res.setDataInfo(dataInfo);
-                    res.setLastupdatetimestamp(ts);
-                    return res;
-                },
-                Encoders.bean(clazz));
-    }
-
-    public static <T extends OafEntity> T reduceEntity(
-        String id, T entity, T duplicate, Class<T> clazz) {
-
-        int compare = new IdentifierComparator()
-            .compare(Identifier.newInstance(entity), Identifier.newInstance(duplicate));
-
-        if (compare > 0) {
-            T swap = duplicate;
-            duplicate = entity;
-            entity = swap;
-        }
-
-        entity.mergeFrom(duplicate);
-        entity.setId(id);
-
-        if (ModelSupport.isSubClass(duplicate, Result.class)) {
-            Result re = (Result) entity;
-            Result rd = (Result) duplicate;
-
-            List<List<Author>> authors = new ArrayList<>();
-            if (re.getAuthor() != null) {
-                authors.add(re.getAuthor());
-            }
-            if (rd.getAuthor() != null) {
-                authors.add(rd.getAuthor());
-            }
-
-            re.setAuthor(AuthorMerger.merge(authors));
-        }
-
-        return entity;
-    }
-
-    public static <T extends OafEntity> T entityMerger(
-        String id, Iterator<Tuple2<String, T>> entities, long ts, DataInfo dataInfo, Class<T> clazz)
-        throws IllegalAccessException, InstantiationException, InvocationTargetException {
-        T base = entities.next()._2();
-
-        while (entities.hasNext()) {
-            T duplicate = entities.next()._2();
-            if (duplicate != null)
-                base = reduceEntity(id, base, duplicate, clazz);
-        }
-
-        base.setDataInfo(dataInfo);
-        base.setLastupdatetimestamp(ts);
-
-        return base;
-    }
+    public static final class DedupRecordReduceState {
+
+        public final String dedupId;
+
+        public final ArrayList<String> aliases = new ArrayList<>();
+
+        public final HashSet<String> acceptanceDate = new HashSet<>();
+
+        public OafEntity entity;
+
+        public DedupRecordReduceState(String dedupId, String id, OafEntity entity) {
+            this.dedupId = dedupId;
+            this.entity = entity;
+            if (entity == null) {
+                aliases.add(id);
+            } else {
+                if (Result.class.isAssignableFrom(entity.getClass())) {
+                    Result result = (Result) entity;
+                    if (result.getDateofacceptance() != null
+                        && StringUtils.isNotBlank(result.getDateofacceptance().getValue())) {
+                        acceptanceDate.add(result.getDateofacceptance().getValue());
+                    }
+                }
+            }
+        }
+
+        public String getDedupId() {
+            return dedupId;
+        }
+    }
+
+    private static final int MAX_ACCEPTANCE_DATE = 20;
+
+    private DedupRecordFactory() {
+    }
+
+    public static Dataset<OafEntity> createDedupRecord(
+        final SparkSession spark,
+        final DataInfo dataInfo,
+        final String mergeRelsInputPath,
+        final String entitiesInputPath,
+        final Class<OafEntity> clazz) {
+
+        final long ts = System.currentTimeMillis();
+        final Encoder<OafEntity> beanEncoder = Encoders.bean(clazz);
+        final Encoder<OafEntity> kryoEncoder = Encoders.kryo(clazz);
+
+        // <id, json_entity>
+        Dataset<Row> entities = spark
+            .read()
+            .schema(Encoders.bean(clazz).schema())
+            .json(entitiesInputPath)
+            .as(beanEncoder)
+            .map(
+                (MapFunction<OafEntity, Tuple2<String, OafEntity>>) entity -> {
+                    return new Tuple2<>(entity.getId(), entity);
+                },
+                Encoders.tuple(Encoders.STRING(), kryoEncoder))
+            .selectExpr("_1 AS id", "_2 AS kryoObject");
+
+        // <source, target>: source is the dedup_id, target is the id of the mergedIn
+        Dataset<Row> mergeRels = spark
+            .read()
+            .load(mergeRelsInputPath)
+            .where("relClass == 'merges'")
+            .selectExpr("source as dedupId", "target as id");
+
+        return mergeRels
+            .join(entities, JavaConversions.asScalaBuffer(Collections.singletonList("id")), "left")
+            .select("dedupId", "id", "kryoObject")
+            .as(Encoders.tuple(Encoders.STRING(), Encoders.STRING(), kryoEncoder))
+            .map(
+                (MapFunction<Tuple3<String, String, OafEntity>, DedupRecordReduceState>) t -> new DedupRecordReduceState(
+                    t._1(), t._2(), t._3()),
+                Encoders.kryo(DedupRecordReduceState.class))
+            .groupByKey(
+                (MapFunction<DedupRecordReduceState, String>) DedupRecordReduceState::getDedupId, Encoders.STRING())
+            .reduceGroups(
+                (ReduceFunction<DedupRecordReduceState>) (t1, t2) -> {
+                    if (t1.entity == null) {
+                        t2.aliases.addAll(t1.aliases);
+                        return t2;
+                    }
+                    if (t1.acceptanceDate.size() < MAX_ACCEPTANCE_DATE) {
+                        t1.acceptanceDate.addAll(t2.acceptanceDate);
+                    }
+                    t1.aliases.addAll(t2.aliases);
+                    t1.entity = reduceEntity(t1.entity, t2.entity);
+
+                    return t1;
+                })
+            .flatMap(
+                (FlatMapFunction<Tuple2<String, DedupRecordReduceState>, OafEntity>) t -> {
+                    String dedupId = t._1();
+                    DedupRecordReduceState agg = t._2();
+
+                    if (agg.acceptanceDate.size() >= MAX_ACCEPTANCE_DATE) {
+                        return Collections.emptyIterator();
+                    }
+
+                    return Stream.concat(Stream.of(agg.getDedupId()), agg.aliases.stream())
+                        .map(id -> {
+                            try {
+                                OafEntity res = (OafEntity) BeanUtils.cloneBean(agg.entity);
+                                res.setId(id);
+                                res.setDataInfo(dataInfo);
+                                res.setLastupdatetimestamp(ts);
+                                return res;
+                            } catch (Exception e) {
+                                throw new RuntimeException(e);
+                            }
+                        }).iterator();
+                }, beanEncoder);
+    }
+
+    private static OafEntity reduceEntity(OafEntity entity, OafEntity duplicate) {
+
+        if (duplicate == null) {
+            return entity;
+        }
+
+        int compare = new IdentifierComparator<>()
+            .compare(Identifier.newInstance(entity), Identifier.newInstance(duplicate));
+
+        if (compare > 0) {
+            OafEntity swap = duplicate;
+            duplicate = entity;
+            entity = swap;
+        }
+
+        entity.mergeFrom(duplicate);
+
+        if (ModelSupport.isSubClass(duplicate, Result.class)) {
+            Result re = (Result) entity;
+            Result rd = (Result) duplicate;
+
+            List<List<Author>> authors = new ArrayList<>();
+            if (re.getAuthor() != null) {
+                authors.add(re.getAuthor());
+            }
+            if (rd.getAuthor() != null) {
+                authors.add(rd.getAuthor());
+            }
+
+            re.setAuthor(AuthorMerger.merge(authors));
+        }
+
+        return entity;
+    }
+
+    public static <T extends OafEntity> T entityMerger(
+        String id, Iterator<Tuple2<String, T>> entities, long ts, DataInfo dataInfo, Class<T> clazz) {
+        T base = entities.next()._2();
+
+        while (entities.hasNext()) {
+            T duplicate = entities.next()._2();
+            if (duplicate != null)
+                base = (T) reduceEntity(base, duplicate);
+        }
+
+        base.setId(id);
+        base.setDataInfo(dataInfo);
+        base.setLastupdatetimestamp(ts);
+
+        return base;
+    }
 }
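For context, a hedged sketch of how the reworked factory might be driven from a Spark job. The session setup, the DataInfo instance, the Publication class cast, all paths and the output step are illustrative assumptions and are not part of this commit.

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import eu.dnetlib.dhp.oa.dedup.DedupRecordFactory;
import eu.dnetlib.dhp.schema.oaf.DataInfo;
import eu.dnetlib.dhp.schema.oaf.OafEntity;
import eu.dnetlib.dhp.schema.oaf.Publication;

// Hypothetical driver for illustration only.
public class DedupRecordSketch {

    @SuppressWarnings("unchecked")
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder().appName("dedup-record-sketch").getOrCreate();

        DataInfo dataInfo = new DataInfo(); // provenance is normally taken from the dedup configuration
        Class<OafEntity> clazz = (Class<OafEntity>) (Class<?>) Publication.class; // assumed entity type

        Dataset<OafEntity> dedupRecords = DedupRecordFactory
            .createDedupRecord(
                spark,
                dataInfo,
                "/working/publication_mergerel", // relations with relClass == 'merges' (assumed path)
                "/graph/publication",            // JSON entities to be merged (assumed path)
                clazz);

        // persist the merged records plus their alias copies (assumed output handling)
        dedupRecords
            .write()
            .mode(SaveMode.Overwrite)
            .option("compression", "gzip")
            .json("/working/publication_deduprecord");
    }
}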
SparkDedupTest.java

@@ -611,7 +611,7 @@ public class SparkDedupTest implements Serializable {
         assertEquals(91, pubs.count());
         assertEquals(47, sw_deduprecord);
         assertEquals(97, ds_deduprecord);
-        assertEquals(93, orp_deduprecord);
+        assertEquals(92, orp_deduprecord);

         verifyRoot_1(mapper, pubs);

@@ -751,7 +751,7 @@ public class SparkDedupTest implements Serializable {
         assertEquals(100, datasource);
         assertEquals(196, softwares);
         assertEquals(389, dataset);
-        assertEquals(521, otherresearchproduct);
+        assertEquals(520, otherresearchproduct);

         // System.out.println("publications = " + publications);
         // System.out.println("organizations = " + organizations);