forked from D-Net/dnet-hadoop
code formatting
This commit is contained in:
parent bd187ec6e7
commit 6fd25cf549
RunSQLSparkJob.java
@@ -1,13 +1,7 @@

 package eu.dnetlib.dhp.oozie;

-import com.google.common.io.Resources;
-import eu.dnetlib.dhp.application.ArgumentApplicationParser;
-import org.apache.commons.lang3.time.DurationFormatUtils;
-import org.apache.commons.text.StringSubstitutor;
-import org.apache.spark.SparkConf;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkHiveSession;

 import java.net.URL;
 import java.nio.charset.StandardCharsets;
@@ -15,7 +9,15 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.Optional;

-import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkHiveSession;
+import org.apache.commons.lang3.time.DurationFormatUtils;
+import org.apache.commons.text.StringSubstitutor;
+import org.apache.spark.SparkConf;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.io.Resources;
+
+import eu.dnetlib.dhp.application.ArgumentApplicationParser;

 public class RunSQLSparkJob {

     private static final Logger log = LoggerFactory.getLogger(RunSQLSparkJob.class);
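Read together, the two hunks above only regroup the imports of RunSQLSparkJob.java into the formatting order used across the commit (static imports first, then java.*, org.*, com.*, and eu.dnetlib.* groups). As a sketch assembled from the new side of the diff, the top of the file ends up as follows; the class body below the logger field is unchanged and elided here:

package eu.dnetlib.dhp.oozie;

import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkHiveSession;

import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

import org.apache.commons.lang3.time.DurationFormatUtils;
import org.apache.commons.text.StringSubstitutor;
import org.apache.spark.SparkConf;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.io.Resources;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;

public class RunSQLSparkJob {

    private static final Logger log = LoggerFactory.getLogger(RunSQLSparkJob.class);

    // ... rest of the class is untouched by this commit
}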
DedupRecordFactory.java
@@ -1,6 +1,16 @@

 package eu.dnetlib.dhp.oa.dedup;

+import java.util.*;
+import java.util.stream.Stream;
+
+import org.apache.commons.beanutils.BeanUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.spark.api.java.function.FlatMapFunction;
+import org.apache.spark.api.java.function.MapFunction;
+import org.apache.spark.api.java.function.ReduceFunction;
+import org.apache.spark.sql.*;
+
 import eu.dnetlib.dhp.oa.dedup.model.Identifier;
 import eu.dnetlib.dhp.oa.merge.AuthorMerger;
 import eu.dnetlib.dhp.schema.common.ModelSupport;
@@ -8,19 +18,10 @@ import eu.dnetlib.dhp.schema.oaf.Author;
 import eu.dnetlib.dhp.schema.oaf.DataInfo;
 import eu.dnetlib.dhp.schema.oaf.OafEntity;
 import eu.dnetlib.dhp.schema.oaf.Result;
-import org.apache.commons.beanutils.BeanUtils;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.spark.api.java.function.FlatMapFunction;
-import org.apache.spark.api.java.function.MapFunction;
-import org.apache.spark.api.java.function.ReduceFunction;
-import org.apache.spark.sql.*;
 import scala.Tuple2;
 import scala.Tuple3;
 import scala.collection.JavaConversions;

-import java.util.*;
-import java.util.stream.Stream;
-
 public class DedupRecordFactory {

     public static final class DedupRecordReduceState {

         public final String dedupId;
@@ -39,7 +40,8 @@ public class DedupRecordFactory {
             } else {
                 if (Result.class.isAssignableFrom(entity.getClass())) {
                     Result result = (Result) entity;
-                    if (result.getDateofacceptance() != null && StringUtils.isNotBlank(result.getDateofacceptance().getValue())) {
+                    if (result.getDateofacceptance() != null
+                        && StringUtils.isNotBlank(result.getDateofacceptance().getValue())) {
                         acceptanceDate.add(result.getDateofacceptance().getValue());
                     }
                 }
@@ -50,6 +52,7 @@ public class DedupRecordFactory {
             return dedupId;
         }
     }
+
     private static final int MAX_ACCEPTANCE_DATE = 20;

     private DedupRecordFactory() {
@@ -90,8 +93,12 @@ public class DedupRecordFactory {
             .join(entities, JavaConversions.asScalaBuffer(Collections.singletonList("id")), "left")
             .select("dedupId", "id", "kryoObject")
             .as(Encoders.tuple(Encoders.STRING(), Encoders.STRING(), kryoEncoder))
-            .map((MapFunction<Tuple3<String, String, OafEntity>, DedupRecordReduceState>) t -> new DedupRecordReduceState(t._1(), t._2(), t._3()), Encoders.kryo(DedupRecordReduceState.class))
-            .groupByKey((MapFunction<DedupRecordReduceState, String>) DedupRecordReduceState::getDedupId, Encoders.STRING())
+            .map(
+                (MapFunction<Tuple3<String, String, OafEntity>, DedupRecordReduceState>) t -> new DedupRecordReduceState(
+                    t._1(), t._2(), t._3()),
+                Encoders.kryo(DedupRecordReduceState.class))
+            .groupByKey(
+                (MapFunction<DedupRecordReduceState, String>) DedupRecordReduceState::getDedupId, Encoders.STRING())
             .reduceGroups(
                 (ReduceFunction<DedupRecordReduceState>) (t1, t2) -> {
                     if (t1.entity == null) {
@@ -105,10 +112,8 @@ public class DedupRecordFactory {
                     t1.entity = reduceEntity(t1.entity, t2.entity);

                     return t1;
-                }
-                )
-            .flatMap
-            ((FlatMapFunction<Tuple2<String, DedupRecordReduceState>, OafEntity>) t -> {
+                })
+            .flatMap((FlatMapFunction<Tuple2<String, DedupRecordReduceState>, OafEntity>) t -> {
                 String dedupId = t._1();
                 DedupRecordReduceState agg = t._2();

@@ -116,7 +121,8 @@ public class DedupRecordFactory {
                     return Collections.emptyIterator();
                 }

-                return Stream.concat(Stream.of(agg.getDedupId()), agg.aliases.stream())
+                return Stream
+                    .concat(Stream.of(agg.getDedupId()), agg.aliases.stream())
                     .map(id -> {
                         try {
                             OafEntity res = (OafEntity) BeanUtils.cloneBean(agg.entity);
@@ -127,7 +133,8 @@ public class DedupRecordFactory {
                         } catch (Exception e) {
                             throw new RuntimeException(e);
                         }
-                    }).iterator();
+                    })
+                    .iterator();
             }, beanEncoder);
     }

@@ -137,7 +144,6 @@ public class DedupRecordFactory {
             return entity;
         }

-
         int compare = new IdentifierComparator<>()
             .compare(Identifier.newInstance(entity), Identifier.newInstance(duplicate));

SparkCreateMergeRels.java
@@ -242,7 +242,8 @@ public class SparkCreateMergeRels extends AbstractSparkAction {

                     // this was a pivot in a previous graph but it has been merged into a new group with different
                     // pivot
-                    if (!r.isNullAt(r.fieldIndex("lastUsage")) && !pivot.equals(id) && !dedupId.equals(pivotDedupId)) {
+                    if (!r.isNullAt(r.fieldIndex("lastUsage")) && !pivot.equals(id)
+                        && !dedupId.equals(pivotDedupId)) {
                         // materialize the previous dedup record as a merge relation with the new one
                         res.add(new Tuple3<>(dedupId, pivotDedupId, null));
                     }
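The same wrapping convention appears in every file touched by this commit: when a condition or call chain exceeds the line limit, it is broken so that the continuation line begins with the boolean operator or the next chained call. A minimal, hypothetical illustration of that style (class, method, and values invented for illustration, not taken from dnet-hadoop):

// Hypothetical example of the wrapping style applied by this commit: the
// continuation lines of an over-long condition start with the boolean operator.
public class WrappingStyleExample {

    private static final int MAX_TITLE_LENGTH = 120;

    static boolean isPublishable(String title, String acceptanceDate) {
        return title != null && !title.isEmpty()
            && acceptanceDate != null
            && title.length() < MAX_TITLE_LENGTH;
    }

    public static void main(String[] args) {
        // prints "true" for a short, dated record
        System.out.println(isPublishable("On deduplication", "2024-01-01"));
    }
}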