forked from D-Net/dnet-hadoop
code formatting
This commit is contained in:
parent 8fd1952f16
commit 5b28bb4131
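Across the hunks below the change is purely cosmetic: the project's code formatter splits long statements and fluent call chains onto one call per line, expands single-line blocks into braced multi-line form, and (judging by the repeated -1,3 +1,4 hunks) inserts a blank line ahead of each package declaration. A minimal, hypothetical sketch of the chained-call rule, with invented names not taken from the diff:

import java.util.List;
import java.util.stream.Collectors;

public class FormattingSketch {

    // Hypothetical snippet: before the formatter this chain would sit on a
    // single long line; afterwards each chained call gets its own line.
    public static String joinTrimmed(final List<String> names) {
        return names
            .stream()
            .map(n -> n.trim())
            .collect(Collectors.joining(", "));
    }
}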
@@ -82,8 +82,12 @@ public class MigrateActionSet {
 
 		List<Path> targetPaths = new ArrayList<>();
 
 		final List<Path> sourcePaths = getSourcePaths(sourceNN, isLookUp);
-		log.info("paths to process:\n{}", sourcePaths
-			.stream().map(p -> p.toString()).collect(Collectors.joining("\n")));
+		log
+			.info(
+				"paths to process:\n{}", sourcePaths
+					.stream()
+					.map(p -> p.toString())
+					.collect(Collectors.joining("\n")));
 
 		for (Path source : sourcePaths) {
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.broker.model;
 
 import java.util.Map;

@@ -20,9 +21,11 @@ public class Event {
 
 	private Map<String, Object> map;
 
-	public Event() {}
+	public Event() {
+	}
 
-	public Event(final String producerId, final String eventId, final String topic, final String payload, final Long creationDate, final Long expiryDate,
+	public Event(final String producerId, final String eventId, final String topic, final String payload,
+		final Long creationDate, final Long expiryDate,
 		final boolean instantMessage,
 		final Map<String, Object> map) {
 		this.producerId = producerId;
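For orientation, a minimal sketch of the Event bean implied by the hunk above. Only the constructor signature, the map field and the first assignment come from the diff; the remaining fields and assignments are assumptions that simply mirror the parameter list:

package eu.dnetlib.dhp.broker.model;

import java.util.Map;

// Sketch only: fields are assumed to mirror the constructor parameters.
public class Event {

    private String producerId;
    private String eventId;
    private String topic;
    private String payload;
    private Long creationDate;
    private Long expiryDate;
    private boolean instantMessage;
    private Map<String, Object> map;

    public Event() {
    }

    public Event(final String producerId, final String eventId, final String topic, final String payload,
        final Long creationDate, final Long expiryDate,
        final boolean instantMessage,
        final Map<String, Object> map) {
        this.producerId = producerId;
        this.eventId = eventId;
        this.topic = topic;
        this.payload = payload;
        this.creationDate = creationDate;
        this.expiryDate = expiryDate;
        this.instantMessage = instantMessage;
        this.map = map;
    }
}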
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.broker.model;
 
 import java.text.ParseException;

@@ -38,8 +39,8 @@ public class EventFactory {
 
 		final String payload = createPayload(target, updateInfo);
 
-		final String eventId =
-			calculateEventId(updateInfo.getTopic(), target.getOriginalId().get(0), updateInfo.getHighlightValueAsString());
+		final String eventId = calculateEventId(
+			updateInfo.getTopic(), target.getOriginalId().get(0), updateInfo.getHighlightValueAsString());
 
 		res.setEventId(eventId);
 		res.setProducerId(PRODUCER_ID);

@@ -61,7 +62,8 @@ public class EventFactory {
 		return payload.toJSON();
 	}
 
-	private static Map<String, Object> createMapFromResult(final Result oaf, final Result source, final UpdateInfo<?> updateInfo) {
+	private static Map<String, Object> createMapFromResult(final Result oaf, final Result source,
+		final UpdateInfo<?> updateInfo) {
 		final Map<String, Object> map = new HashMap<>();
 
 		final List<KeyValue> collectedFrom = oaf.getCollectedfrom();

@@ -87,12 +89,18 @@ public class EventFactory {
 
 		final List<StructuredProperty> subjects = oaf.getSubject();
 		if (subjects.size() > 0) {
-			map.put("target_publication_subject_list", subjects.stream().map(StructuredProperty::getValue).collect(Collectors.toList()));
+			map
+				.put(
+					"target_publication_subject_list",
+					subjects.stream().map(StructuredProperty::getValue).collect(Collectors.toList()));
 		}
 
 		final List<Author> authors = oaf.getAuthor();
 		if (authors.size() > 0) {
-			map.put("target_publication_author_list", authors.stream().map(Author::getFullname).collect(Collectors.toList()));
+			map
+				.put(
+					"target_publication_author_list",
+					authors.stream().map(Author::getFullname).collect(Collectors.toList()));
 		}
 
 		// PROVENANCE INFO

@@ -119,7 +127,9 @@ public class EventFactory {
 	}
 
 	private static long parseDateTolong(final String date) {
-		if (StringUtils.isBlank(date)) { return -1; }
+		if (StringUtils.isBlank(date)) {
+			return -1;
+		}
 		try {
 			return DateUtils.parseDate(date, DATE_PATTERNS).getTime();
 		} catch (final ParseException e) {
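The last hunk above is cut off inside the catch clause. A self-contained sketch of how the reformatted helper plausibly reads as a whole; the DATE_PATTERNS value, the commons-lang3 import paths and the catch body returning -1 are assumptions, only the rest appears in the diff:

import java.text.ParseException;

import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;

public class DateParsingSketch {

    // Assumption: the real DATE_PATTERNS constant is defined elsewhere in
    // EventFactory; a single ISO-style pattern is used here for illustration.
    private static final String[] DATE_PATTERNS = { "yyyy-MM-dd" };

    private static long parseDateTolong(final String date) {
        if (StringUtils.isBlank(date)) {
            return -1;
        }
        try {
            return DateUtils.parseDate(date, DATE_PATTERNS).getTime();
        } catch (final ParseException e) {
            return -1; // assumption: the truncated catch block falls back to -1
        }
    }

    public static void main(final String[] args) {
        System.out.println(parseDateTolong("2020-05-01"));
    }
}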
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.broker.oa;
 
 import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

@@ -40,8 +41,10 @@ public class GenerateEventsApplication {
 
	public static void main(final String[] args) throws Exception {
		final ArgumentApplicationParser parser = new ArgumentApplicationParser(
-			IOUtils.toString(GenerateEventsApplication.class
-				.getResourceAsStream("/eu/dnetlib/dhp/oa/graph/merge_claims_parameters.json")));
+			IOUtils
+				.toString(
+					GenerateEventsApplication.class
+						.getResourceAsStream("/eu/dnetlib/dhp/oa/graph/merge_claims_parameters.json")));
		parser.parseArgument(args);
 
		final Boolean isSparkSessionManaged = Optional

@@ -78,9 +81,12 @@ public class GenerateEventsApplication {
		for (final Result source : children) {
			for (final Result target : children) {
				if (source != target) {
-					list.addAll(findUpdates(source, target).stream()
-						.map(info -> EventFactory.newBrokerEvent(source, target, info))
-						.collect(Collectors.toList()));
+					list
+						.addAll(
+							findUpdates(source, target)
+								.stream()
+								.map(info -> EventFactory.newBrokerEvent(source, target, info))
+								.collect(Collectors.toList()));
				}
			}
		}
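The reformatted block iterates over every ordered pair of distinct results in a duplicate group and turns each detected update into a broker event. A generic, self-contained sketch of that pairwise pattern; the type parameters and functional arguments are placeholders standing in for Result, findUpdates and EventFactory.newBrokerEvent from the diff:

import java.util.ArrayList;
import java.util.List;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.stream.Collectors;

public class PairwiseEventsSketch {

    // T plays the role of Result, U of UpdateInfo and E of Event.
    public static <T, U, E> List<E> pairwiseEvents(
        final List<T> children,
        final BiFunction<T, T, List<U>> findUpdates,
        final Function<U, E> toEvent) {
        final List<E> events = new ArrayList<>();
        for (final T source : children) {
            for (final T target : children) {
                if (source != target) {
                    events
                        .addAll(
                            findUpdates
                                .apply(source, target)
                                .stream()
                                .map(toEvent)
                                .collect(Collectors.toList()));
                }
            }
        }
        return events;
    }
}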
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.broker.oa.util;
 
 import java.util.Arrays;

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.broker.oa.util;
 
 import java.util.Arrays;

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.broker.oa.util;
 
 import java.util.Arrays;

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.broker.oa.util;
 
 import java.util.Arrays;

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.broker.oa.util;
 
 import java.util.Arrays;

@@ -25,7 +26,8 @@ public class EnrichMissingProject extends UpdateInfo<Project> {
 
	@Override
	public String getHighlightValueAsString() {
-		return getHighlightValue().getFunder() + "::" + getHighlightValue().getFundingProgram() + getHighlightValue().getCode();
+		return getHighlightValue().getFunder() + "::" + getHighlightValue().getFundingProgram()
+			+ getHighlightValue().getCode();
	}
 
 }

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.broker.oa.util;
 
 import java.util.Arrays;

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.broker.oa.util;
 
 import java.util.Arrays;

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.broker.oa.util;
 
 import java.util.Arrays;

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.broker.oa.util;
 
 import java.util.Arrays;

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.broker.oa.util;
 
 import java.util.Arrays;

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.broker.oa.util;
 
 import eu.dnetlib.broker.objects.OpenAireEventPayload;
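The EnrichMissingProject hunk only rewraps the concatenation of funder, funding program and project code into the highlight string. A tiny illustrative sketch of the string it builds; the sample values are invented:

public class HighlightValueSketch {

    public static void main(final String[] args) {
        // Mirrors the reformatted return statement:
        // getFunder() + "::" + getFundingProgram() + getCode()
        final String funder = "EC";
        final String fundingProgram = "H2020";
        final String code = "123456";
        System.out.println(funder + "::" + fundingProgram + code); // prints EC::H2020123456
    }
}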
@@ -138,9 +138,9 @@ public class SparkCreateMergeRels extends AbstractSparkAction {
 
	private Relation rel(String source, String target, String relClass, DedupConfig dedupConf) {
 
-		String entityType = dedupConf.getWf().getEntityType();
+		String entityType = dedupConf.getWf().getEntityType();
 
-		Relation r = new Relation();
+		Relation r = new Relation();
		r.setSource(source);
		r.setTarget(target);
		r.setRelClass(relClass);
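The SparkCreateMergeRels hunk is truncated right after the relClass setter, and the paired removed/added lines appear to differ only in whitespace, which the extraction does not preserve. A stand-alone sketch of the visible relation-building pattern; the local Relation stub and the trailing return are assumptions, and the entityType lookup from the hunk is omitted because its use is not visible:

public class RelSketch {

    // Minimal local stand-in for the Relation bean used in the hunk above;
    // the real class lives in the dnet-hadoop schema module.
    static class Relation {
        private String source;
        private String target;
        private String relClass;

        void setSource(final String source) {
            this.source = source;
        }

        void setTarget(final String target) {
            this.target = target;
        }

        void setRelClass(final String relClass) {
            this.relClass = relClass;
        }
    }

    static Relation rel(final String source, final String target, final String relClass) {
        final Relation r = new Relation();
        r.setSource(source);
        r.setTarget(target);
        r.setRelClass(relClass);
        return r; // assumption: the truncated method presumably returns the relation
    }
}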