New sources formatted by maven plugin

Giambattista Bloisi 2023-07-06 10:28:53 +02:00
parent bd3fcf869a
commit 801da2fd4a
105 changed files with 5610 additions and 5267 deletions
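
The commit carries no extended description, but the hunks below show only two kinds of change, both typical of an automated formatter bound to the Maven build: import statements are regrouped (java.* first, then org.*, com.*, and finally the project's eu.dnetlib.* packages) and long statements are rewrapped across multiple lines, with no change in behavior. The pom.xml excerpt below is a minimal sketch of how such a setup could look, assuming the net.revelc.code formatter-maven-plugin for line wrapping and the impsort-maven-plugin for import ordering; the plugin actually used, its version, and the formatter profile referenced by configFile are not shown in this diff and are assumptions.

<!-- Hypothetical build section; the exact plugins and settings are not part of this commit -->
<build>
  <plugins>
    <!-- Rewraps long lines and statements according to an Eclipse formatter profile -->
    <plugin>
      <groupId>net.revelc.code.formatter</groupId>
      <artifactId>formatter-maven-plugin</artifactId>
      <configuration>
        <!-- assumed location of the shared Eclipse formatter profile -->
        <configFile>${project.basedir}/eclipse-formatter-config.xml</configFile>
      </configuration>
      <executions>
        <execution>
          <goals>
            <goal>format</goal>
          </goals>
        </execution>
      </executions>
    </plugin>
    <!-- Regroups imports: java.* and javax.* first, then org.*, com.*, and project packages last -->
    <plugin>
      <groupId>net.revelc.code</groupId>
      <artifactId>impsort-maven-plugin</artifactId>
      <configuration>
        <groups>java.,javax.,org.,com.</groups>
      </configuration>
      <executions>
        <execution>
          <goals>
            <goal>sort</goal>
          </goals>
        </execution>
      </executions>
    </plugin>
  </plugins>
</build>

With plugins like these bound to the build (or invoked directly with mvn formatter:format impsort:sort), every source file is rewritten in one pass, which accounts for the large but purely cosmetic changeset summarized above.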

View File

@ -1,9 +1,5 @@
package eu.dnetlib.pace.clustering;
import eu.dnetlib.pace.common.AbstractPaceFunctions;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import org.apache.commons.lang3.StringUtils;
package eu.dnetlib.pace.clustering;
import java.util.Collection;
import java.util.HashSet;
@ -11,6 +7,12 @@ import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import eu.dnetlib.pace.common.AbstractPaceFunctions;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
public abstract class AbstractClusteringFunction extends AbstractPaceFunctions implements ClusteringFunction {
protected Map<String, Integer> params;
@ -23,7 +25,9 @@ public abstract class AbstractClusteringFunction extends AbstractPaceFunctions i
@Override
public Collection<String> apply(Config conf, List<Field> fields) {
return fields.stream().filter(f -> !f.isEmpty())
return fields
.stream()
.filter(f -> !f.isEmpty())
.map(Field::stringValue)
.map(this::normalize)
.map(s -> filterAllStopWords(s))

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.clustering;
import java.util.Collection;
@ -6,6 +7,7 @@ import java.util.Set;
import java.util.StringTokenizer;
import com.google.common.collect.Sets;
import eu.dnetlib.pace.config.Config;
@ClusteringClass("acronyms")

View File

@ -1,11 +1,5 @@
package eu.dnetlib.pace.clustering;
import com.google.common.collect.Maps;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Document;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldListImpl;
import eu.dnetlib.pace.model.MapDocument;
package eu.dnetlib.pace.clustering;
import java.util.Collection;
import java.util.List;
@ -13,6 +7,14 @@ import java.util.Map;
import java.util.Map.Entry;
import java.util.regex.Pattern;
import com.google.common.collect.Maps;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Document;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldListImpl;
import eu.dnetlib.pace.model.MapDocument;
public class BlacklistAwareClusteringCombiner extends ClusteringCombiner {
public static Collection<String> filterAndCombine(final MapDocument a, final Config conf) {
@ -56,4 +58,3 @@ public class BlacklistAwareClusteringCombiner extends ClusteringCombiner {
}
}

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.clustering;
import java.lang.annotation.ElementType;

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.clustering;
import java.util.ArrayList;
@ -5,6 +6,8 @@ import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import com.google.common.collect.Sets;
import eu.dnetlib.pace.config.Config;
@ -12,7 +15,6 @@ import eu.dnetlib.pace.model.ClusteringDef;
import eu.dnetlib.pace.model.Document;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldValueImpl;
import org.apache.commons.lang3.StringUtils;
public class ClusteringCombiner {
@ -30,18 +32,18 @@ public class ClusteringCombiner {
if (values instanceof FieldValueImpl) {
fields.add(values);
}
else {
} else {
fields.addAll((List<Field>) values);
}
res.addAll(
cd.clusteringFunction()
res
.addAll(
cd
.clusteringFunction()
.apply(conf, fields)
.stream()
.map(k -> prefix + SEPARATOR + k)
.collect(Collectors.toList())
);
.collect(Collectors.toList()));
}
}
return res;
@ -49,7 +51,9 @@ public class ClusteringCombiner {
private static String getPrefix(ClusteringDef cd, String fieldName) {
return cd.getName() + SEPARATOR +
cd.getParams().keySet()
cd
.getParams()
.keySet()
.stream()
.filter(k -> k.contains(COLLAPSE_ON))
.findFirst()

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.clustering;
import java.util.Collection;

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.clustering;
import java.util.Collection;
@ -5,6 +6,7 @@ import java.util.List;
import java.util.Map;
import com.google.common.collect.Lists;
import eu.dnetlib.pace.config.Config;
@ClusteringClass("immutablefieldvalue")

View File

@ -1,12 +1,14 @@
package eu.dnetlib.pace.clustering;
import java.util.*;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import eu.dnetlib.pace.common.AbstractPaceFunctions;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import org.apache.commons.lang3.StringUtils;
import java.util.*;
import java.util.stream.Collectors;
@ClusteringClass("keywordsclustering")
public class KeywordsClustering extends AbstractClusteringFunction {
@ -39,7 +41,9 @@ public class KeywordsClustering extends AbstractClusteringFunction {
@Override
public Collection<String> apply(final Config conf, List<Field> fields) {
return fields.stream().filter(f -> !f.isEmpty())
return fields
.stream()
.filter(f -> !f.isEmpty())
.map(Field::stringValue)
.map(this::cleanup)
.map(this::normalize)

View File

@ -1,14 +1,17 @@
package eu.dnetlib.pace.clustering;
import com.google.common.collect.Lists;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.Person;
import org.apache.commons.lang3.StringUtils;
package eu.dnetlib.pace.clustering;
import java.util.*;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import com.google.common.collect.Lists;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.Person;
@ClusteringClass("lnfi")
public class LastNameFirstInitial extends AbstractClusteringFunction {
@ -20,7 +23,9 @@ public class LastNameFirstInitial extends AbstractClusteringFunction{
@Override
public Collection<String> apply(Config conf, List<Field> fields) {
return fields.stream().filter(f -> !f.isEmpty())
return fields
.stream()
.filter(f -> !f.isEmpty())
.map(Field::stringValue)
.map(this::normalize)
.map(s -> doApply(conf, s))
@ -33,7 +38,8 @@ public class LastNameFirstInitial extends AbstractClusteringFunction{
@Override
protected String normalize(final String s) {
return fixAliases(transliterate(nfd(unicodeNormalization(s))))
// do not compact the regexes in a single expression, would cause StackOverflowError in case of large input strings
// do not compact the regexes in a single expression, would cause StackOverflowError in case of large input
// strings
.replaceAll("[^ \\w]+", "")
.replaceAll("(\\p{InCombiningDiacriticalMarks})+", "")
.replaceAll("(\\p{Punct})+", " ")
@ -47,7 +53,8 @@ public class LastNameFirstInitial extends AbstractClusteringFunction{
final List<String> res = Lists.newArrayList();
final boolean aggressive = (Boolean) (getParams().containsKey("aggressive") ? getParams().get("aggressive") : DEFAULT_AGGRESSIVE);
final boolean aggressive = (Boolean) (getParams().containsKey("aggressive") ? getParams().get("aggressive")
: DEFAULT_AGGRESSIVE);
Person p = new Person(s, aggressive);
@ -56,17 +63,14 @@ public class LastNameFirstInitial extends AbstractClusteringFunction{
String firstInitial = p.getNormalisedFirstName().toLowerCase().substring(0, 1);
res.add(firstInitial.concat(lastName));
}
else { // is not accurate, meaning it has no defined name and surname
} else { // is not accurate, meaning it has no defined name and surname
List<String> fullname = Arrays.asList(p.getNormalisedFullname().split(" "));
if (fullname.size() == 1) {
res.add(p.getNormalisedFullname().toLowerCase());
}
else if (fullname.size() == 2) {
} else if (fullname.size() == 2) {
res.add(fullname.get(0).substring(0, 1).concat(fullname.get(1)).toLowerCase());
res.add(fullname.get(1).substring(0, 1).concat(fullname.get(0)).toLowerCase());
}
else {
} else {
res.add(fullname.get(0).substring(0, 1).concat(fullname.get(fullname.size() - 1)).toLowerCase());
res.add(fullname.get(fullname.size() - 1).substring(0, 1).concat(fullname.get(0)).toLowerCase());
}

View File

@ -1,14 +1,17 @@
package eu.dnetlib.pace.clustering;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import org.apache.commons.lang3.StringUtils;
@ClusteringClass("lowercase")
public class LowercaseClustering extends AbstractClusteringFunction {

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.clustering;
import java.util.Set;
@ -10,11 +11,14 @@ public class NGramUtils extends AbstractPaceFunctions {
private static final int SIZE = 100;
private static Set<String> stopwords = AbstractPaceFunctions.loadFromClasspath("/eu/dnetlib/pace/config/stopwords_en.txt");
private static Set<String> stopwords = AbstractPaceFunctions
.loadFromClasspath("/eu/dnetlib/pace/config/stopwords_en.txt");
public static String cleanupForOrdering(String s) {
NGramUtils utils = new NGramUtils();
return (utils.filterStopWords(utils.normalize(s), stopwords) + StringUtils.repeat(" ", SIZE)).substring(0, SIZE).replaceAll(" ", "");
return (utils.filterStopWords(utils.normalize(s), stopwords) + StringUtils.repeat(" ", SIZE))
.substring(0, SIZE)
.replaceAll(" ", "");
}
}

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.clustering;
import java.util.Collection;
@ -6,6 +7,7 @@ import java.util.List;
import java.util.Map;
import com.google.common.collect.Lists;
import eu.dnetlib.pace.config.Config;
@ClusteringClass("ngrampairs")

View File

@ -1,9 +1,10 @@
package eu.dnetlib.pace.clustering;
import eu.dnetlib.pace.config.Config;
import java.util.*;
import eu.dnetlib.pace.config.Config;
@ClusteringClass("ngrams")
public class Ngrams extends AbstractClusteringFunction {

View File

@ -1,17 +1,20 @@
package eu.dnetlib.pace.clustering;
import com.google.common.collect.Sets;
import eu.dnetlib.pace.common.AbstractPaceFunctions;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.Person;
import org.apache.commons.lang3.StringUtils;
package eu.dnetlib.pace.clustering;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import com.google.common.collect.Sets;
import eu.dnetlib.pace.common.AbstractPaceFunctions;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.Person;
@ClusteringClass("personClustering")
public class PersonClustering extends AbstractPaceFunctions implements ClusteringFunction {
@ -31,7 +34,8 @@ public class PersonClustering extends AbstractPaceFunctions implements Clusterin
final Person person = new Person(f.stringValue(), false);
if (StringUtils.isNotBlank(person.getNormalisedFirstName()) && StringUtils.isNotBlank(person.getNormalisedSurname())) {
if (StringUtils.isNotBlank(person.getNormalisedFirstName())
&& StringUtils.isNotBlank(person.getNormalisedSurname())) {
hashes.add(firstLC(person.getNormalisedFirstName()) + person.getNormalisedSurname().toLowerCase());
} else {
for (final String token1 : tokens(f.stringValue(), MAX_TOKENS)) {

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.clustering;
import java.util.Collection;
@ -22,7 +23,8 @@ public class PersonHash extends AbstractClusteringFunction {
protected Collection<String> doApply(final Config conf, final String s) {
final List<String> res = Lists.newArrayList();
final boolean aggressive = (Boolean) (getParams().containsKey("aggressive") ? getParams().get("aggressive") : DEFAULT_AGGRESSIVE);
final boolean aggressive = (Boolean) (getParams().containsKey("aggressive") ? getParams().get("aggressive")
: DEFAULT_AGGRESSIVE);
res.add(new Person(s, aggressive).hash());

View File

@ -1,10 +1,11 @@
package eu.dnetlib.pace.clustering;
import eu.dnetlib.pace.config.Config;
package eu.dnetlib.pace.clustering;
import java.util.Collection;
import java.util.Map;
import eu.dnetlib.pace.config.Config;
public class RandomClusteringFunction extends AbstractClusteringFunction {
public RandomClusteringFunction(Map<String, Integer> params) {

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.clustering;
import java.util.*;
@ -5,6 +6,7 @@ import java.util.*;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
import eu.dnetlib.pace.config.Config;
@ClusteringClass("sortedngrampairs")
@ -21,7 +23,9 @@ public class SortedNgramPairs extends NgramPairs {
Collections.sort(tokens);
return ngramPairs(Lists.newArrayList(getNgrams(Joiner.on(" ").join(tokens), param("ngramLen"), param("max") * 2, 1, 2)), param("max"));
return ngramPairs(
Lists.newArrayList(getNgrams(Joiner.on(" ").join(tokens), param("ngramLen"), param("max") * 2, 1, 2)),
param("max"));
}
}

View File

@ -1,15 +1,17 @@
package eu.dnetlib.pace.clustering;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import eu.dnetlib.pace.config.Config;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.commons.lang3.StringUtils;
import com.google.common.collect.Lists;
import eu.dnetlib.pace.config.Config;
@ClusteringClass("spacetrimmingfieldvalue")
public class SpaceTrimmingFieldValue extends AbstractClusteringFunction {
@ -21,7 +23,10 @@ public class SpaceTrimmingFieldValue extends AbstractClusteringFunction {
protected Collection<String> doApply(final Config conf, final String s) {
final List<String> res = Lists.newArrayList();
res.add(StringUtils.isBlank(s) ? RandomStringUtils.random(getParams().get("randomLength")) : s.toLowerCase().replaceAll("\\s+", ""));
res
.add(
StringUtils.isBlank(s) ? RandomStringUtils.random(getParams().get("randomLength"))
: s.toLowerCase().replaceAll("\\s+", ""));
return res;
}

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.clustering;
import java.util.Collection;
@ -5,6 +6,7 @@ import java.util.Map;
import java.util.Set;
import com.google.common.collect.Sets;
import eu.dnetlib.pace.config.Config;
@ClusteringClass("suffixprefix")

View File

@ -1,8 +1,5 @@
package eu.dnetlib.pace.clustering;
import eu.dnetlib.pace.common.AbstractPaceFunctions;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
package eu.dnetlib.pace.clustering;
import java.net.MalformedURLException;
import java.net.URL;
@ -12,6 +9,10 @@ import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import eu.dnetlib.pace.common.AbstractPaceFunctions;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
@ClusteringClass("urlclustering")
public class UrlClustering extends AbstractPaceFunctions implements ClusteringFunction {
@ -24,14 +25,14 @@ public class UrlClustering extends AbstractPaceFunctions implements ClusteringFu
@Override
public Collection<String> apply(final Config conf, List<Field> fields) {
try {
return fields.stream()
return fields
.stream()
.filter(f -> !f.isEmpty())
.map(Field::stringValue)
.map(this::asUrl)
.map(URL::getHost)
.collect(Collectors.toCollection(HashSet::new));
}
catch (IllegalStateException e){
} catch (IllegalStateException e) {
return new HashSet<>();
}
}
@ -50,5 +51,4 @@ public class UrlClustering extends AbstractPaceFunctions implements ClusteringFu
}
}
}

View File

@ -1,11 +1,13 @@
package eu.dnetlib.pace.clustering;
import com.google.common.collect.Sets;
import eu.dnetlib.pace.config.Config;
package eu.dnetlib.pace.clustering;
import java.util.*;
import java.util.stream.Collectors;
import com.google.common.collect.Sets;
import eu.dnetlib.pace.config.Config;
@ClusteringClass("wordsStatsSuffixPrefixChain")
public class WordsStatsSuffixPrefixChain extends AbstractClusteringFunction {
@ -21,8 +23,8 @@ public class WordsStatsSuffixPrefixChain extends AbstractClusteringFunction {
private Collection<String> suffixPrefixChain(String s, int mod) {
// create the list of words from the string (remove short words)
List<String> wordsList =
Arrays.stream(s.split(" "))
List<String> wordsList = Arrays
.stream(s.split(" "))
.filter(si -> si.length() > 3)
.collect(Collectors.toList());
@ -44,33 +46,33 @@ public class WordsStatsSuffixPrefixChain extends AbstractClusteringFunction {
case 1:
break;
case 2:
set.add(
set
.add(
prefix +
suffix(wordsList.get(0), 3) +
prefix(wordsList.get(1), 3)
);
prefix(wordsList.get(1), 3));
set.add(
set
.add(
prefix +
prefix(wordsList.get(0), 3) +
suffix(wordsList.get(1), 3)
);
suffix(wordsList.get(1), 3));
break;
default:
set.add(
set
.add(
prefix +
suffix(wordsList.get(0), 3) +
prefix(wordsList.get(1), 3) +
suffix(wordsList.get(2), 3)
);
suffix(wordsList.get(2), 3));
set.add(
set
.add(
prefix +
prefix(wordsList.get(0), 3) +
suffix(wordsList.get(1), 3) +
prefix(wordsList.get(2), 3)
);
prefix(wordsList.get(2), 3));
break;
}
@ -78,7 +80,6 @@ public class WordsStatsSuffixPrefixChain extends AbstractClusteringFunction {
}
private String suffix(String s, int len) {
return s.substring(s.length() - len);
}

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.clustering;
import java.util.Collection;
@ -5,6 +6,7 @@ import java.util.Map;
import java.util.Set;
import com.google.common.collect.Sets;
import eu.dnetlib.pace.config.Config;
@ClusteringClass("wordssuffixprefix")

View File

@ -1,16 +1,5 @@
package eu.dnetlib.pace.common;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
import com.ibm.icu.text.Transliterator;
import eu.dnetlib.pace.clustering.NGramUtils;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldList;
import eu.dnetlib.pace.model.FieldListImpl;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
package eu.dnetlib.pace.common;
import java.io.IOException;
import java.io.StringWriter;
@ -21,6 +10,20 @@ import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
import com.ibm.icu.text.Transliterator;
import eu.dnetlib.pace.clustering.NGramUtils;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldList;
import eu.dnetlib.pace.model.FieldListImpl;
/**
* Set of common functions for the framework
*
@ -29,7 +32,8 @@ import java.util.stream.Collectors;
public abstract class AbstractPaceFunctions {
// city map to be used when translating the city names into codes
private static Map<String, String> cityMap = AbstractPaceFunctions.loadMapFromClasspath("/eu/dnetlib/pace/config/city_map.csv");
private static Map<String, String> cityMap = AbstractPaceFunctions
.loadMapFromClasspath("/eu/dnetlib/pace/config/city_map.csv");
// list of stopwords in different languages
protected static Set<String> stopwords_gr = loadFromClasspath("/eu/dnetlib/pace/config/stopwords_gr.txt");
@ -85,7 +89,8 @@ public abstract class AbstractPaceFunctions {
protected String fixXML(final String a) {
return a.replaceAll("&ndash;", " ")
return a
.replaceAll("&ndash;", " ")
.replaceAll("&amp;", " ")
.replaceAll("&quot;", " ")
.replaceAll("&minus;", " ");
@ -108,7 +113,9 @@ public abstract class AbstractPaceFunctions {
}
protected boolean isRoman(final String s) {
return s.replaceAll("^M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})$", "qwertyuiop").equals("qwertyuiop");
return s
.replaceAll("^M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})$", "qwertyuiop")
.equals("qwertyuiop");
}
protected String getNumbers(final String s) {
@ -140,8 +147,7 @@ public abstract class AbstractPaceFunctions {
protected static String transliterate(final String s) {
try {
return transliterator.transliterate(s);
}
catch(Exception e) {
} catch (Exception e) {
return s;
}
}
@ -157,7 +163,8 @@ public abstract class AbstractPaceFunctions {
}
protected String getFirstValue(final Field values) {
return (values != null) && !Iterables.isEmpty(values) ? Iterables.getFirst(values, EMPTY_FIELD).stringValue() : "";
return (values != null) && !Iterables.isEmpty(values) ? Iterables.getFirst(values, EMPTY_FIELD).stringValue()
: "";
}
protected boolean notNull(final String s) {
@ -167,7 +174,8 @@ public abstract class AbstractPaceFunctions {
protected String normalize(final String s) {
return fixAliases(transliterate(nfd(unicodeNormalization(s))))
.toLowerCase()
// do not compact the regexes in a single expression, would cause StackOverflowError in case of large input strings
// do not compact the regexes in a single expression, would cause StackOverflowError in case of large input
// strings
.replaceAll("[^ \\w]+", "")
.replaceAll("(\\p{InCombiningDiacriticalMarks})+", "")
.replaceAll("(\\p{Punct})+", " ")
@ -239,7 +247,8 @@ public abstract class AbstractPaceFunctions {
final Set<String> h = Sets.newHashSet();
try {
for (final String s : IOUtils.readLines(NGramUtils.class.getResourceAsStream(classpath), StandardCharsets.UTF_8)) {
for (final String s : IOUtils
.readLines(NGramUtils.class.getResourceAsStream(classpath), StandardCharsets.UTF_8)) {
h.add(fixAliases(transliterator.transliterate(s))); // transliteration of the stopwords
}
} catch (final Throwable e) {
@ -254,7 +263,8 @@ public abstract class AbstractPaceFunctions {
final Map<String, String> m = new HashMap<>();
try {
for (final String s : IOUtils.readLines(AbstractPaceFunctions.class.getResourceAsStream(classpath), StandardCharsets.UTF_8)) {
for (final String s : IOUtils
.readLines(AbstractPaceFunctions.class.getResourceAsStream(classpath), StandardCharsets.UTF_8)) {
// string is like this: code;word1;word2;word3
String[] line = s.split(";");
String value = line[0];

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.config;
import java.util.List;
@ -50,7 +51,6 @@ public interface Config {
*/
public Map<String, List<Pattern>> blacklists();
/**
* Translation map.
*

View File

@ -1,16 +1,5 @@
package eu.dnetlib.pace.config;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Maps;
import eu.dnetlib.pace.model.ClusteringDef;
import eu.dnetlib.pace.model.FieldDef;
import eu.dnetlib.pace.util.PaceException;
import org.antlr.stringtemplate.StringTemplate;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
package eu.dnetlib.pace.config;
import java.io.IOException;
import java.io.Serializable;
@ -22,9 +11,20 @@ import java.util.Map.Entry;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import org.antlr.stringtemplate.StringTemplate;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Maps;
import eu.dnetlib.pace.model.ClusteringDef;
import eu.dnetlib.pace.model.FieldDef;
import eu.dnetlib.pace.tree.support.TreeNodeDef;
import eu.dnetlib.pace.util.PaceException;
public class DedupConfig implements Config, Serializable {
@ -56,7 +56,8 @@ public class DedupConfig implements Config, Serializable {
defaults.put("idPath", "$.id");
}
public DedupConfig() {}
public DedupConfig() {
}
public static DedupConfig load(final String json) {
@ -66,10 +67,21 @@ public class DedupConfig implements Config, Serializable {
config.getPace().initModel();
config.getPace().initTranslationMap();
config.blacklists = config.getPace().getBlacklists().entrySet()
config.blacklists = config
.getPace()
.getBlacklists()
.entrySet()
.stream()
.collect(Collectors.toMap(e -> e.getKey(),
e ->e.getValue().stream().filter(s -> !StringUtils.isBlank(s)).map(Pattern::compile).collect(Collectors.toList()) ));
.collect(
Collectors
.toMap(
e -> e.getKey(),
e -> e
.getValue()
.stream()
.filter(s -> !StringUtils.isBlank(s))
.map(Pattern::compile)
.collect(Collectors.toList())));
return config;
} catch (IOException e) {

View File

@ -1,19 +1,20 @@
package eu.dnetlib.pace.config;
import java.io.Serializable;
import java.util.List;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.google.common.collect.Maps;
import com.ibm.icu.text.Transliterator;
import eu.dnetlib.pace.common.AbstractPaceFunctions;
import eu.dnetlib.pace.model.ClusteringDef;
import eu.dnetlib.pace.model.FieldDef;
import eu.dnetlib.pace.tree.support.TreeNodeDef;
import eu.dnetlib.pace.util.PaceResolver;
import java.io.Serializable;
import java.util.List;
import java.util.Map;
public class PaceConfig extends AbstractPaceFunctions implements Serializable {
private List<FieldDef> model;
@ -33,7 +34,8 @@ public class PaceConfig extends AbstractPaceFunctions implements Serializable {
@JsonIgnore
public static PaceResolver resolver = new PaceResolver();
public PaceConfig() {}
public PaceConfig() {
}
public void initModel() {
modelMap = Maps.newHashMap();
@ -48,7 +50,8 @@ public class PaceConfig extends AbstractPaceFunctions implements Serializable {
Transliterator transliterator = Transliterator.getInstance("Any-Eng");
for (String key : synonyms.keySet()) {
for (String term : synonyms.get(key)) {
translationMap.put(
translationMap
.put(
fixAliases(transliterator.transliterate(term.toLowerCase())),
key);
}

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.config;
public enum Type {

View File

@ -1,10 +1,5 @@
package eu.dnetlib.pace.config;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import eu.dnetlib.pace.util.PaceException;
import org.apache.commons.lang3.StringUtils;
package eu.dnetlib.pace.config;
import java.io.IOException;
import java.io.Serializable;
@ -12,6 +7,13 @@ import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import eu.dnetlib.pace.util.PaceException;
public class WfConfig implements Serializable {
@ -76,7 +78,6 @@ public class WfConfig implements Serializable {
/** Maximum number of allowed children. */
private int maxChildren = MAX_CHILDREN;
/** Default maximum number of iterations. */
private final static int MAX_ITERATIONS = 20;
@ -86,7 +87,8 @@ public class WfConfig implements Serializable {
/** The Jquery path to retrieve the identifier */
private String idPath = "$.id";
public WfConfig() {}
public WfConfig() {
}
/**
* Instantiates a new dedup config.
@ -114,8 +116,10 @@ public class WfConfig implements Serializable {
* @param idPath
* the path for the id of the entity
*/
public WfConfig(final String entityType, final String orderField, final List<String> rootBuilder, final String dedupRun,
final Set<String> skipList, final int queueMaxSize, final int groupMaxSize, final int slidingWindowSize, final boolean includeChildren, final int maxIterations, final String idPath) {
public WfConfig(final String entityType, final String orderField, final List<String> rootBuilder,
final String dedupRun,
final Set<String> skipList, final int queueMaxSize, final int groupMaxSize, final int slidingWindowSize,
final boolean includeChildren, final int maxIterations, final String idPath) {
super();
this.entityType = entityType;
this.orderField = orderField;
@ -257,7 +261,6 @@ public class WfConfig implements Serializable {
this.maxChildren = maxChildren;
}
public int getMaxIterations() {
return maxIterations;
}
@ -277,7 +280,6 @@ public class WfConfig implements Serializable {
/*
* (non-Javadoc)
*
* @see java.lang.Object#toString()
*/
@Override

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.model;
import eu.dnetlib.pace.config.Type;
@ -16,7 +17,8 @@ public abstract class AbstractField implements Field {
/**
* Instantiates a new abstract field.
*/
protected AbstractField() {}
protected AbstractField() {
}
/**
* Instantiates a new abstract field.
@ -33,7 +35,6 @@ public abstract class AbstractField implements Field {
/*
* (non-Javadoc)
*
* @see eu.dnetlib.pace.model.Field#getName()
*/
@Override
@ -43,7 +44,6 @@ public abstract class AbstractField implements Field {
/*
* (non-Javadoc)
*
* @see eu.dnetlib.pace.model.Field#getType()
*/
@Override
@ -53,7 +53,6 @@ public abstract class AbstractField implements Field {
/*
* (non-Javadoc)
*
* @see eu.dnetlib.pace.model.Field#setName(java.lang.String)
*/
@Override
@ -63,7 +62,6 @@ public abstract class AbstractField implements Field {
/*
* (non-Javadoc)
*
* @see eu.dnetlib.pace.model.Field#setType(eu.dnetlib.pace.config.Type)
*/
@Override

View File

@ -1,15 +1,16 @@
package eu.dnetlib.pace.model;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.pace.clustering.ClusteringFunction;
import eu.dnetlib.pace.config.PaceConfig;
import eu.dnetlib.pace.util.PaceException;
package eu.dnetlib.pace.model;
import java.io.IOException;
import java.io.Serializable;
import java.util.List;
import java.util.Map;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.pace.clustering.ClusteringFunction;
import eu.dnetlib.pace.config.PaceConfig;
import eu.dnetlib.pace.util.PaceException;
public class ClusteringDef implements Serializable {
@ -19,7 +20,8 @@ public class ClusteringDef implements Serializable {
private Map<String, Integer> params;
public ClusteringDef() {}
public ClusteringDef() {
}
public String getName() {
return name;

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.model;
import java.util.Set;

View File

@ -1,9 +1,10 @@
package eu.dnetlib.pace.model;
import eu.dnetlib.pace.config.Type;
import java.io.Serializable;
import eu.dnetlib.pace.config.Type;
/**
* The Interface Field.
*/

View File

@ -1,13 +1,15 @@
package eu.dnetlib.pace.model;
import java.io.Serializable;
import java.util.List;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
import eu.dnetlib.pace.config.Type;
import java.io.Serializable;
import java.util.List;
import eu.dnetlib.pace.config.Type;
/**
* The schema is composed by field definitions (FieldDef). Each field has a type, a name, and an associated compare algorithm.
@ -34,7 +36,8 @@ public class FieldDef implements Serializable {
*/
private int length = -1;
public FieldDef() {}
public FieldDef() {
}
// def apply(s: String): Field[A]
public Field apply(final Type type, final String s) {

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.model;
import java.util.List;

View File

@ -1,17 +1,19 @@
package eu.dnetlib.pace.model;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import eu.dnetlib.pace.config.Type;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import eu.dnetlib.pace.config.Type;
/**
* The Class FieldListImpl.
@ -41,7 +43,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/*
* (non-Javadoc)
*
* @see java.util.List#add(java.lang.Object)
*/
@Override
@ -51,7 +52,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/*
* (non-Javadoc)
*
* @see java.util.List#add(int, java.lang.Object)
*/
@Override
@ -61,7 +61,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/*
* (non-Javadoc)
*
* @see java.util.List#addAll(java.util.Collection)
*/
@Override
@ -71,7 +70,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/*
* (non-Javadoc)
*
* @see java.util.List#addAll(int, java.util.Collection)
*/
@Override
@ -81,7 +79,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/*
* (non-Javadoc)
*
* @see java.util.List#clear()
*/
@Override
@ -91,7 +88,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/*
* (non-Javadoc)
*
* @see java.util.List#contains(java.lang.Object)
*/
@Override
@ -101,7 +97,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/*
* (non-Javadoc)
*
* @see java.util.List#containsAll(java.util.Collection)
*/
@Override
@ -111,7 +106,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/*
* (non-Javadoc)
*
* @see java.util.List#get(int)
*/
@Override
@ -121,7 +115,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/*
* (non-Javadoc)
*
* @see java.util.List#indexOf(java.lang.Object)
*/
@Override
@ -131,7 +124,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/*
* (non-Javadoc)
*
* @see eu.dnetlib.pace.model.Field#isEmpty()
*/
@Override
@ -141,7 +133,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/*
* (non-Javadoc)
*
* @see java.lang.Iterable#iterator()
*/
@Override
@ -151,7 +142,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/*
* (non-Javadoc)
*
* @see java.util.List#lastIndexOf(java.lang.Object)
*/
@Override
@ -161,7 +151,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/*
* (non-Javadoc)
*
* @see java.util.List#listIterator()
*/
@Override
@ -171,7 +160,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/*
* (non-Javadoc)
*
* @see java.util.List#listIterator(int)
*/
@Override
@ -181,7 +169,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/*
* (non-Javadoc)
*
* @see java.util.List#remove(java.lang.Object)
*/
@Override
@ -191,7 +178,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/*
* (non-Javadoc)
*
* @see java.util.List#remove(int)
*/
@Override
@ -201,7 +187,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/*
* (non-Javadoc)
*
* @see java.util.List#removeAll(java.util.Collection)
*/
@Override
@ -211,7 +196,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/*
* (non-Javadoc)
*
* @see java.util.List#retainAll(java.util.Collection)
*/
@Override
@ -221,7 +205,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/*
* (non-Javadoc)
*
* @see java.util.List#set(int, java.lang.Object)
*/
@Override
@ -231,7 +214,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/*
* (non-Javadoc)
*
* @see java.util.List#size()
*/
@Override
@ -241,7 +223,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/*
* (non-Javadoc)
*
* @see java.util.List#subList(int, int)
*/
@Override
@ -251,7 +232,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/*
* (non-Javadoc)
*
* @see java.util.List#toArray()
*/
@Override
@ -261,7 +241,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/*
* (non-Javadoc)
*
* @see java.util.List#toArray(java.lang.Object[])
*/
@Override
@ -271,7 +250,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/*
* (non-Javadoc)
*
* @see eu.dnetlib.pace.model.Field#stringValue()
*/
@Override
@ -297,7 +275,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/*
* (non-Javadoc)
*
* @see eu.dnetlib.pace.model.FieldList#stringList()
*/
@Override

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.model;
/**

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.model;
import java.net.MalformedURLException;
@ -6,9 +7,10 @@ import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import eu.dnetlib.pace.config.Type;
import org.apache.commons.lang3.StringUtils;
import eu.dnetlib.pace.config.Type;
/**
* The Class FieldValueImpl.
*/
@ -20,7 +22,8 @@ public class FieldValueImpl extends AbstractField implements FieldValue {
/**
* Instantiates a new field value impl.
*/
public FieldValueImpl() {}
public FieldValueImpl() {
}
/**
* Instantiates a new field value impl.
@ -39,12 +42,12 @@ public class FieldValueImpl extends AbstractField implements FieldValue {
/*
* (non-Javadoc)
*
* @see eu.dnetlib.pace.model.Field#isEmpty()
*/
@Override
public boolean isEmpty() {
if (value == null) return false;
if (value == null)
return false;
switch (type) {
case String:
@ -78,7 +81,6 @@ public class FieldValueImpl extends AbstractField implements FieldValue {
/*
* (non-Javadoc)
*
* @see eu.dnetlib.pace.model.FieldValue#getValue()
*/
@Override
@ -88,7 +90,6 @@ public class FieldValueImpl extends AbstractField implements FieldValue {
/*
* (non-Javadoc)
*
* @see eu.dnetlib.pace.model.FieldValue#setValue(java.lang.Object)
*/
@Override
@ -98,7 +99,6 @@ public class FieldValueImpl extends AbstractField implements FieldValue {
/*
* (non-Javadoc)
*
* @see eu.dnetlib.pace.model.Field#stringValue()
*/
@Override
@ -124,7 +124,6 @@ public class FieldValueImpl extends AbstractField implements FieldValue {
/*
* (non-Javadoc)
*
* @see java.lang.Iterable#iterator()
*/
@Override

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.model;
import java.io.Serializable;
@ -57,7 +58,6 @@ public class MapDocument implements Document, Serializable {
/*
* (non-Javadoc)
*
* @see eu.dnetlib.pace.model.document.Document#fields()
*/
@Override
@ -67,7 +67,6 @@ public class MapDocument implements Document, Serializable {
/*
* (non-Javadoc)
*
* @see eu.dnetlib.pace.model.document.Document#values(java.lang.String)
*/
@Override
@ -77,7 +76,6 @@ public class MapDocument implements Document, Serializable {
/*
* (non-Javadoc)
*
* @see eu.dnetlib.pace.model.document.Document#fieldNames()
*/
@Override
@ -87,7 +85,6 @@ public class MapDocument implements Document, Serializable {
/*
* (non-Javadoc)
*
* @see java.lang.Object#toString()
*/
@Override
@ -107,7 +104,6 @@ public class MapDocument implements Document, Serializable {
/*
* (non-Javadoc)
*
* @see eu.dnetlib.pace.model.document.Document#getIdentifier()
*/
@Override

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.model;
import java.util.Comparator;
@ -28,18 +29,19 @@ public class MapDocumentComparator implements Comparator<Document> {
/*
* (non-Javadoc)
*
* @see java.util.Comparator#compare(java.lang.Object, java.lang.Object)
*/
@Override
public int compare(final Document d1, final Document d2) {
if (d1.values(comparatorField).isEmpty() || d2.values(comparatorField).isEmpty()) return 0;
if (d1.values(comparatorField).isEmpty() || d2.values(comparatorField).isEmpty())
return 0;
final String o1 = Iterables.getFirst(d1.values(comparatorField), emptyField).stringValue();
final String o2 = Iterables.getFirst(d2.values(comparatorField), emptyField).stringValue();
if ((o1 == null) || (o2 == null)) return 0;
if ((o1 == null) || (o2 == null))
return 0;
final String to1 = NGramUtils.cleanupForOrdering(o1);
final String to2 = NGramUtils.cleanupForOrdering(o2);

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.model;
import java.lang.reflect.Type;
@ -33,7 +34,8 @@ public class MapDocumentSerializer implements InstanceCreator<MapDocument> {
gson.registerTypeAdapter(Field.class, new JsonDeserializer<Field>() {
@Override
public Field deserialize(final JsonElement json, final Type typeOfT, final JsonDeserializationContext context) throws JsonParseException {
public Field deserialize(final JsonElement json, final Type typeOfT,
final JsonDeserializationContext context) throws JsonParseException {
final FieldListImpl fl = new FieldListImpl();
if (json.isJsonObject()) {

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.model;
import java.nio.charset.Charset;

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.model;
import java.util.ArrayList;

View File

@ -1,13 +1,15 @@
package eu.dnetlib.pace.tree;
import java.util.Map;
import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.Map;
@ComparatorClass("alwaysMatch")
public class AlwaysMatch extends AbstractComparator {
@ -39,4 +41,3 @@ public class AlwaysMatch extends AbstractComparator {
}
}

View File

@ -1,13 +1,5 @@
package eu.dnetlib.pace.tree;
import com.google.common.collect.Iterables;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldList;
import eu.dnetlib.pace.model.Person;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import com.wcohen.ss.AbstractStringDistance;
package eu.dnetlib.pace.tree;
import java.util.Comparator;
import java.util.List;
@ -16,6 +8,16 @@ import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import com.google.common.collect.Iterables;
import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldList;
import eu.dnetlib.pace.model.Person;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
@ComparatorClass("authorsMatch")
public class AuthorsMatch extends AbstractComparator {
@ -55,8 +57,16 @@ public class AuthorsMatch extends AbstractComparator {
if (((FieldList) a).size() > SIZE_THRESHOLD || ((FieldList) b).size() > SIZE_THRESHOLD)
return 1.0;
List<Person> aList = ((FieldList) a).stringList().stream().map(author -> new Person(author, false)).collect(Collectors.toList());
List<Person> bList = ((FieldList) b).stringList().stream().map(author -> new Person(author, false)).collect(Collectors.toList());
List<Person> aList = ((FieldList) a)
.stringList()
.stream()
.map(author -> new Person(author, false))
.collect(Collectors.toList());
List<Person> bList = ((FieldList) b)
.stringList()
.stream()
.map(author -> new Person(author, false))
.collect(Collectors.toList());
common = 0;
// compare each element of List1 with each element of List2
@ -67,8 +77,10 @@ public class AuthorsMatch extends AbstractComparator {
// both persons are inaccurate
if (!p1.isAccurate() && !p2.isAccurate()) {
// compare just normalized fullnames
String fullname1 = normalization(p1.getNormalisedFullname().isEmpty()? p1.getOriginal() : p1.getNormalisedFullname());
String fullname2 = normalization(p2.getNormalisedFullname().isEmpty()? p2.getOriginal() : p2.getNormalisedFullname());
String fullname1 = normalization(
p1.getNormalisedFullname().isEmpty() ? p1.getOriginal() : p1.getNormalisedFullname());
String fullname2 = normalization(
p2.getNormalisedFullname().isEmpty() ? p2.getOriginal() : p2.getNormalisedFullname());
if (ssalgo.score(fullname1, fullname2) > FULLNAME_THRESHOLD) {
common += 1;
@ -80,13 +92,16 @@ public class AuthorsMatch extends AbstractComparator {
if (p1.isAccurate() ^ p2.isAccurate()) {
// prepare data
// data for the accurate person
String name = normalization(p1.isAccurate()? p1.getNormalisedFirstName() : p2.getNormalisedFirstName());
String surname = normalization(p1.isAccurate()? p1.getNormalisedSurname() : p2.getNormalisedSurname());
String name = normalization(
p1.isAccurate() ? p1.getNormalisedFirstName() : p2.getNormalisedFirstName());
String surname = normalization(
p1.isAccurate() ? p1.getNormalisedSurname() : p2.getNormalisedSurname());
// data for the inaccurate person
String fullname = normalization(
p1.isAccurate() ? ((p2.getNormalisedFullname().isEmpty()) ? p2.getOriginal() : p2.getNormalisedFullname()) : (p1.getNormalisedFullname().isEmpty() ? p1.getOriginal() : p1.getNormalisedFullname())
);
p1.isAccurate()
? ((p2.getNormalisedFullname().isEmpty()) ? p2.getOriginal() : p2.getNormalisedFullname())
: (p1.getNormalisedFullname().isEmpty() ? p1.getOriginal() : p1.getNormalisedFullname()));
if (fullname.contains(surname)) {
if (MODE.equals("full")) {
@ -94,8 +109,7 @@ public class AuthorsMatch extends AbstractComparator {
common += 1;
break;
}
}
else { //MODE equals "surname"
} else { // MODE equals "surname"
common += 1;
break;
}
@ -111,8 +125,7 @@ public class AuthorsMatch extends AbstractComparator {
common += 1;
break;
}
}
else { //MODE equals "surname"
} else { // MODE equals "surname"
common += 1;
break;
}
@ -127,14 +140,15 @@ public class AuthorsMatch extends AbstractComparator {
if (TYPE.equals("percentage")) {
return (double) common / normFactor;
}
else {
} else {
return (double) common;
}
}
public boolean compareSurname(Person p1, Person p2) {
return ssalgo.score(normalization(p1.getNormalisedSurname()), normalization(p2.getNormalisedSurname())) > SURNAME_THRESHOLD;
return ssalgo
.score(
normalization(p1.getNormalisedSurname()), normalization(p2.getNormalisedSurname())) > SURNAME_THRESHOLD;
}
public boolean compareFirstname(Person p1, Person p2) {
@ -144,7 +158,10 @@ public class AuthorsMatch extends AbstractComparator {
return true;
}
return ssalgo.score(normalization(p1.getNormalisedFirstName()), normalization(p2.getNormalisedFirstName())) > NAME_THRESHOLD;
return ssalgo
.score(
normalization(p1.getNormalisedFirstName()),
normalization(p2.getNormalisedFirstName())) > NAME_THRESHOLD;
}
public String normalization(String s) {

View File

@ -1,12 +1,13 @@
package eu.dnetlib.pace.tree;
import java.util.Map;
import java.util.Set;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.Map;
import java.util.Set;
@ComparatorClass("cityMatch")
public class CityMatch extends AbstractComparator {

View File

@ -1,5 +1,11 @@
package eu.dnetlib.pace.tree;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldList;
@ -8,11 +14,6 @@ import eu.dnetlib.pace.model.Person;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
@ComparatorClass("cosineSimilarity")
public class CosineSimilarity extends AbstractComparator {
@ -49,5 +50,4 @@ public class CosineSimilarity extends AbstractComparator {
return dotProduct / eucledianDist;
}
}

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.tree;
import java.util.Map;

View File

@ -1,12 +1,13 @@
package eu.dnetlib.pace.tree;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.tree.support.ComparatorClass;
package eu.dnetlib.pace.tree;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Map;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.tree.support.ComparatorClass;
@ComparatorClass("domainExactMatch")
public class DomainExactMatch extends ExactMatchIgnoreCase {

View File

@ -1,12 +1,14 @@
package eu.dnetlib.pace.tree;
import java.util.Map;
import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.Map;
@ComparatorClass("exactMatch")
public class ExactMatch extends AbstractComparator {

View File

@ -1,12 +1,13 @@
package eu.dnetlib.pace.tree;
import java.util.Map;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.Map;
@ComparatorClass("exactMatchIgnoreCase")
public class ExactMatchIgnoreCase extends AbstractComparator {

View File

@ -1,11 +1,5 @@
package eu.dnetlib.pace.tree;
import com.google.common.collect.Sets;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldList;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
package eu.dnetlib.pace.tree;
import java.util.HashMap;
import java.util.List;
@ -13,6 +7,14 @@ import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import com.google.common.collect.Sets;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldList;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
@ComparatorClass("instanceTypeMatch")
public class InstanceTypeMatch extends AbstractComparator {
@ -39,7 +41,6 @@ public class InstanceTypeMatch extends AbstractComparator {
translationMap.put("Doctoral thesis", "Thesis");
}
@Override
public double compare(final Field a, final Field b, final Config conf) {

View File

@ -1,12 +1,14 @@
package eu.dnetlib.pace.tree;
import java.util.Map;
import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.Map;
//case class JaroWinkler(w: Double) extends SecondStringDistanceAlgo(w, new com.wcohen.ss.JaroWinkler())
@ComparatorClass("jaroWinkler")
public class JaroWinkler extends AbstractComparator {

View File

@ -1,15 +1,15 @@
package eu.dnetlib.pace.tree;
import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import eu.dnetlib.pace.config.Config;
import java.util.Map;
import java.util.Set;
import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
@ComparatorClass("jaroWinklerNormalizedName")
public class JaroWinklerNormalizedName extends AbstractComparator {
@ -39,8 +39,10 @@ public class JaroWinklerNormalizedName extends AbstractComparator {
ca = filterAllStopWords(ca);
cb = filterAllStopWords(cb);
Set<String> keywords1 = getKeywords(ca, conf.translationMap(), Integer.parseInt(params.getOrDefault("windowSize", "4")));
Set<String> keywords2 = getKeywords(cb, conf.translationMap(), Integer.parseInt(params.getOrDefault("windowSize", "4")));
Set<String> keywords1 = getKeywords(
ca, conf.translationMap(), Integer.parseInt(params.getOrDefault("windowSize", "4")));
Set<String> keywords2 = getKeywords(
cb, conf.translationMap(), Integer.parseInt(params.getOrDefault("windowSize", "4")));
Set<String> cities1 = getCities(ca, Integer.parseInt(params.getOrDefault("windowSize", "4")));
Set<String> cities2 = getCities(cb, Integer.parseInt(params.getOrDefault("windowSize", "4")));

View File

@ -1,12 +1,13 @@
package eu.dnetlib.pace.tree;
import java.util.Map;
import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import eu.dnetlib.pace.config.Config;
import java.util.Map;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
//case class JaroWinkler(w: Double) extends SecondStringDistanceAlgo(w, new com.wcohen.ss.JaroWinkler())
@ComparatorClass("jaroWinklerTitle")

View File

@ -1,19 +1,22 @@
package eu.dnetlib.pace.tree;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.google.common.collect.Sets;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldList;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import eu.dnetlib.pace.util.MapDocumentUtil;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
@ComparatorClass("jsonListMatch")
public class JsonListMatch extends AbstractComparator {
@ -60,7 +63,8 @@ public class JsonListMatch extends AbstractComparator {
// converts every json into a comparable string basing on parameters
private String toComparableString(String json) {
StringBuilder st = new StringBuilder(); //to build the string used for comparisons basing on the jpath into parameters
StringBuilder st = new StringBuilder(); // to build the string used for comparisons basing on the jpath into
// parameters
// for each path in the param list
for (String key : params.keySet().stream().filter(k -> k.contains("jpath")).collect(Collectors.toList())) {

View File

@ -1,12 +1,13 @@
package eu.dnetlib.pace.tree;
import java.util.Map;
import java.util.Set;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.Map;
import java.util.Set;
@ComparatorClass("keywordMatch")
public class KeywordMatch extends AbstractComparator {
@ -29,8 +30,10 @@ public class KeywordMatch extends AbstractComparator {
ca = filterAllStopWords(ca);
cb = filterAllStopWords(cb);
Set<String> keywords1 = getKeywords(ca, conf.translationMap(), Integer.parseInt(params.getOrDefault("windowSize", "4")));
Set<String> keywords2 = getKeywords(cb, conf.translationMap(), Integer.parseInt(params.getOrDefault("windowSize", "4")));
Set<String> keywords1 = getKeywords(
ca, conf.translationMap(), Integer.parseInt(params.getOrDefault("windowSize", "4")));
Set<String> keywords2 = getKeywords(
cb, conf.translationMap(), Integer.parseInt(params.getOrDefault("windowSize", "4")));
Set<String> codes1 = toCodes(keywords1, conf.translationMap());
Set<String> codes2 = toCodes(keywords2, conf.translationMap());

View File

@ -1,11 +1,13 @@
package eu.dnetlib.pace.tree;
import java.util.Map;
import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.Map;
@ComparatorClass("level2JaroWinkler")
public class Level2JaroWinkler extends AbstractComparator {

View File

@ -1,11 +1,13 @@
package eu.dnetlib.pace.tree;
import java.util.Map;
import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import eu.dnetlib.pace.config.Config;
import java.util.Map;
@ComparatorClass("level2JaroWinklerTitle")
public class Level2JaroWinklerTitle extends AbstractComparator {
@ -29,7 +31,8 @@ public class Level2JaroWinklerTitle extends AbstractComparator {
final boolean check = checkNumbers(ca, cb);
if (check) return 0.5;
if (check)
return 0.5;
return ssalgo.score(ca, cb);
}

View File

@ -1,11 +1,13 @@
package eu.dnetlib.pace.tree;
import java.util.Map;
import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.Map;
@ComparatorClass("level2Levenstein")
public class Level2Levenstein extends AbstractComparator {

View File

@ -1,11 +1,13 @@
package eu.dnetlib.pace.tree;
import java.util.Map;
import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.Map;
@ComparatorClass("levenstein")
public class Levenstein extends AbstractComparator {

View File

@ -1,14 +1,16 @@
package eu.dnetlib.pace.tree;
import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import eu.dnetlib.pace.config.Config;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import java.util.Map;
import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
@ComparatorClass("levensteinTitle")
public class LevensteinTitle extends AbstractComparator {
@ -34,7 +36,8 @@ public class LevensteinTitle extends AbstractComparator {
final boolean check = checkNumbers(ca, cb);
if (check) return 0.5;
if (check)
return 0.5;
return normalize(ssalgo.score(ca, cb), ca.length(), cb.length());
}

View File

@ -1,12 +1,13 @@
package eu.dnetlib.pace.tree;
import java.util.Map;
import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import eu.dnetlib.pace.config.Config;
import java.util.Map;
/**
* Compared compare between two titles, ignoring version numbers. Suitable for Software entities.

View File

@ -1,17 +1,19 @@
package eu.dnetlib.pace.tree;
import com.google.common.collect.Sets;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldList;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
package eu.dnetlib.pace.tree;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import com.google.common.collect.Sets;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldList;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
/**
* The Class Contains match
*
@ -71,4 +73,3 @@ public class ListContainsMatch extends AbstractComparator {
}
}

View File

@ -1,11 +1,13 @@
package eu.dnetlib.pace.tree;
import java.util.Map;
import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import eu.dnetlib.pace.config.Config;
import java.util.Map;
@ComparatorClass("mustBeDifferent")
public class MustBeDifferent extends AbstractComparator {

View File

@ -1,12 +1,13 @@
package eu.dnetlib.pace.tree;
import java.util.Map;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.tree.support.Comparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.Map;
/**
* Not all fields of a document need to participate in the compare measure. We model those fields as having a
* NullDistanceAlgo.

View File

@ -1,11 +1,12 @@
package eu.dnetlib.pace.tree;
import java.util.Map;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.Map;
@ComparatorClass("numbersComparator")
public class NumbersComparator extends AbstractComparator {

View File

@ -1,15 +1,15 @@
package eu.dnetlib.pace.tree;
import java.util.Map;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.Map;
@ComparatorClass("numbersMatch")
public class NumbersMatch extends AbstractComparator {
public NumbersMatch(Map<String, String> params) {
super(params);
}

View File

@ -1,15 +1,15 @@
package eu.dnetlib.pace.tree;
import java.util.Map;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.Map;
@ComparatorClass("romansMatch")
public class RomansMatch extends AbstractComparator {
public RomansMatch(Map<String, String> params) {
super(params);
}

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.tree;
import java.util.List;

View File

@ -1,11 +1,13 @@
package eu.dnetlib.pace.tree;
import java.util.Map;
import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.tree.support.AbstractSortedComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.Map;
/**
* The Class SortedJaroWinkler.
*/
@ -40,7 +42,6 @@ public class SortedJaroWinkler extends AbstractSortedComparator {
/*
* (non-Javadoc)
*
* @see eu.dnetlib.pace.compare.DistanceAlgo#getWeight()
*/
@Override
@ -50,7 +51,6 @@ public class SortedJaroWinkler extends AbstractSortedComparator {
/*
* (non-Javadoc)
*
* @see eu.dnetlib.pace.compare.SecondStringDistanceAlgo#normalize(double)
*/
@Override

View File

@ -1,11 +1,13 @@
package eu.dnetlib.pace.tree;
import java.util.Map;
import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.tree.support.AbstractSortedComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.Map;
/**
* The Class SortedLevel2JaroWinkler.
*/
@ -40,7 +42,6 @@ public class SortedLevel2JaroWinkler extends AbstractSortedComparator {
/*
* (non-Javadoc)
*
* @see eu.dnetlib.pace.compare.DistanceAlgo#getWeight()
*/
@Override
@ -50,7 +51,6 @@ public class SortedLevel2JaroWinkler extends AbstractSortedComparator {
/*
* (non-Javadoc)
*
* @see eu.dnetlib.pace.compare.SecondStringDistanceAlgo#normalize(double)
*/
@Override

View File

@ -1,11 +1,12 @@
package eu.dnetlib.pace.tree;
import java.util.Map;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.Map;
/**
* The Class Contains match
*

View File

@ -1,17 +1,20 @@
package eu.dnetlib.pace.tree;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.google.common.collect.Sets;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldList;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
@ComparatorClass("stringListMatch")
public class StringListMatch extends AbstractComparator {

View File

@ -1,17 +1,18 @@
package eu.dnetlib.pace.tree;
import eu.dnetlib.pace.config.Config;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.config.Type;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.Map;
/**
* The Class SubStringLevenstein.
*/
@ -66,8 +67,8 @@ public class SubStringLevenstein extends AbstractComparator {
/*
* (non-Javadoc)
*
* @see eu.dnetlib.pace.compare.SecondStringDistanceAlgo#compare(eu.dnetlib.pace.model.Field, eu.dnetlib.pace.model.Field)
* @see eu.dnetlib.pace.compare.SecondStringDistanceAlgo#compare(eu.dnetlib.pace.model.Field,
* eu.dnetlib.pace.model.Field)
*/
@Override
public double distance(final Field a, final Field b, final Config conf) {
@ -79,7 +80,6 @@ public class SubStringLevenstein extends AbstractComparator {
/*
* (non-Javadoc)
*
* @see eu.dnetlib.pace.compare.DistanceAlgo#getWeight()
*/
@Override
@ -89,7 +89,6 @@ public class SubStringLevenstein extends AbstractComparator {
/*
* (non-Javadoc)
*
* @see eu.dnetlib.pace.compare.SecondStringDistanceAlgo#normalize(double)
*/
@Override

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.tree;
import java.util.List;

View File

@ -1,14 +1,16 @@
package eu.dnetlib.pace.tree;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import org.apache.commons.lang3.StringUtils;
package eu.dnetlib.pace.tree;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.tree.support.ComparatorClass;
@ComparatorClass("urlMatcher")
public class UrlMatcher extends Levenstein {

View File

@ -1,12 +1,14 @@
package eu.dnetlib.pace.tree;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import org.apache.commons.lang3.StringUtils;
import java.util.Map;
/**
* Returns true if the years of the date fields in the given documents are the same, false when either of the two is invalid or missing.

View File

@ -1,15 +1,17 @@
package eu.dnetlib.pace.tree.support;
import java.util.List;
import java.util.Map;
import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.common.AbstractPaceFunctions;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.config.Type;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldList;
import java.util.List;
import java.util.Map;
public abstract class AbstractComparator extends AbstractPaceFunctions implements Comparator {
/** The ssalgo. */
@ -90,8 +92,10 @@ public abstract class AbstractComparator extends AbstractPaceFunctions implement
}
public double distance(final Field a, final Field b, final Config conf) {
if (a.getType().equals(Type.String) && b.getType().equals(Type.String)) return distance(a.stringValue(), b.stringValue(), conf);
if (a.getType().equals(Type.List) && b.getType().equals(Type.List)) return distance(toList(a), toList(b), conf);
if (a.getType().equals(Type.String) && b.getType().equals(Type.String))
return distance(a.stringValue(), b.stringValue(), conf);
if (a.getType().equals(Type.List) && b.getType().equals(Type.List))
return distance(toList(a), toList(b), conf);
throw new IllegalArgumentException("invalid types\n- A: " + a.toString() + "\n- B: " + b.toString());
}
@ -100,8 +104,10 @@ public abstract class AbstractComparator extends AbstractPaceFunctions implement
public double compare(final Field a, final Field b, final Config conf) {
if (a.isEmpty() || b.isEmpty())
return -1;
if (a.getType().equals(Type.String) && b.getType().equals(Type.String)) return distance(a.stringValue(), b.stringValue(), conf);
if (a.getType().equals(Type.List) && b.getType().equals(Type.List)) return distance(toList(a), toList(b), conf);
if (a.getType().equals(Type.String) && b.getType().equals(Type.String))
return distance(a.stringValue(), b.stringValue(), conf);
if (a.getType().equals(Type.List) && b.getType().equals(Type.List))
return distance(toList(a), toList(b), conf);
throw new IllegalArgumentException("invalid types\n- A: " + a.toString() + "\n- B: " + b.toString());
}
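
The AbstractComparator hunk above dispatches on the runtime type of the two fields (String vs List) and throws for mismatched types. Below is a hedged, standalone sketch of that dispatch pattern, with simplified stand-ins for Field and Type and a placeholder exact-match metric; none of these names are the pace model classes.

// Simplified stand-ins for Field and Type, only to illustrate the dispatch; not the pace model classes.
import java.util.List;

public class TypeDispatchSketch {

	enum Kind {
		STRING, LIST
	}

	interface Field {
		Kind getKind();

		String stringValue();

		List<String> listValue();
	}

	static double distance(Field a, Field b) {
		if (a.getKind() == Kind.STRING && b.getKind() == Kind.STRING)
			return a.stringValue().equalsIgnoreCase(b.stringValue()) ? 1.0 : 0.0;
		if (a.getKind() == Kind.LIST && b.getKind() == Kind.LIST)
			return a.listValue().equals(b.listValue()) ? 1.0 : 0.0;
		// Mixed types are a configuration error, mirrored by the exception in the hunk above.
		throw new IllegalArgumentException("invalid types\n- A: " + a + "\n- B: " + b);
	}

	static Field of(String s) {
		return new Field() {
			public Kind getKind() { return Kind.STRING; }
			public String stringValue() { return s; }
			public List<String> listValue() { return null; }
		};
	}

	public static void main(String[] args) {
		System.out.println(distance(of("CNR"), of("cnr"))); // 1.0 (placeholder exact-match metric)
	}
}
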

View File

@ -1,14 +1,16 @@
package eu.dnetlib.pace.tree.support;
import com.google.common.collect.Lists;
import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldList;
package eu.dnetlib.pace.tree.support;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import com.google.common.collect.Lists;
import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldList;
public abstract class AbstractSortedComparator extends AbstractComparator {
/**

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.tree.support;
import eu.dnetlib.pace.util.PaceException;
@ -6,18 +7,14 @@ public enum AggType {
W_MEAN, // weighted mean
AVG, // average
SUM,
MAX,
MIN,
AND, //used for necessary conditions
SUM, MAX, MIN, AND, // used for necessary conditions
OR; // used for sufficient conditions
public static AggType getEnum(String value) {
try {
return AggType.valueOf(value);
}
catch (IllegalArgumentException e) {
} catch (IllegalArgumentException e) {
throw new PaceException("Undefined aggregation type", e);
}
}
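
The AggType hunk above lists the aggregation strategies a tree node can use (weighted mean, average, sum, max, min, plus AND/OR for necessary and sufficient conditions) and a getEnum that wraps the IllegalArgumentException in a PaceException. A minimal sketch of the same parse-or-fail pattern, with a plain RuntimeException standing in for PaceException:

// Illustrative sketch of AggType-style parsing; ConfigException stands in for PaceException.
public class AggTypeSketch {

	enum AggType {
		W_MEAN, AVG, SUM, MAX, MIN, AND, OR;

		static AggType getEnum(String value) {
			try {
				return AggType.valueOf(value);
			} catch (IllegalArgumentException e) {
				// Fail fast with a domain-specific exception instead of leaking the raw one.
				throw new ConfigException("Undefined aggregation type: " + value, e);
			}
		}
	}

	static class ConfigException extends RuntimeException {
		ConfigException(String message, Throwable cause) {
			super(message, cause);
		}
	}

	public static void main(String[] args) {
		System.out.println(AggType.getEnum("W_MEAN")); // prints W_MEAN
		// AggType.getEnum("MEDIAN") would throw ConfigException
	}
}
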

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.tree.support;
import eu.dnetlib.pace.config.Config;
@ -6,9 +7,8 @@ import eu.dnetlib.pace.model.Field;
public interface Comparator {
/*
* return : -1 -> can't decide (i.e. missing field)
* >0 -> similarity degree (depends on the algorithm)
* */
* return : -1 -> can't decide (i.e. missing field) >0 -> similarity degree (depends on the algorithm)
*/
public double compare(Field a, Field b, Config conf);
}
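
The Comparator interface above documents a small but important contract: compare returns -1 when it cannot decide (for example, a missing field) and a value greater than or equal to 0 as the similarity degree. A hedged sketch of an implementation honoring that contract, with plain Strings standing in for Field and no Config:

// Illustrative only: a comparator that honors the "-1 = can't decide, >=0 = similarity" contract.
public class ExactMatchSketch {

	// Returns -1 when either value is missing, 1.0 on an exact (case-insensitive) match, 0.0 otherwise.
	static double compare(String a, String b) {
		if (a == null || a.isEmpty() || b == null || b.isEmpty())
			return -1; // can't decide: missing field
		return a.equalsIgnoreCase(b) ? 1.0 : 0.0;
	}

	public static void main(String[] args) {
		System.out.println(compare("OpenAIRE", "openaire")); // 1.0
		System.out.println(compare("", "openaire")); // -1.0
	}
}
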

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.tree.support;
import java.lang.annotation.ElementType;

View File

@ -1,13 +1,14 @@
package eu.dnetlib.pace.tree.support;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.pace.util.PaceException;
import java.io.IOException;
import java.io.Serializable;
import java.util.Map;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.pace.util.PaceException;
/**
* The class that defines the configuration of each field in the decision tree.
* */
@ -31,7 +32,8 @@ public class FieldConf implements Serializable {
public FieldConf() {
}
public FieldConf(String field, String comparator, double weight, Map<String, String> params, boolean countIfUndefined) {
public FieldConf(String field, String comparator, double weight, Map<String, String> params,
boolean countIfUndefined) {
this.field = field;
this.comparator = comparator;
this.weight = weight;

View File

@ -1,13 +1,14 @@
package eu.dnetlib.pace.tree.support;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.util.PaceException;
import java.io.IOException;
import java.io.Serializable;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.util.PaceException;
/**
* The class that contains the result of each comparison in the decision tree
* */

View File

@ -1,17 +1,15 @@
package eu.dnetlib.pace.tree.support;
public enum MatchType {
MATCH,
NO_MATCH,
UNDEFINED;
MATCH, NO_MATCH, UNDEFINED;
public static MatchType parse(String value) {
try {
return MatchType.valueOf(value);
}
catch (IllegalArgumentException e) {
} catch (IllegalArgumentException e) {
return MatchType.UNDEFINED; // return UNDEFINED if the enum is not parsable
}
}
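
Unlike AggType.getEnum, which fails fast, MatchType.parse above deliberately falls back to UNDEFINED when a tree definition contains an unknown label. A small sketch of that lenient-parse idiom:

// Illustrative sketch of the lenient enum parse used by MatchType.
public class MatchTypeSketch {

	enum MatchType {
		MATCH, NO_MATCH, UNDEFINED;

		static MatchType parse(String value) {
			try {
				return MatchType.valueOf(value);
			} catch (IllegalArgumentException | NullPointerException e) {
				return UNDEFINED; // unknown or missing labels are treated as "undefined", not as errors
			}
		}
	}

	public static void main(String[] args) {
		System.out.println(MatchType.parse("MATCH")); // MATCH
		System.out.println(MatchType.parse("whatever")); // UNDEFINED
	}
}
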

View File

@ -1,18 +1,21 @@
package eu.dnetlib.pace.tree.support;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.config.PaceConfig;
import eu.dnetlib.pace.model.MapDocument;
import eu.dnetlib.pace.util.PaceException;
import org.apache.commons.lang3.StringUtils;
package eu.dnetlib.pace.tree.support;
import java.io.IOException;
import java.io.Serializable;
import java.io.StringWriter;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.config.PaceConfig;
import eu.dnetlib.pace.model.MapDocument;
import eu.dnetlib.pace.util.PaceException;
public class TreeNodeDef implements Serializable {
final static String CROSS_COMPARE = "crossCompare";
@ -28,7 +31,8 @@ public class TreeNodeDef implements Serializable {
boolean ignoreUndefined;
public TreeNodeDef(List<FieldConf> fields, AggType aggregation, double threshold, String positive, String negative, String undefined, boolean ignoreUndefined) {
public TreeNodeDef(List<FieldConf> fields, AggType aggregation, double threshold, String positive, String negative,
String undefined, boolean ignoreUndefined) {
this.fields = fields;
this.aggregation = aggregation;
this.threshold = threshold;
@ -38,7 +42,8 @@ public class TreeNodeDef implements Serializable {
this.ignoreUndefined = ignoreUndefined;
}
public TreeNodeDef() {}
public TreeNodeDef() {
}
// function for the evaluation of the node
public TreeNodeStats evaluate(MapDocument doc1, MapDocument doc2, Config conf) {
@ -52,18 +57,24 @@ public class TreeNodeDef implements Serializable {
double result;
//if the param specifies a cross comparison (i.e. compare elements from different fields), compute the result for both sides and return the maximum
// if the param specifies a cross comparison (i.e. compare elements from different fields), compute the
// result for both sides and return the maximum
if (fieldConf.getParams().keySet().stream().anyMatch(k -> k.contains(CROSS_COMPARE))) {
String crossField = fieldConf.getParams().get(CROSS_COMPARE);
double result1 = comparator(fieldConf).compare(doc1.getFieldMap().get(fieldConf.getField()), doc2.getFieldMap().get(crossField), conf);
double result2 = comparator(fieldConf).compare(doc1.getFieldMap().get(crossField), doc2.getFieldMap().get(fieldConf.getField()), conf);
double result1 = comparator(fieldConf)
.compare(doc1.getFieldMap().get(fieldConf.getField()), doc2.getFieldMap().get(crossField), conf);
double result2 = comparator(fieldConf)
.compare(doc1.getFieldMap().get(crossField), doc2.getFieldMap().get(fieldConf.getField()), conf);
result = Math.max(result1, result2);
}
else {
result = comparator(fieldConf).compare(doc1.getFieldMap().get(fieldConf.getField()), doc2.getFieldMap().get(fieldConf.getField()), conf);
} else {
result = comparator(fieldConf)
.compare(
doc1.getFieldMap().get(fieldConf.getField()), doc2.getFieldMap().get(fieldConf.getField()),
conf);
}
stats.addFieldStats(
stats
.addFieldStats(
fieldConf.getComparator() + " on " + fieldConf.getField() + " " + fields.indexOf(fieldConf),
new FieldStats(
weight,
@ -71,8 +82,7 @@ public class TreeNodeDef implements Serializable {
result,
fieldConf.isCountIfUndefined(),
doc1.getFieldMap().get(fieldConf.getField()),
doc2.getFieldMap().get(fieldConf.getField())
));
doc2.getFieldMap().get(fieldConf.getField())));
}
return stats;
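
The TreeNodeDef hunk above introduces the crossCompare option: when set, the comparator is applied in both directions (the configured field of one document against the cross field of the other, and vice versa) and the maximum of the two scores is kept. A standalone sketch of that symmetric-max pattern, using plain maps and a BiFunction instead of MapDocument and a Comparator; the field names in main are example data only.

import java.util.HashMap;
import java.util.Map;
import java.util.function.BiFunction;

// Illustrative sketch of the crossCompare behaviour: score both directions, keep the maximum.
public class CrossCompareSketch {

	static double crossCompare(
		Map<String, String> doc1, Map<String, String> doc2,
		String field, String crossField,
		BiFunction<String, String, Double> comparator) {

		double forward = comparator.apply(doc1.get(field), doc2.get(crossField));
		double backward = comparator.apply(doc1.get(crossField), doc2.get(field));
		return Math.max(forward, backward);
	}

	public static void main(String[] args) {
		Map<String, String> a = new HashMap<>();
		a.put("legalname", "National Research Council");
		a.put("legalshortname", "CNR");
		Map<String, String> b = new HashMap<>();
		b.put("legalname", "CNR");
		b.put("legalshortname", "N.R.C.");

		// Exact-match comparator as a null-safe placeholder.
		BiFunction<String, String, Double> eq = (x, y) -> (x != null && x.equalsIgnoreCase(y)) ? 1.0 : 0.0;
		System.out.println(crossCompare(a, b, "legalname", "legalshortname", eq)); // 1.0
	}
}
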

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.tree.support;
import java.io.Serializable;
@ -100,8 +101,7 @@ public class TreeNodeStats implements Serializable {
if (fieldStats.getResult() == -1) {
if (fieldStats.isCountIfUndefined())
return 0.0;
}
else {
} else {
if (fieldStats.getResult() < fieldStats.getThreshold())
return 0.0;
}

View File

@ -1,11 +1,12 @@
package eu.dnetlib.pace.tree.support;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.MapDocument;
import eu.dnetlib.pace.util.PaceException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* The comparison between two documents is given by the weighted mean of the field distances
@ -48,8 +49,7 @@ public class TreeProcessor{
// if ignoreUndefined=true the miss is ignored and the score computed anyway
else if (stats.getFinalScore(currentNode.getAggregation()) >= currentNode.getThreshold()) {
current = currentNode.getPositive();
}
else {
} else {
current = currentNode.getNegative();
}
@ -80,8 +80,7 @@ public class TreeProcessor{
// if ignoreUndefined=true the miss is ignored and the score computed anyway
else if (stats.getFinalScore(currentNode.getAggregation()) >= currentNode.getThreshold()) {
current = currentNode.getPositive();
}
else {
} else {
current = currentNode.getNegative();
}
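
The TreeProcessor hunks above walk the decision tree: each node's aggregated score is checked against its threshold and the walk continues to the node's positive or negative child until a leaf (a match decision) is reached. A hedged sketch of that traversal loop over a simplified node structure, with per-node scores supplied as a map instead of being computed from documents:

import java.util.HashMap;
import java.util.Map;

// Illustrative sketch of a threshold-driven decision-tree walk, loosely modelled on TreeProcessor.
public class TreeWalkSketch {

	static final class Node {
		final double threshold;
		final String positive; // next node (or leaf label) when score >= threshold
		final String negative; // next node (or leaf label) otherwise

		Node(double threshold, String positive, String negative) {
			this.threshold = threshold;
			this.positive = positive;
			this.negative = negative;
		}
	}

	// Walks from "start" until it reaches a label that is not a node name (e.g. MATCH / NO_MATCH).
	static String walk(Map<String, Node> tree, Map<String, Double> scores) {
		String current = "start";
		while (tree.containsKey(current)) {
			Node node = tree.get(current);
			double score = scores.getOrDefault(current, 0.0);
			current = score >= node.threshold ? node.positive : node.negative;
		}
		return current;
	}

	public static void main(String[] args) {
		Map<String, Node> tree = new HashMap<>();
		tree.put("start", new Node(0.5, "layer2", "NO_MATCH"));
		tree.put("layer2", new Node(0.9, "MATCH", "NO_MATCH"));

		Map<String, Double> scores = new HashMap<>();
		scores.put("start", 0.7);
		scores.put("layer2", 0.95);

		System.out.println(walk(tree, scores)); // MATCH
	}
}
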

View File

@ -1,12 +1,14 @@
package eu.dnetlib.pace.tree.support;
import eu.dnetlib.pace.util.PaceException;
import com.fasterxml.jackson.databind.ObjectMapper;
package eu.dnetlib.pace.tree.support;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.pace.util.PaceException;
public class TreeStats {
// <layer_id, <field:comparator, result>>
@ -47,5 +49,4 @@ public class TreeStats {
}
}
}

View File

@ -1,18 +1,21 @@
package eu.dnetlib.pace.util;
import com.google.common.collect.Lists;
import eu.dnetlib.pace.clustering.NGramUtils;
import eu.dnetlib.pace.config.DedupConfig;
import eu.dnetlib.pace.config.WfConfig;
import eu.dnetlib.pace.tree.support.TreeProcessor;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.MapDocument;
import eu.dnetlib.pace.model.MapDocumentComparator;
import java.util.*;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import java.util.*;
import com.google.common.collect.Lists;
import eu.dnetlib.pace.clustering.NGramUtils;
import eu.dnetlib.pace.config.DedupConfig;
import eu.dnetlib.pace.config.WfConfig;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.MapDocument;
import eu.dnetlib.pace.model.MapDocumentComparator;
import eu.dnetlib.pace.tree.support.TreeProcessor;
public class BlockProcessor {
@ -24,11 +27,24 @@ public class BlockProcessor {
public static void constructAccumulator(final DedupConfig dedupConf) {
accumulators.add(String.format("%s::%s", dedupConf.getWf().getEntityType(), "records per hash key = 1"));
accumulators.add(String.format("%s::%s",dedupConf.getWf().getEntityType(), "missing " + dedupConf.getWf().getOrderField()));
accumulators.add(String.format("%s::%s",dedupConf.getWf().getEntityType(), String.format("Skipped records for count(%s) >= %s", dedupConf.getWf().getOrderField(), dedupConf.getWf().getGroupMaxSize())));
accumulators
.add(
String
.format(
"%s::%s", dedupConf.getWf().getEntityType(), "missing " + dedupConf.getWf().getOrderField()));
accumulators
.add(
String
.format(
"%s::%s", dedupConf.getWf().getEntityType(),
String
.format(
"Skipped records for count(%s) >= %s", dedupConf.getWf().getOrderField(),
dedupConf.getWf().getGroupMaxSize())));
accumulators.add(String.format("%s::%s", dedupConf.getWf().getEntityType(), "skip list"));
accumulators.add(String.format("%s::%s", dedupConf.getWf().getEntityType(), "dedupSimilarity (x2)"));
accumulators.add(String.format("%s::%s",dedupConf.getWf().getEntityType(), "d < " + dedupConf.getWf().getThreshold()));
accumulators
.add(String.format("%s::%s", dedupConf.getWf().getEntityType(), "d < " + dedupConf.getWf().getThreshold()));
}
public BlockProcessor(DedupConfig dedupConf) {
@ -59,7 +75,8 @@ public class BlockProcessor {
}
private Queue<MapDocument> prepare(final Iterable<MapDocument> documents) {
final Queue<MapDocument> queue = new PriorityQueue<>(100, new MapDocumentComparator(dedupConf.getWf().getOrderField()));
final Queue<MapDocument> queue = new PriorityQueue<>(100,
new MapDocumentComparator(dedupConf.getWf().getOrderField()));
final Set<String> seen = new HashSet<String>();
final int queueMaxSize = dedupConf.getWf().getQueueMaxSize();
@ -78,7 +95,8 @@ public class BlockProcessor {
return queue;
}
private Queue<MapDocument> simplifyQueue(final Queue<MapDocument> queue, final String ngram, final Reporter context) {
private Queue<MapDocument> simplifyQueue(final Queue<MapDocument> queue, final String ngram,
final Reporter context) {
final Queue<MapDocument> q = new LinkedList<>();
String fieldRef = "";
@ -100,7 +118,9 @@ public class BlockProcessor {
fieldRef = field;
}
} else {
context.incrementCounter(dedupConf.getWf().getEntityType(), "missing " + dedupConf.getWf().getOrderField(), 1);
context
.incrementCounter(
dedupConf.getWf().getEntityType(), "missing " + dedupConf.getWf().getOrderField(), 1);
}
}
populateSimplifiedQueue(q, tempResults, context, fieldRef, ngram);
@ -117,7 +137,11 @@ public class BlockProcessor {
if (tempResults.size() < wf.getGroupMaxSize()) {
q.addAll(tempResults);
} else {
context.incrementCounter(wf.getEntityType(), String.format("Skipped records for count(%s) >= %s", wf.getOrderField(), wf.getGroupMaxSize()), tempResults.size());
context
.incrementCounter(
wf.getEntityType(),
String.format("Skipped records for count(%s) >= %s", wf.getOrderField(), wf.getGroupMaxSize()),
tempResults.size());
// log.info("Skipped field: " + fieldRef + " - size: " + tempResults.size() + " - ngram: " + ngram);
}
}
@ -150,7 +174,8 @@ public class BlockProcessor {
}
final Field fieldsCurr = curr.values(wf.getOrderField());
final String fieldCurr = (fieldsCurr == null) || fieldsCurr.isEmpty() ? null : fieldsCurr.stringValue();
final String fieldCurr = (fieldsCurr == null) || fieldsCurr.isEmpty() ? null
: fieldsCurr.stringValue();
if (!idCurr.equals(idPivot) && (fieldCurr != null)) {
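
The prepare method shown in the BlockProcessor hunks above builds a PriorityQueue ordered by the workflow's order field, deduplicates identifiers through a seen set, and caps the queue at queueMaxSize so oversized blocks do not blow up the sliding-window comparison. A standalone sketch of that bounded, ordered queue; the Doc class and its title field are illustrative stand-ins for MapDocument and the configured order field.

import java.util.Arrays;
import java.util.Comparator;
import java.util.HashSet;
import java.util.PriorityQueue;
import java.util.Queue;
import java.util.Set;

// Illustrative sketch of the bounded, order-field-driven queue built in BlockProcessor.prepare().
public class BoundedQueueSketch {

	static final class Doc {
		final String id;
		final String title; // stands in for the configured "order field"

		Doc(String id, String title) {
			this.id = id;
			this.title = title;
		}
	}

	static Queue<Doc> prepare(Iterable<Doc> documents, int queueMaxSize) {
		Queue<Doc> queue = new PriorityQueue<>(100, Comparator.comparing((Doc d) -> d.title));
		Set<String> seen = new HashSet<>();
		for (Doc doc : documents) {
			if (queue.size() >= queueMaxSize)
				break; // cap the block size
			if (seen.add(doc.id)) // skip duplicate identifiers
				queue.add(doc);
		}
		return queue;
	}

	public static void main(String[] args) {
		Queue<Doc> q = prepare(
			Arrays.asList(new Doc("1", "b title"), new Doc("2", "a title"), new Doc("1", "b title")), 100);
		System.out.println(q.peek().title); // "a title" comes out first
	}
}
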

View File

@ -1,6 +1,14 @@
package eu.dnetlib.pace.util;
import java.util.*;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.google.common.collect.Lists;
import eu.dnetlib.pace.clustering.NGramUtils;
import eu.dnetlib.pace.config.DedupConfig;
import eu.dnetlib.pace.config.WfConfig;
@ -9,11 +17,6 @@ import eu.dnetlib.pace.model.MapDocument;
import eu.dnetlib.pace.model.MapDocumentComparator;
import eu.dnetlib.pace.tree.*;
import eu.dnetlib.pace.tree.support.TreeProcessor;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import java.util.*;
public class BlockProcessorForTesting {
@ -25,18 +28,32 @@ public class BlockProcessorForTesting {
public static void constructAccumulator(final DedupConfig dedupConf) {
accumulators.add(String.format("%s::%s", dedupConf.getWf().getEntityType(), "records per hash key = 1"));
accumulators.add(String.format("%s::%s",dedupConf.getWf().getEntityType(), "missing " + dedupConf.getWf().getOrderField()));
accumulators.add(String.format("%s::%s",dedupConf.getWf().getEntityType(), String.format("Skipped records for count(%s) >= %s", dedupConf.getWf().getOrderField(), dedupConf.getWf().getGroupMaxSize())));
accumulators
.add(
String
.format(
"%s::%s", dedupConf.getWf().getEntityType(), "missing " + dedupConf.getWf().getOrderField()));
accumulators
.add(
String
.format(
"%s::%s", dedupConf.getWf().getEntityType(),
String
.format(
"Skipped records for count(%s) >= %s", dedupConf.getWf().getOrderField(),
dedupConf.getWf().getGroupMaxSize())));
accumulators.add(String.format("%s::%s", dedupConf.getWf().getEntityType(), "skip list"));
accumulators.add(String.format("%s::%s", dedupConf.getWf().getEntityType(), "dedupSimilarity (x2)"));
accumulators.add(String.format("%s::%s",dedupConf.getWf().getEntityType(), "d < " + dedupConf.getWf().getThreshold()));
accumulators
.add(String.format("%s::%s", dedupConf.getWf().getEntityType(), "d < " + dedupConf.getWf().getThreshold()));
}
public BlockProcessorForTesting(DedupConfig dedupConf) {
this.dedupConf = dedupConf;
}
public void processSortedBlock(final String key, final List<MapDocument> documents, final Reporter context, boolean useTree, boolean noMatch) {
public void processSortedBlock(final String key, final List<MapDocument> documents, final Reporter context,
boolean useTree, boolean noMatch) {
if (documents.size() > 1) {
// log.info("reducing key: '" + key + "' records: " + q.size());
process(prepare(documents), context, useTree, noMatch);
@ -46,7 +63,8 @@ public class BlockProcessorForTesting {
}
}
public void process(final String key, final Iterable<MapDocument> documents, final Reporter context, boolean useTree, boolean noMatch) {
public void process(final String key, final Iterable<MapDocument> documents, final Reporter context,
boolean useTree, boolean noMatch) {
final Queue<MapDocument> q = prepare(documents);
@ -60,7 +78,8 @@ public class BlockProcessorForTesting {
}
private Queue<MapDocument> prepare(final Iterable<MapDocument> documents) {
final Queue<MapDocument> queue = new PriorityQueue<>(100, new MapDocumentComparator(dedupConf.getWf().getOrderField()));
final Queue<MapDocument> queue = new PriorityQueue<>(100,
new MapDocumentComparator(dedupConf.getWf().getOrderField()));
final Set<String> seen = new HashSet<String>();
final int queueMaxSize = dedupConf.getWf().getQueueMaxSize();
@ -79,7 +98,8 @@ public class BlockProcessorForTesting {
return queue;
}
private Queue<MapDocument> simplifyQueue(final Queue<MapDocument> queue, final String ngram, final Reporter context) {
private Queue<MapDocument> simplifyQueue(final Queue<MapDocument> queue, final String ngram,
final Reporter context) {
final Queue<MapDocument> q = new LinkedList<>();
String fieldRef = "";
@ -101,7 +121,9 @@ public class BlockProcessorForTesting {
fieldRef = field;
}
} else {
context.incrementCounter(dedupConf.getWf().getEntityType(), "missing " + dedupConf.getWf().getOrderField(), 1);
context
.incrementCounter(
dedupConf.getWf().getEntityType(), "missing " + dedupConf.getWf().getOrderField(), 1);
}
}
populateSimplifiedQueue(q, tempResults, context, fieldRef, ngram);
@ -118,7 +140,11 @@ public class BlockProcessorForTesting {
if (tempResults.size() < wf.getGroupMaxSize()) {
q.addAll(tempResults);
} else {
context.incrementCounter(wf.getEntityType(), String.format("Skipped records for count(%s) >= %s", wf.getOrderField(), wf.getGroupMaxSize()), tempResults.size());
context
.incrementCounter(
wf.getEntityType(),
String.format("Skipped records for count(%s) >= %s", wf.getOrderField(), wf.getGroupMaxSize()),
tempResults.size());
// log.info("Skipped field: " + fieldRef + " - size: " + tempResults.size() + " - ngram: " + ngram);
}
}
@ -151,16 +177,17 @@ public class BlockProcessorForTesting {
}
final Field fieldsCurr = curr.values(wf.getOrderField());
final String fieldCurr = (fieldsCurr == null) || fieldsCurr.isEmpty() ? null : fieldsCurr.stringValue();
final String fieldCurr = (fieldsCurr == null) || fieldsCurr.isEmpty() ? null
: fieldsCurr.stringValue();
if (!idCurr.equals(idPivot) && (fieldCurr != null)) {
// draws no match relations (test purpose)
if (noMatch) {
emitOutput(!new TreeProcessor(dedupConf).compare(pivot, curr), idPivot, idCurr, context);
}
else {
//use the decision tree implementation or the "normal" implementation of the similarity score (valid only for publications)
} else {
// use the decision tree implementation or the "normal" implementation of the similarity
// score (valid only for publications)
if (useTree)
emitOutput(new TreeProcessor(dedupConf).compare(pivot, curr), idPivot, idCurr, context);
else
@ -179,7 +206,8 @@ public class BlockProcessorForTesting {
protected static boolean compareInstanceType(MapDocument a, MapDocument b, DedupConfig conf) {
Map<String, String> params = new HashMap<>();
InstanceTypeMatch instanceTypeMatch = new InstanceTypeMatch(params);
double compare = instanceTypeMatch.compare(a.getFieldMap().get("instance"), b.getFieldMap().get("instance"), conf);
double compare = instanceTypeMatch
.compare(a.getFieldMap().get("instance"), b.getFieldMap().get("instance"), conf);
return compare >= 1.0;
}

View File

@ -1,12 +1,15 @@
package eu.dnetlib.pace.util;
import org.apache.commons.lang3.text.WordUtils;
import com.google.common.base.Function;
import org.apache.commons.lang3.text.WordUtils;
public class Capitalise implements Function<String, String> {
private final char[] DELIM = {' ', '-'};
private final char[] DELIM = {
' ', '-'
};
@Override
public String apply(final String s) {
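
Capitalise above wraps commons-lang's WordUtils.capitalize with a custom delimiter set (space and hyphen) behind a Guava Function. A minimal sketch of the same idea; WordUtils.capitalize(String, char...) comes from org.apache.commons.lang3.text, deprecated in recent versions but still available.

import org.apache.commons.lang3.text.WordUtils;

// Illustrative sketch: capitalize words separated by spaces or hyphens, as Capitalise does.
public class CapitaliseSketch {

	private static final char[] DELIM = {
		' ', '-'
	};

	static String capitalise(String s) {
		return WordUtils.capitalize(s.toLowerCase(), DELIM);
	}

	public static void main(String[] args) {
		System.out.println(capitalise("jean-claude van damme")); // Jean-Claude Van Damme
	}
}
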

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.util;
/*
@ -17,7 +18,6 @@ package eu.dnetlib.pace.util;
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.net.URLEncoder;
@ -94,10 +94,8 @@ public class DiffPatchMatch {
}
}
// DIFF FUNCTIONS
/**
* The data structure representing a diff is a Linked list of Diff objects:
* {Diff(Operation.DELETE, "Hello"), Diff(Operation.INSERT, "Goodbye"),
@ -230,8 +228,7 @@ public class DiffPatchMatch {
int i = longtext.indexOf(shorttext);
if (i != -1) {
// Shorter text is inside the longer text (speedup).
Operation op = (text1.length() > text2.length()) ?
Operation.DELETE : Operation.INSERT;
Operation op = (text1.length() > text2.length()) ? Operation.DELETE : Operation.INSERT;
diffs.add(new Diff(op, longtext.substring(0, i)));
diffs.add(new Diff(Operation.EQUAL, shorttext));
diffs.add(new Diff(op, longtext.substring(i + shorttext.length())));
@ -256,9 +253,11 @@ public class DiffPatchMatch {
String text2_b = hm[3];
String mid_common = hm[4];
// Send both pairs off for separate processing.
LinkedList<Diff> diffs_a = diff_main(text1_a, text2_a,
LinkedList<Diff> diffs_a = diff_main(
text1_a, text2_a,
checklines, deadline);
LinkedList<Diff> diffs_b = diff_main(text1_b, text2_b,
LinkedList<Diff> diffs_b = diff_main(
text1_b, text2_b,
checklines, deadline);
// Merge the results.
diffs = diffs_a;
@ -326,7 +325,8 @@ public class DiffPatchMatch {
pointer.previous();
pointer.remove();
}
for (Diff subDiff : diff_main(text_delete, text_insert, false,
for (Diff subDiff : diff_main(
text_delete, text_insert, false,
deadline)) {
pointer.add(subDiff);
}
@ -431,8 +431,7 @@ public class DiffPatchMatch {
}
int y2 = x2 - k2;
while (x2 < text1_length && y2 < text2_length
&& text1.charAt(text1_length - x2 - 1)
== text2.charAt(text2_length - y2 - 1)) {
&& text1.charAt(text1_length - x2 - 1) == text2.charAt(text2_length - y2 - 1)) {
x2++;
y2++;
}
@ -652,7 +651,9 @@ public class DiffPatchMatch {
return best;
}
length += found;
if (found == 0 || text1.substring(text_length - length).equals(
if (found == 0 || text1
.substring(text_length - length)
.equals(
text2.substring(0, length))) {
best = length;
length++;
@ -682,10 +683,12 @@ public class DiffPatchMatch {
}
// First check if the second quarter is the seed for a half-match.
String[] hm1 = diff_halfMatchI(longtext, shorttext,
String[] hm1 = diff_halfMatchI(
longtext, shorttext,
(longtext.length() + 3) / 4);
// Check again based on the third quarter.
String[] hm2 = diff_halfMatchI(longtext, shorttext,
String[] hm2 = diff_halfMatchI(
longtext, shorttext,
(longtext.length() + 1) / 2);
String[] hm;
if (hm1 == null && hm2 == null) {
@ -704,7 +707,9 @@ public class DiffPatchMatch {
return hm;
// return new String[]{hm[0], hm[1], hm[2], hm[3], hm[4]};
} else {
return new String[]{hm[2], hm[3], hm[0], hm[1], hm[4]};
return new String[] {
hm[2], hm[3], hm[0], hm[1], hm[4]
};
}
}
@ -726,9 +731,11 @@ public class DiffPatchMatch {
String best_longtext_a = "", best_longtext_b = "";
String best_shorttext_a = "", best_shorttext_b = "";
while ((j = shorttext.indexOf(seed, j + 1)) != -1) {
int prefixLength = diff_commonPrefix(longtext.substring(i),
int prefixLength = diff_commonPrefix(
longtext.substring(i),
shorttext.substring(j));
int suffixLength = diff_commonSuffix(longtext.substring(0, i),
int suffixLength = diff_commonSuffix(
longtext.substring(0, i),
shorttext.substring(0, j));
if (best_common.length() < suffixLength + prefixLength) {
best_common = shorttext.substring(j - suffixLength, j)
@ -740,8 +747,10 @@ public class DiffPatchMatch {
}
}
if (best_common.length() * 2 >= longtext.length()) {
return new String[]{best_longtext_a, best_longtext_b,
best_shorttext_a, best_shorttext_b, best_common};
return new String[] {
best_longtext_a, best_longtext_b,
best_shorttext_a, best_shorttext_b, best_common
};
} else {
return null;
}
@ -784,10 +793,8 @@ public class DiffPatchMatch {
}
// Eliminate an equality that is smaller or equal to the edits on both
// sides of it.
if (lastEquality != null && (lastEquality.length()
<= Math.max(length_insertions1, length_deletions1))
&& (lastEquality.length()
<= Math.max(length_insertions2, length_deletions2))) {
if (lastEquality != null && (lastEquality.length() <= Math.max(length_insertions1, length_deletions1))
&& (lastEquality.length() <= Math.max(length_insertions2, length_deletions2))) {
// System.out.println("Splitting: '" + lastEquality + "'");
// Walk back to offending equality.
while (thisDiff != equalities.peek()) {
@ -862,10 +869,11 @@ public class DiffPatchMatch {
overlap_length1 >= insertion.length() / 2.0) {
// Overlap found. Insert an equality and trim the surrounding edits.
pointer.previous();
pointer.add(new Diff(Operation.EQUAL,
pointer
.add(
new Diff(Operation.EQUAL,
insertion.substring(0, overlap_length1)));
prevDiff.text =
deletion.substring(0, deletion.length() - overlap_length1);
prevDiff.text = deletion.substring(0, deletion.length() - overlap_length1);
thisDiff.text = insertion.substring(overlap_length1);
// pointer.add inserts the element before the cursor, so there is
// no need to step past the new element.
@ -876,11 +884,12 @@ public class DiffPatchMatch {
// Reverse overlap found.
// Insert an equality and swap and trim the surrounding edits.
pointer.previous();
pointer.add(new Diff(Operation.EQUAL,
pointer
.add(
new Diff(Operation.EQUAL,
deletion.substring(0, overlap_length2)));
prevDiff.operation = Operation.INSERT;
prevDiff.text =
insertion.substring(0, insertion.length() - overlap_length2);
prevDiff.text = insertion.substring(0, insertion.length() - overlap_length2);
thisDiff.operation = Operation.DELETE;
thisDiff.text = deletion.substring(overlap_length2);
// pointer.add inserts the element before the cursor, so there is
@ -1031,10 +1040,8 @@ public class DiffPatchMatch {
}
// Define some regex patterns for matching boundaries.
private Pattern BLANKLINEEND
= Pattern.compile("\\n\\r?\\n\\Z", Pattern.DOTALL);
private Pattern BLANKLINESTART
= Pattern.compile("\\A\\r?\\n\\r?\\n", Pattern.DOTALL);
private Pattern BLANKLINEEND = Pattern.compile("\\n\\r?\\n\\Z", Pattern.DOTALL);
private Pattern BLANKLINESTART = Pattern.compile("\\A\\r?\\n\\r?\\n", Pattern.DOTALL);
/**
* Reduce the number of edits by eliminating operationally trivial equalities.
@ -1082,12 +1089,9 @@ public class DiffPatchMatch {
post_ins = true;
}
/*
* Five types to be split:
* <ins>A</ins><del>B</del>XY<ins>C</ins><del>D</del>
* <ins>A</ins>X<ins>C</ins><del>D</del>
* <ins>A</ins><del>B</del>X<ins>C</ins>
* <ins>A</del>X<ins>C</ins><del>D</del>
* <ins>A</ins><del>B</del>X<del>C</del>
* Five types to be split: <ins>A</ins><del>B</del>XY<ins>C</ins><del>D</del>
* <ins>A</ins>X<ins>C</ins><del>D</del> <ins>A</ins><del>B</del>X<ins>C</ins>
* <ins>A</del>X<ins>C</ins><del>D</del> <ins>A</ins><del>B</del>X<del>C</del>
*/
if (lastEquality != null
&& ((pre_ins && pre_del && post_ins && post_del)
@ -1189,12 +1193,13 @@ public class DiffPatchMatch {
if (commonlength != 0) {
if (pointer.hasPrevious()) {
thisDiff = pointer.previous();
assert thisDiff.operation == Operation.EQUAL
: "Previous diff should have been an equality.";
assert thisDiff.operation == Operation.EQUAL : "Previous diff should have been an equality.";
thisDiff.text += text_insert.substring(0, commonlength);
pointer.next();
} else {
pointer.add(new Diff(Operation.EQUAL,
pointer
.add(
new Diff(Operation.EQUAL,
text_insert.substring(0, commonlength)));
}
text_insert = text_insert.substring(commonlength);
@ -1204,11 +1209,18 @@ public class DiffPatchMatch {
commonlength = diff_commonSuffix(text_insert, text_delete);
if (commonlength != 0) {
thisDiff = pointer.next();
thisDiff.text = text_insert.substring(text_insert.length()
- commonlength) + thisDiff.text;
text_insert = text_insert.substring(0, text_insert.length()
thisDiff.text = text_insert
.substring(
text_insert.length()
- commonlength)
+ thisDiff.text;
text_insert = text_insert
.substring(
0, text_insert.length()
- commonlength);
text_delete = text_delete.substring(0, text_delete.length()
text_delete = text_delete
.substring(
0, text_delete.length()
- commonlength);
pointer.previous();
}
@ -1243,9 +1255,8 @@ public class DiffPatchMatch {
}
/*
* Second pass: look for single edits surrounded on both sides by equalities
* which can be shifted sideways to eliminate an equality.
* e.g: A<ins>BA</ins>C -> <ins>AB</ins>AC
* Second pass: look for single edits surrounded on both sides by equalities which can be shifted sideways to
* eliminate an equality. e.g: A<ins>BA</ins>C -> <ins>AB</ins>AC
*/
boolean changes = false;
// Create a new iterator at the start.
@ -1262,7 +1273,9 @@ public class DiffPatchMatch {
if (thisDiff.text.endsWith(prevDiff.text)) {
// Shift the edit over the previous equality.
thisDiff.text = prevDiff.text
+ thisDiff.text.substring(0, thisDiff.text.length()
+ thisDiff.text
.substring(
0, thisDiff.text.length()
- prevDiff.text.length());
nextDiff.text = prevDiff.text + nextDiff.text;
pointer.previous(); // Walk past nextDiff.
@ -1340,15 +1353,22 @@ public class DiffPatchMatch {
public String diff_prettyHtml(List<Diff> diffs) {
StringBuilder html = new StringBuilder();
for (Diff aDiff : diffs) {
String text = aDiff.text.replace("&", "&amp;").replace("<", "&lt;")
.replace(">", "&gt;").replace("\n", "&para;<br>");
String text = aDiff.text
.replace("&", "&amp;")
.replace("<", "&lt;")
.replace(">", "&gt;")
.replace("\n", "&para;<br>");
switch (aDiff.operation) {
case INSERT:
html.append("<ins style=\"background:#e6ffe6;\">").append(text)
html
.append("<ins style=\"background:#e6ffe6;\">")
.append(text)
.append("</ins>");
break;
case DELETE:
html.append("<del style=\"background:#ffe6e6;\">").append(text)
html
.append("<del style=\"background:#ffe6e6;\">")
.append(text)
.append("</del>");
break;
case EQUAL:
@ -1433,8 +1453,13 @@ public class DiffPatchMatch {
switch (aDiff.operation) {
case INSERT:
try {
text.append("+").append(URLEncoder.encode(aDiff.text, "UTF-8")
.replace('+', ' ')).append("\t");
text
.append("+")
.append(
URLEncoder
.encode(aDiff.text, "UTF-8")
.replace('+', ' '))
.append("\t");
} catch (UnsupportedEncodingException e) {
// Not likely on modern system.
throw new Error("This system does not support UTF-8.", e);
@ -1535,10 +1560,8 @@ public class DiffPatchMatch {
return diffs;
}
// MATCH FUNCTIONS
/**
* Locate the best instance of 'pattern' in 'text' near 'loc'.
* Returns -1 if no match found.
@ -1579,8 +1602,7 @@ public class DiffPatchMatch {
* @return Best match index or -1.
*/
protected int match_bitap(String text, String pattern, int loc) {
assert (Match_MaxBits == 0 || pattern.length() <= Match_MaxBits)
: "Pattern too long for this application.";
assert (Match_MaxBits == 0 || pattern.length() <= Match_MaxBits) : "Pattern too long for this application.";
// Initialise the alphabet.
Map<Character, Integer> s = match_alphabet(pattern);
@ -1590,12 +1612,16 @@ public class DiffPatchMatch {
// Is there a nearby exact match? (speedup)
int best_loc = text.indexOf(pattern, loc);
if (best_loc != -1) {
score_threshold = Math.min(match_bitapScore(0, best_loc, loc, pattern),
score_threshold = Math
.min(
match_bitapScore(0, best_loc, loc, pattern),
score_threshold);
// What about in the other direction? (speedup)
best_loc = text.lastIndexOf(pattern, loc + pattern.length());
if (best_loc != -1) {
score_threshold = Math.min(match_bitapScore(0, best_loc, loc, pattern),
score_threshold = Math
.min(
match_bitapScore(0, best_loc, loc, pattern),
score_threshold);
}
}
@ -1615,8 +1641,7 @@ public class DiffPatchMatch {
bin_min = 0;
bin_mid = bin_max;
while (bin_min < bin_mid) {
if (match_bitapScore(d, loc + bin_mid, loc, pattern)
<= score_threshold) {
if (match_bitapScore(d, loc + bin_mid, loc, pattern) <= score_threshold) {
bin_min = bin_mid;
} else {
bin_max = bin_mid;
@ -1710,10 +1735,8 @@ public class DiffPatchMatch {
return s;
}
// PATCH FUNCTIONS
/**
* Increase the context until it is unique,
* but don't let the pattern expand beyond Match_MaxBits.
@ -1732,20 +1755,26 @@ public class DiffPatchMatch {
while (text.indexOf(pattern) != text.lastIndexOf(pattern)
&& pattern.length() < Match_MaxBits - Patch_Margin - Patch_Margin) {
padding += Patch_Margin;
pattern = text.substring(Math.max(0, patch.start2 - padding),
pattern = text
.substring(
Math.max(0, patch.start2 - padding),
Math.min(text.length(), patch.start2 + patch.length1 + padding));
}
// Add one chunk for good luck.
padding += Patch_Margin;
// Add the prefix.
String prefix = text.substring(Math.max(0, patch.start2 - padding),
String prefix = text
.substring(
Math.max(0, patch.start2 - padding),
patch.start2);
if (prefix.length() != 0) {
patch.diffs.addFirst(new Diff(Operation.EQUAL, prefix));
}
// Add the suffix.
String suffix = text.substring(patch.start2 + patch.length1,
String suffix = text
.substring(
patch.start2 + patch.length1,
Math.min(text.length(), patch.start2 + patch.length1 + padding));
if (suffix.length() != 0) {
patch.diffs.addLast(new Diff(Operation.EQUAL, suffix));
@ -1803,7 +1832,8 @@ public class DiffPatchMatch {
* @return LinkedList of Patch objects.
* @deprecated Prefer patch_make(String text1, LinkedList<Diff> diffs).
*/
@Deprecated public LinkedList<Patch> patch_make(String text1, String text2,
@Deprecated
public LinkedList<Patch> patch_make(String text1, String text2,
LinkedList<Diff> diffs) {
return patch_make(text1, diffs);
}
@ -1927,7 +1957,9 @@ public class DiffPatchMatch {
*/
public Object[] patch_apply(LinkedList<Patch> patches, String text) {
if (patches.isEmpty()) {
return new Object[]{text, new boolean[0]};
return new Object[] {
text, new boolean[0]
};
}
// Deep copy the patches so that no changes are made to originals.
@ -1952,10 +1984,12 @@ public class DiffPatchMatch {
if (text1.length() > this.Match_MaxBits) {
// patch_splitMax will only provide an oversized pattern in the case of
// a monster delete.
start_loc = match_main(text,
start_loc = match_main(
text,
text1.substring(0, this.Match_MaxBits), expected_loc);
if (start_loc != -1) {
end_loc = match_main(text,
end_loc = match_main(
text,
text1.substring(text1.length() - this.Match_MaxBits),
expected_loc + text1.length() - this.Match_MaxBits);
if (end_loc == -1 || start_loc >= end_loc) {
@ -1977,10 +2011,14 @@ public class DiffPatchMatch {
delta = start_loc - expected_loc;
String text2;
if (end_loc == -1) {
text2 = text.substring(start_loc,
text2 = text
.substring(
start_loc,
Math.min(start_loc + text1.length(), text.length()));
} else {
text2 = text.substring(start_loc,
text2 = text
.substring(
start_loc,
Math.min(end_loc + this.Match_MaxBits, text.length()));
}
if (text1.equals(text2)) {
@ -1992,8 +2030,7 @@ public class DiffPatchMatch {
// indices.
LinkedList<Diff> diffs = diff_main(text1, text2, false);
if (text1.length() > this.Match_MaxBits
&& diff_levenshtein(diffs) / (float) text1.length()
> this.Patch_DeleteThreshold) {
&& diff_levenshtein(diffs) / (float) text1.length() > this.Patch_DeleteThreshold) {
// The end points match, but the content is unacceptably bad.
results[x] = false;
} else {
@ -2009,7 +2046,10 @@ public class DiffPatchMatch {
} else if (aDiff.operation == Operation.DELETE) {
// Deletion
text = text.substring(0, start_loc + index2)
+ text.substring(start_loc + diff_xIndex(diffs,
+ text
.substring(
start_loc + diff_xIndex(
diffs,
index1 + aDiff.text.length()));
}
}
@ -2023,9 +2063,13 @@ public class DiffPatchMatch {
x++;
}
// Strip the padding off.
text = text.substring(nullPadding.length(), text.length()
text = text
.substring(
nullPadding.length(), text.length()
- nullPadding.length());
return new Object[]{text, results};
return new Object[] {
text, results
};
}
/**
@ -2146,7 +2190,11 @@ public class DiffPatchMatch {
bigpatch.diffs.removeFirst();
} else {
// Deletion or equality. Only take as much as we can stomach.
diff_text = diff_text.substring(0, Math.min(diff_text.length(),
diff_text = diff_text
.substring(
0, Math
.min(
diff_text.length(),
patch_size - patch.length1 - Patch_Margin));
patch.length1 += diff_text.length();
start1 += diff_text.length();
@ -2167,7 +2215,11 @@ public class DiffPatchMatch {
}
// Compute the head context for the next patch.
precontext = diff_text2(patch.diffs);
precontext = precontext.substring(Math.max(0, precontext.length()
precontext = precontext
.substring(
Math
.max(
0, precontext.length()
- Patch_Margin));
// Append the end context for this patch.
if (diff_text1(bigpatch.diffs).length() > Patch_Margin) {
@ -2222,8 +2274,7 @@ public class DiffPatchMatch {
List<String> textList = Arrays.asList(textline.split("\n"));
LinkedList<String> text = new LinkedList<String>(textList);
Patch patch;
Pattern patchHeader
= Pattern.compile("^@@ -(\\d+),?(\\d*) \\+(\\d+),?(\\d*) @@$");
Pattern patchHeader = Pattern.compile("^@@ -(\\d+),?(\\d*) \\+(\\d+),?(\\d*) @@$");
Matcher m;
char sign;
String line;
@ -2301,7 +2352,6 @@ public class DiffPatchMatch {
return patches;
}
/**
* Class representing one diff operation.
*/
@ -2379,7 +2429,6 @@ public class DiffPatchMatch {
}
}
/**
* Class representing one patch operation.
*/
@ -2420,7 +2469,11 @@ public class DiffPatchMatch {
coords2 = (this.start2 + 1) + "," + this.length2;
}
StringBuilder text = new StringBuilder();
text.append("@@ -").append(coords1).append(" +").append(coords2)
text
.append("@@ -")
.append(coords1)
.append(" +")
.append(coords2)
.append(" @@\n");
// Escape the body of the patch with %xx notation.
for (Diff aDiff : this.diffs) {
@ -2436,7 +2489,8 @@ public class DiffPatchMatch {
break;
}
try {
text.append(URLEncoder.encode(aDiff.text, "UTF-8").replace('+', ' '))
text
.append(URLEncoder.encode(aDiff.text, "UTF-8").replace('+', ' '))
.append("\n");
} catch (UnsupportedEncodingException e) {
// Not likely on modern system.
@ -2461,11 +2515,22 @@ public class DiffPatchMatch {
* @return The escaped string.
*/
private static String unescapeForEncodeUriCompatability(String str) {
return str.replace("%21", "!").replace("%7E", "~")
.replace("%27", "'").replace("%28", "(").replace("%29", ")")
.replace("%3B", ";").replace("%2F", "/").replace("%3F", "?")
.replace("%3A", ":").replace("%40", "@").replace("%26", "&")
.replace("%3D", "=").replace("%2B", "+").replace("%24", "$")
.replace("%2C", ",").replace("%23", "#");
return str
.replace("%21", "!")
.replace("%7E", "~")
.replace("%27", "'")
.replace("%28", "(")
.replace("%29", ")")
.replace("%3B", ";")
.replace("%2F", "/")
.replace("%3F", "?")
.replace("%3A", ":")
.replace("%40", "@")
.replace("%26", "&")
.replace("%3D", "=")
.replace("%2B", "+")
.replace("%24", "$")
.replace("%2C", ",")
.replace("%23", "#");
}
}
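
The tail of the DiffPatchMatch hunk shows how patch bodies are serialised: each diff text is URL-encoded with '+' turned back into spaces, and a fixed set of characters is then unescaped again for encodeURI compatibility. A short sketch of that encode-then-selectively-unescape round trip, covering only a subset of the characters handled above:

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;

// Illustrative sketch of the %xx escaping used when serialising patch bodies.
public class PatchEscapeSketch {

	static String escape(String text) {
		try {
			// Encode, keep spaces readable, then undo escapes that encodeURI would not have applied.
			return URLEncoder
				.encode(text, "UTF-8")
				.replace('+', ' ')
				.replace("%21", "!")
				.replace("%7E", "~")
				.replace("%27", "'")
				.replace("%28", "(")
				.replace("%29", ")")
				.replace("%2C", ",");
		} catch (UnsupportedEncodingException e) {
			throw new Error("This system does not support UTF-8.", e);
		}
	}

	public static void main(String[] args) {
		System.out.println(escape("Hello, world! (draft)")); // Hello, world! (draft)
	}
}
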

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.util;
import com.google.common.base.Function;

View File

@ -1,20 +1,22 @@
package eu.dnetlib.pace.util;
import java.math.BigDecimal;
import java.util.*;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.jayway.jsonpath.Configuration;
import com.jayway.jsonpath.JsonPath;
import com.jayway.jsonpath.Option;
import eu.dnetlib.pace.config.DedupConfig;
import eu.dnetlib.pace.config.Type;
import eu.dnetlib.pace.model.*;
import net.minidev.json.JSONArray;
import java.math.BigDecimal;
import java.util.*;
import java.util.function.Predicate;
import java.util.stream.Collectors;
public class MapDocumentUtil {
public static final String URL_REGEX = "^(http|https|ftp)\\://.*";
@ -28,11 +30,15 @@ public class MapDocumentUtil {
switch (fdef.getType()) {
case String:
case Int:
stringField.put(fdef.getName(), new FieldValueImpl(fdef.getType(), fdef.getName(), truncateValue(getJPathString(fdef.getPath(), json), fdef.getLength())));
stringField
.put(
fdef.getName(), new FieldValueImpl(fdef.getType(), fdef.getName(),
truncateValue(getJPathString(fdef.getPath(), json), fdef.getLength())));
break;
case URL:
String uv = getJPathString(fdef.getPath(), json);
if (!urlFilter.test(uv)) uv = "";
if (!urlFilter.test(uv))
uv = "";
stringField.put(fdef.getName(), new FieldValueImpl(fdef.getType(), fdef.getName(), uv));
break;
case List:
@ -45,23 +51,26 @@ public class MapDocumentUtil {
stringField.put(fdef.getName(), fi);
break;
case DoubleArray:
stringField.put(
stringField
.put(
fdef.getName(),
new FieldValueImpl(Type.DoubleArray,
fdef.getName(),
getJPathArray(fdef.getPath(), json))
);
getJPathArray(fdef.getPath(), json)));
break;
case StringConcat:
String[] jpaths = fdef.getPath().split("\\|\\|\\|");
stringField.put(
stringField
.put(
fdef.getName(),
new FieldValueImpl(Type.String,
fdef.getName(),
truncateValue(Arrays.stream(jpaths).map(jpath -> getJPathString(jpath, json)).collect(Collectors.joining(" ")),
fdef.getLength())
)
);
truncateValue(
Arrays
.stream(jpaths)
.map(jpath -> getJPathString(jpath, json))
.collect(Collectors.joining(" ")),
fdef.getLength())));
break;
}
});
@ -71,7 +80,13 @@ public class MapDocumentUtil {
public static List<String> getJPathList(String path, String json, Type type) {
if (type == Type.List)
return JsonPath.using(Configuration.defaultConfiguration().addOptions(Option.ALWAYS_RETURN_LIST, Option.SUPPRESS_EXCEPTIONS)).parse(json).read(path);
return JsonPath
.using(
Configuration
.defaultConfiguration()
.addOptions(Option.ALWAYS_RETURN_LIST, Option.SUPPRESS_EXCEPTIONS))
.parse(json)
.read(path);
Object jresult;
List<String> result = new ArrayList<>();
try {
@ -88,8 +103,7 @@ public class MapDocumentUtil {
} catch (JsonProcessingException e) {
}
}
);
});
return result;
}
@ -107,7 +121,6 @@ public class MapDocumentUtil {
return result;
}
public static String getJPathString(final String jsonPath, final String json) {
try {
Object o = JsonPath.read(json, jsonPath);
@ -138,14 +151,12 @@ public class MapDocumentUtil {
return array;
}
return new double[0];
}
catch (Exception e) {
} catch (Exception e) {
e.printStackTrace();
return new double[0];
}
}
public static String truncateValue(String value, int length) {
if (value == null)
return "";
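
MapDocumentUtil above reads list-typed fields with a JsonPath configuration that always returns a list and suppresses path-not-found exceptions, so a missing path yields an empty result instead of an error. A small sketch of that call; it assumes the com.jayway.jsonpath dependency is on the classpath, and the JSON sample is invented.

import java.util.List;

import com.jayway.jsonpath.Configuration;
import com.jayway.jsonpath.JsonPath;
import com.jayway.jsonpath.Option;

// Illustrative sketch of the tolerant JsonPath read used for list-typed fields.
public class JsonPathSketch {

	static List<String> readList(String json, String path) {
		return JsonPath
			.using(
				Configuration
					.defaultConfiguration()
					.addOptions(Option.ALWAYS_RETURN_LIST, Option.SUPPRESS_EXCEPTIONS))
			.parse(json)
			.read(path);
	}

	public static void main(String[] args) {
		String json = "{\"subjects\":[{\"value\":\"biology\"},{\"value\":\"ecology\"}]}";
		System.out.println(readList(json, "$.subjects[*].value")); // [biology, ecology]
		System.out.println(readList(json, "$.missing[*].value")); // []
	}
}
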

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.util;
public class PaceException extends RuntimeException {

View File

@ -1,16 +1,18 @@
package eu.dnetlib.pace.util;
import eu.dnetlib.pace.clustering.ClusteringClass;
import eu.dnetlib.pace.clustering.ClusteringFunction;
import eu.dnetlib.pace.tree.support.Comparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import org.reflections.Reflections;
package eu.dnetlib.pace.util;
import java.io.Serializable;
import java.lang.reflect.InvocationTargetException;
import java.util.Map;
import java.util.stream.Collectors;
import org.reflections.Reflections;
import eu.dnetlib.pace.clustering.ClusteringClass;
import eu.dnetlib.pace.clustering.ClusteringFunction;
import eu.dnetlib.pace.tree.support.Comparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
public class PaceResolver implements Serializable {
public static final Reflections CLUSTERING_RESOLVER = new Reflections("eu.dnetlib.pace.clustering");
@ -21,19 +23,28 @@ public class PaceResolver implements Serializable {
public PaceResolver() {
this.clusteringFunctions = CLUSTERING_RESOLVER.getTypesAnnotatedWith(ClusteringClass.class).stream()
this.clusteringFunctions = CLUSTERING_RESOLVER
.getTypesAnnotatedWith(ClusteringClass.class)
.stream()
.filter(ClusteringFunction.class::isAssignableFrom)
.collect(Collectors.toMap(cl -> cl.getAnnotation(ClusteringClass.class).value(), cl -> (Class<ClusteringFunction>)cl));
.collect(
Collectors
.toMap(
cl -> cl.getAnnotation(ClusteringClass.class).value(), cl -> (Class<ClusteringFunction>) cl));
this.comparators = COMPARATOR_RESOLVER.getTypesAnnotatedWith(ComparatorClass.class).stream()
this.comparators = COMPARATOR_RESOLVER
.getTypesAnnotatedWith(ComparatorClass.class)
.stream()
.filter(Comparator.class::isAssignableFrom)
.collect(Collectors.toMap(cl -> cl.getAnnotation(ComparatorClass.class).value(), cl -> (Class<Comparator>)cl));
.collect(
Collectors.toMap(cl -> cl.getAnnotation(ComparatorClass.class).value(), cl -> (Class<Comparator>) cl));
}
public ClusteringFunction getClusteringFunction(String name, Map<String, Integer> params) throws PaceException {
try {
return clusteringFunctions.get(name).getDeclaredConstructor(Map.class).newInstance(params);
} catch (InstantiationException | IllegalAccessException | InvocationTargetException | NoSuchMethodException e) {
} catch (InstantiationException | IllegalAccessException | InvocationTargetException
| NoSuchMethodException e) {
throw new PaceException(name + " not found ", e);
}
}
@ -41,7 +52,8 @@ public class PaceResolver implements Serializable {
public Comparator getComparator(String name, Map<String, String> params) throws PaceException {
try {
return comparators.get(name).getDeclaredConstructor(Map.class).newInstance(params);
} catch (InstantiationException | IllegalAccessException | InvocationTargetException | NoSuchMethodException | NullPointerException e) {
} catch (InstantiationException | IllegalAccessException | InvocationTargetException | NoSuchMethodException
| NullPointerException e) {
throw new PaceException(name + " not found ", e);
}
}
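
PaceResolver above uses the Reflections library to scan the clustering and tree packages for classes annotated with @ClusteringClass or @ComparatorClass and indexes them by annotation value, so configurations can refer to functions by name. A hedged sketch of that discovery pattern with a made-up annotation and interface; it assumes org.reflections is available and that the sketch is compiled under a package named "sketch".

package sketch;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.util.Map;
import java.util.stream.Collectors;

import org.reflections.Reflections;

// Illustrative sketch: discover annotated implementations by name, as PaceResolver does.
public class ResolverSketch {

	@Retention(RetentionPolicy.RUNTIME)
	@Target(ElementType.TYPE)
	public @interface NamedFunction {
		String value();
	}

	public interface Normalizer {
		String normalize(String s);
	}

	@NamedFunction("lowercase")
	public static class Lowercase implements Normalizer {
		public String normalize(String s) {
			return s.toLowerCase();
		}
	}

	public static void main(String[] args) throws Exception {
		// Scan this sketch's own package; the library scans eu.dnetlib.pace.clustering and eu.dnetlib.pace.tree.
		Map<String, Class<?>> byName = new Reflections("sketch")
			.getTypesAnnotatedWith(NamedFunction.class)
			.stream()
			.filter(Normalizer.class::isAssignableFrom)
			.collect(Collectors.toMap(cl -> cl.getAnnotation(NamedFunction.class).value(), cl -> cl));

		Normalizer n = (Normalizer) byName.get("lowercase").getDeclaredConstructor().newInstance();
		System.out.println(n.normalize("OpenAIRE")); // openaire
	}
}
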

View File

@ -1,5 +1,5 @@
package eu.dnetlib.pace.util;
package eu.dnetlib.pace.util;
import java.io.Serializable;

View File

@ -1,11 +1,5 @@
package eu.dnetlib.pace;
import eu.dnetlib.pace.common.AbstractPaceFunctions;
import eu.dnetlib.pace.config.Type;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldListImpl;
import eu.dnetlib.pace.model.FieldValueImpl;
import org.apache.commons.io.IOUtils;
package eu.dnetlib.pace;
import java.io.IOException;
import java.io.StringWriter;
@ -13,6 +7,14 @@ import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.stream.Collectors;
import org.apache.commons.io.IOUtils;
import eu.dnetlib.pace.common.AbstractPaceFunctions;
import eu.dnetlib.pace.config.Type;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldListImpl;
import eu.dnetlib.pace.model.FieldValueImpl;
public abstract class AbstractPaceTest extends AbstractPaceFunctions {
protected String readFromClasspath(final String filename) {
@ -43,7 +45,10 @@ public abstract class AbstractPaceTest extends AbstractPaceFunctions {
protected Field createFieldList(List<String> strings, String fieldName) {
List<FieldValueImpl> fieldValueStream = strings.stream().map(s -> new FieldValueImpl(Type.String, fieldName, s)).collect(Collectors.toList());
List<FieldValueImpl> fieldValueStream = strings
.stream()
.map(s -> new FieldValueImpl(Type.String, fieldName, s))
.collect(Collectors.toList());
FieldListImpl a = new FieldListImpl();
a.addAll(fieldValueStream);

View File

@ -1,17 +1,20 @@
package eu.dnetlib.pace.clustering;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import eu.dnetlib.pace.AbstractPaceTest;
import eu.dnetlib.pace.common.AbstractPaceFunctions;
import eu.dnetlib.pace.config.DedupConfig;
import org.junit.jupiter.api.*;
package eu.dnetlib.pace.clustering;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.junit.jupiter.api.*;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import eu.dnetlib.pace.AbstractPaceTest;
import eu.dnetlib.pace.common.AbstractPaceFunctions;
import eu.dnetlib.pace.config.DedupConfig;
public class ClusteringFunctionTest extends AbstractPaceTest {
private static Map<String, Integer> params;
@ -20,7 +23,11 @@ public class ClusteringFunctionTest extends AbstractPaceTest {
@BeforeAll
public static void setUp() throws Exception {
params = Maps.newHashMap();
conf = DedupConfig.load(AbstractPaceFunctions.readFromClasspath("/eu/dnetlib/pace/config/organization.current.conf.json", ClusteringFunctionTest.class));
conf = DedupConfig
.load(
AbstractPaceFunctions
.readFromClasspath(
"/eu/dnetlib/pace/config/organization.current.conf.json", ClusteringFunctionTest.class));
}
@Test

Some files were not shown because too many files have changed in this diff.