implemented new clustering function (WordsSuffixPrefix)

miconis 2020-07-02 17:04:17 +02:00
parent b3ec4194da
commit 33eadb7c9c
3 changed files with 61 additions and 6 deletions

DedupLocalTest.java

@@ -28,12 +28,12 @@ public class DedupLocalTest extends DedupTestUtils {
    DedupConfig config;
    JavaSparkContext context;
-   final String entitiesPath = "/Users/miconis/IdeaProjects/DnetDedup/dnet-dedup/dnet-dedup-test/src/test/resources/eu/dnetlib/pace/examples/organization";
+   final String entitiesPath = "/Users/miconis/Desktop/publications_to_fix.json";
    @Before
    public void setup() {
-       config = DedupConfig.load(Utility.readFromClasspath("/eu/dnetlib/pace/config/organization.current.conf.json", DedupLocalTest.class));
+       config = DedupConfig.load(Utility.readFromClasspath("/eu/dnetlib/pace/config/publication.current.conf.json", DedupLocalTest.class));
        spark = SparkSession
                .builder()
@@ -51,20 +51,20 @@
                config,
                spark,
                entitiesPath,
-               "/tmp/deduptest/organization_simrel"
+               "/tmp/deduptest/publication_simrel"
        );
        Deduper.createMergeRels(
                config,
                entitiesPath,
-               "/tmp/deduptest/organization_mergerel",
-               "/tmp/deduptest/organization_simrel",
+               "/tmp/deduptest/publication_mergerel",
+               "/tmp/deduptest/publication_simrel",
                spark
        );
        Deduper.createDedupEntity(
                config,
-               "/tmp/deduptest/organization_mergerel",
+               "/tmp/deduptest/publication_mergerel",
                entitiesPath,
                spark,
                "/tmp/deduptest/dedupentity"

WordsSuffixPrefix.java (new file)

@@ -0,0 +1,42 @@
package eu.dnetlib.pace.clustering;

import java.util.Collection;
import java.util.Map;
import java.util.Set;

import com.google.common.collect.Sets;

import eu.dnetlib.pace.config.Config;

@ClusteringClass("wordssuffixprefix")
public class WordsSuffixPrefix extends AbstractClusteringFunction {

    public WordsSuffixPrefix(Map<String, Integer> params) {
        super(params);
    }

    @Override
    protected Collection<String> doApply(Config conf, String s) {
        return suffixPrefix(s, param("len"), param("max"));
    }

    // Builds up to "max" clustering keys: for each space in the string it takes the
    // "len" characters before it and the "len" characters after it, removes the space,
    // and prepends the total word count; candidates shorter than 4 characters are skipped.
    private Collection<String> suffixPrefix(String s, int len, int max) {
        final int words = s.split(" ").length;
        final Set<String> bigrams = Sets.newLinkedHashSet();
        int i = 0;
        while (++i < s.length() && bigrams.size() < max) {
            int j = s.indexOf(" ", i);
            int offset = j + len + 1 < s.length() ? j + len + 1 : s.length();
            if (j - len > 0) {
                String bigram = s.substring(j - len, offset).replaceAll(" ", "").trim();
                if (bigram.length() >= 4) {
                    bigrams.add(words + bigram);
                }
            }
        }
        return bigrams;
    }
}
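
For reference, a minimal standalone sketch of the suffix-prefix key extraction above; the class name WordsSuffixPrefixSketch and the input title are illustrative, and java.util.LinkedHashSet stands in for Guava's Sets so the snippet runs without extra dependencies.

import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.Set;

public class WordsSuffixPrefixSketch {

    // Same logic as WordsSuffixPrefix.suffixPrefix: around each space, take the
    // "len" characters before and after it, drop the space, prepend the word count.
    static Collection<String> suffixPrefix(String s, int len, int max) {
        final int words = s.split(" ").length;
        final Set<String> keys = new LinkedHashSet<>();
        int i = 0;
        while (++i < s.length() && keys.size() < max) {
            int j = s.indexOf(" ", i);
            int offset = j + len + 1 < s.length() ? j + len + 1 : s.length();
            if (j - len > 0) {
                String key = s.substring(j - len, offset).replaceAll(" ", "").trim();
                if (key.length() >= 4) {
                    keys.add(words + key);
                }
            }
        }
        return keys;
    }

    public static void main(String[] args) {
        // prints [7rchfor, 7forthe, 7thesta, 7ardmod] for this illustrative title
        System.out.println(suffixPrefix("search for the standard model higgs boson", 3, 4));
    }
}

Prepending the word count means keys only collide for strings with the same number of words, which keeps titles of different lengths in separate clusters even when they share a boundary fragment.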

ClusteringFunctionTest.java

@@ -98,6 +98,19 @@ public class ClusteringFunctionTest extends AbstractPaceTest {
        System.out.println(sp.apply(conf, Lists.newArrayList(title(s))));
    }

    @Test
    public void testWordsSuffixPrefix() {
        params.put("len", 3);
        params.put("max", 4);
        final ClusteringFunction sp = new WordsSuffixPrefix(params);

        final String s = "Search for the Standard Model Higgs Boson";
        System.out.println(s);
        System.out.println(sp.apply(conf, Lists.newArrayList(title(s))));
    }

    @Test
    public void testFieldValue() {