[BypassAS] prepare FOS subjects; add test and model classes for FOS and BipFinder scores
parent 7bd224f051
commit a7d50c499b
eu/dnetlib/dhp/bypassactionset/DistributeFOSSparkJob.java
@@ -0,0 +1,84 @@
package eu.dnetlib.dhp.bypassactionset;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.bypassactionset.model.FOSDataModel;
import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import static eu.dnetlib.dhp.PropagationConstant.*;
import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

public class DistributeFOSSparkJob implements Serializable {

	private static final Logger log = LoggerFactory.getLogger(DistributeFOSSparkJob.class);

	public static void main(String[] args) throws Exception {

		String jsonConfiguration = IOUtils
			.toString(
				DistributeFOSSparkJob.class
					.getResourceAsStream(
						"/eu/dnetlib/dhp/bypassactionset/distribute_fos_parameters.json"));

		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);

		parser.parseArgument(args);

		Boolean isSparkSessionManaged = isSparkSessionManaged(parser);
		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

		String sourcePath = parser.get("sourcePath");
		log.info("sourcePath: {}", sourcePath);

		final String outputPath = parser.get("outputPath");
		log.info("outputPath: {}", outputPath);

		SparkConf conf = new SparkConf();
		runWithSparkSession(
			conf,
			isSparkSessionManaged,
			spark -> {
				removeOutputDir(spark, outputPath);
				distributeFOSdois(spark, sourcePath, outputPath);
			});
	}

	private static void distributeFOSdois(SparkSession spark, String sourcePath, String outputPath) {
		Dataset<FOSDataModel> fosDataset = readPath(spark, sourcePath, FOSDataModel.class);

		fosDataset.flatMap((FlatMapFunction<FOSDataModel, FOSDataModel>) v -> {
			List<FOSDataModel> fosList = new ArrayList<>();
			final String level1 = v.getLevel1();
			final String level2 = v.getLevel2();
			final String level3 = v.getLevel3();
			// a single input record may pack multiple DOIs into one field, separated by the
			// \u0002 control character: emit one output record per DOI
			Arrays
				.stream(v.getDoi().split("\u0002"))
				.forEach(d -> fosList.add(FOSDataModel.newInstance(d, level1, level2, level3)));
			return fosList.iterator();
		}, Encoders.bean(FOSDataModel.class))
			.write()
			.mode(SaveMode.Overwrite)
			.option("compression", "gzip")
			.json(outputPath);
	}

}
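The splitting step above is the heart of the job, so here is a minimal, self-contained sketch of the same \u0002 handling; the packed value is taken from the fos.json fixture added later in this commit:

public class SplitPackedDoisExample {

	public static void main(String[] args) {
		// two DOIs packed into one field, separated by the \u0002 control character
		String packed = "10.1111/1365-2656.12831\u000210.17863/cam.24369";
		// the flatMap in DistributeFOSSparkJob emits one FOSDataModel per DOI
		for (String doi : packed.split("\u0002")) {
			System.out.println(doi);
		}
		// prints:
		// 10.1111/1365-2656.12831
		// 10.17863/cam.24369
	}
}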
eu/dnetlib/dhp/bypassactionset/GetFOSData.java
@@ -0,0 +1,79 @@
package eu.dnetlib.dhp.bypassactionset;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.common.collection.GetCSV;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.*;
import java.util.Objects;
import java.util.Optional;

public class GetFOSData implements Serializable {

	private static final Logger log = LoggerFactory.getLogger(GetFOSData.class);

	public static final char DEFAULT_DELIMITER = '\t';

	public static void main(final String[] args) throws Exception {
		final ArgumentApplicationParser parser = new ArgumentApplicationParser(
			IOUtils
				.toString(
					Objects
						.requireNonNull(
							GetFOSData.class
								.getResourceAsStream(
									"/eu/dnetlib/dhp/bypassactionset/get_fos_parameters.json"))));

		parser.parseArgument(args);

		// the path where the original FOS csv file is stored
		final String inputPath = parser.get("inputPath");
		log.info("inputPath {}", inputPath);

		// the path where to put the file as json
		final String outputFile = parser.get("outputFile");
		log.info("outputFile {}", outputFile);

		final String hdfsNameNode = parser.get("hdfsNameNode");
		log.info("hdfsNameNode {}", hdfsNameNode);

		final String classForName = parser.get("classForName");
		log.info("classForName {}", classForName);

		final char delimiter = Optional
			.ofNullable(parser.get("delimiter"))
			.map(s -> s.charAt(0))
			.orElse(DEFAULT_DELIMITER);
		log.info("delimiter {}", delimiter);

		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", hdfsNameNode);

		FileSystem fileSystem = FileSystem.get(conf);

		new GetFOSData().doRewrite(inputPath, outputFile, classForName, delimiter, fileSystem);
	}

	public void doRewrite(String inputPath, String outputFile, String classForName, char delimiter, FileSystem fs)
		throws IOException, ClassNotFoundException {

		// reads the csv and writes it as its json equivalent
		try (InputStreamReader reader = new InputStreamReader(fs.open(new Path(inputPath)))) {
			GetCSV.getCsv(fs, reader, outputFile, classForName, delimiter);
		}

	}

}
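For orientation, a hedged sketch of how the rewrite step could be driven from code in the same package; the paths and the name node URL are placeholder assumptions, while the model class is the FOSDataModel added by this commit:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

public class GetFOSDataUsageSketch {

	public static void main(String[] args) throws Exception {
		Configuration conf = new Configuration();
		// placeholder name node: point fs.defaultFS at the target HDFS cluster
		conf.set("fs.defaultFS", "hdfs://namenode.example.org:8020");
		FileSystem fs = FileSystem.get(conf);

		// rewrites the tab-separated FOS dump as one json object per line
		new GetFOSData()
			.doRewrite(
				"/data/fos/h2020_fos_sbs.csv", // placeholder input path
				"/data/fos/fos.json", // placeholder output file
				"eu.dnetlib.dhp.bypassactionset.model.FOSDataModel",
				'\t',
				fs);
	}
}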
eu/dnetlib/dhp/bypassactionset/model/BipDeserialize.java
@@ -0,0 +1,28 @@
package eu.dnetlib.dhp.bypassactionset.model;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

/**
 * Class that maps the model of the bipFinder! input data.
 * Only needed for deserialization purposes
 */
public class BipDeserialize extends HashMap<String, List<Score>> implements Serializable {

	public BipDeserialize() {
		super();
	}

	public List<Score> get(String key) {
		if (super.get(key) == null) {
			return new ArrayList<>();
		}
		return super.get(key);
	}

}
eu/dnetlib/dhp/bypassactionset/model/FOSDataModel.java
@@ -0,0 +1,71 @@
package eu.dnetlib.dhp.bypassactionset.model;

import com.opencsv.bean.CsvBindByPosition;

import java.io.Serializable;

public class FOSDataModel implements Serializable {

	// position 0 of the input csv carries the OpenAIRE record identifier and is not mapped
	@CsvBindByPosition(position = 1)
	// @CsvBindByName(column = "doi")
	private String doi;

	@CsvBindByPosition(position = 2)
	// @CsvBindByName(column = "level1")
	private String level1;

	@CsvBindByPosition(position = 3)
	// @CsvBindByName(column = "level2")
	private String level2;

	@CsvBindByPosition(position = 4)
	// @CsvBindByName(column = "level3")
	private String level3;

	public FOSDataModel() {

	}

	public FOSDataModel(String doi, String level1, String level2, String level3) {
		this.doi = doi;
		this.level1 = level1;
		this.level2 = level2;
		this.level3 = level3;
	}

	public static FOSDataModel newInstance(String d, String level1, String level2, String level3) {
		return new FOSDataModel(d, level1, level2, level3);
	}

	public String getDoi() {
		return doi;
	}

	public void setDoi(String doi) {
		this.doi = doi;
	}

	public String getLevel1() {
		return level1;
	}

	public void setLevel1(String level1) {
		this.level1 = level1;
	}

	public String getLevel2() {
		return level2;
	}

	public void setLevel2(String level2) {
		this.level2 = level2;
	}

	public String getLevel3() {
		return level3;
	}

	public void setLevel3(String level3) {
		this.level3 = level3;
	}
}
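Since the bean relies on positional bindings, a short parsing sketch may help; it assumes opencsv 5.x (the library providing @CsvBindByPosition) and reuses a row from the h2020_fos_sbs.csv fixture shipped with the tests:

import com.opencsv.bean.CsvToBeanBuilder;

import java.io.StringReader;
import java.util.List;

public class FOSDataModelParseSketch {

	public static void main(String[] args) {
		// columns: OpenAIRE id (position 0, unmapped), doi, level1, level2, level3
		String row = "dedup_wf_001::ddcc7a56fa13e49bcc59c6bdd19ad26c\t10.3390/s18072310\t"
			+ "engineering and technology\tnano-technology\tnanoscience & nanotechnology";
		List<FOSDataModel> beans = new CsvToBeanBuilder<FOSDataModel>(new StringReader(row))
			.withType(FOSDataModel.class)
			.withSeparator('\t')
			.build()
			.parse();
		System.out.println(beans.get(0).getDoi()); // 10.3390/s18072310
	}
}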
eu/dnetlib/dhp/bypassactionset/model/KeyValue.java
@@ -0,0 +1,26 @@
package eu.dnetlib.dhp.bypassactionset.model;

import java.io.Serializable;

public class KeyValue implements Serializable {

	private String key;
	private String value;

	public String getKey() {
		return key;
	}

	public void setKey(String key) {
		this.key = key;
	}

	public String getValue() {
		return value;
	}

	public void setValue(String value) {
		this.value = value;
	}
}
eu/dnetlib/dhp/bypassactionset/model/Score.java
@@ -0,0 +1,30 @@
package eu.dnetlib.dhp.bypassactionset.model;

import java.io.Serializable;
import java.util.List;

/**
 * Represents the score in the input file
 */
public class Score implements Serializable {

	private String id;
	private List<KeyValue> unit;

	public String getId() {
		return id;
	}

	public void setId(String id) {
		this.id = id;
	}

	public List<KeyValue> getUnit() {
		return unit;
	}

	public void setUnit(List<KeyValue> unit) {
		this.unit = unit;
	}
}
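Taken together, BipDeserialize, Score and KeyValue model one json object per line, mapping a DOI to its list of scores. A hedged deserialization sketch follows; the field values are made up and the line shape is assumed from the classes above:

import com.fasterxml.jackson.databind.ObjectMapper;

public class BipDeserializeSketch {

	public static void main(String[] args) throws Exception {
		// assumed shape: {"<doi>": [{"id": "<measure>", "unit": [{"key": "score", "value": "..."}]}]}
		String line = "{\"10.3390/s18072310\": [{\"id\": \"influence\", "
			+ "\"unit\": [{\"key\": \"score\", \"value\": \"1.23e-8\"}]}]}";
		BipDeserialize bip = new ObjectMapper().readValue(line, BipDeserialize.class);
		for (Score s : bip.get("10.3390/s18072310")) {
			System.out.println(s.getId() + " -> " + s.getUnit().get(0).getValue());
		}
	}
}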
eu/dnetlib/dhp/bypassactionset/distribute_fos_parameters.json
@@ -0,0 +1,21 @@
[
  {
    "paramName": "s",
    "paramLongName": "sourcePath",
    "paramDescription": "the path of the sequential file to read",
    "paramRequired": true
  },
  {
    "paramName": "out",
    "paramLongName": "outputPath",
    "paramDescription": "the output path",
    "paramRequired": true
  },
  {
    "paramName": "ssm",
    "paramLongName": "isSparkSessionManaged",
    "paramDescription": "true if the spark session is managed, false otherwise",
    "paramRequired": false
  }
]
eu/dnetlib/dhp/bypassactionset/get_fos_parameters.json
@@ -0,0 +1,27 @@
[
  {
    "paramName": "ip",
    "paramLongName": "inputPath",
    "paramDescription": "the path of the FOS csv file to read",
    "paramRequired": true
  },
  {
    "paramName": "out",
    "paramLongName": "outputFile",
    "paramDescription": "the path of the json output file",
    "paramRequired": true
  },
  {
    "paramName": "hnn",
    "paramLongName": "hdfsNameNode",
    "paramDescription": "the name node of the HDFS file system",
    "paramRequired": true
  },
  {
    "paramName": "cfn",
    "paramLongName": "classForName",
    "paramDescription": "the fully qualified name of the class the csv records are mapped to",
    "paramRequired": true
  },
  {
    "paramName": "d",
    "paramLongName": "delimiter",
    "paramDescription": "the delimiter of the csv file, defaults to tab when not provided",
    "paramRequired": false
  }
]
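A hedged invocation sketch matching the parameter file above (all values are placeholders):

public class GetFOSDataLaunchSketch {

	public static void main(String[] args) throws Exception {
		GetFOSData.main(new String[] {
			"--inputPath", "/data/fos/h2020_fos_sbs.csv", // placeholder
			"--outputFile", "/data/fos/fos.json", // placeholder
			"--hdfsNameNode", "hdfs://namenode.example.org:8020", // placeholder
			"--classForName", "eu.dnetlib.dhp.bypassactionset.model.FOSDataModel"
			// --delimiter omitted: GetFOSData defaults to tab
		});
	}
}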
eu/dnetlib/dhp/bypassactionset/GetFOSTest.java
@@ -0,0 +1,332 @@
package eu.dnetlib.dhp.bypassactionset;

import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.bypassactionset.model.FOSDataModel;
import eu.dnetlib.dhp.common.collection.CollectorException;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.file.Files;
import java.nio.file.Path;

import static org.junit.jupiter.api.Assertions.assertEquals;

public class GetFOSTest {

	private static final Logger log = LoggerFactory.getLogger(GetFOSTest.class);

	private static Path workingDir;
	private static SparkSession spark;
	private static LocalFileSystem fs;
	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

	@BeforeAll
	public static void beforeAll() throws IOException {
		workingDir = Files.createTempDirectory(GetFOSTest.class.getSimpleName());

		fs = FileSystem.getLocal(new Configuration());
		log.info("using work dir {}", workingDir);

		SparkConf conf = new SparkConf();
		conf.setAppName(GetFOSTest.class.getSimpleName());

		conf.setMaster("local[*]");
		conf.set("spark.driver.host", "localhost");
		conf.set("hive.metastore.local", "true");
		conf.set("spark.ui.enabled", "false");
		conf.set("spark.sql.warehouse.dir", workingDir.toString());
		conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());

		spark = SparkSession
			.builder()
			.appName(GetFOSTest.class.getSimpleName())
			.config(conf)
			.getOrCreate();
	}

	@AfterAll
	public static void afterAll() throws IOException {
		FileUtils.deleteDirectory(workingDir.toFile());
		spark.stop();
	}

	@Test
	void getFOSFileTest() throws CollectorException, IOException, ClassNotFoundException {

		final String sourcePath = getClass()
			.getResource("/eu/dnetlib/dhp/bypassactionset/fos/h2020_fos_sbs.csv")
			.getPath();
		final String outputPath = workingDir.toString() + "/fos.json";

		// the model class lives in the model sub-package
		new GetFOSData()
			.doRewrite(sourcePath, outputPath, "eu.dnetlib.dhp.bypassactionset.model.FOSDataModel", '\t', fs);

		BufferedReader in = new BufferedReader(
			new InputStreamReader(fs.open(new org.apache.hadoop.fs.Path(outputPath))));

		String line;
		int count = 0;
		while ((line = in.readLine()) != null) {
			FOSDataModel fos = OBJECT_MAPPER.readValue(line, FOSDataModel.class);

			System.out.println(OBJECT_MAPPER.writeValueAsString(fos));
			count += 1;
		}

		assertEquals(38, count);
	}

	@Test
	void distributeDoiTest() throws Exception {
		final String sourcePath = getClass()
			.getResource("/eu/dnetlib/dhp/bypassactionset/fos/fos.json")
			.getPath();

		DistributeFOSSparkJob
			.main(
				new String[] {
					"--isSparkSessionManaged", Boolean.FALSE.toString(),
					"--sourcePath", sourcePath,
					"-outputPath", workingDir.toString() + "/distribute"
				});

		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

		JavaRDD<FOSDataModel> tmp = sc
			.textFile(workingDir.toString() + "/distribute")
			.map(item -> OBJECT_MAPPER.readValue(item, FOSDataModel.class));

		assertEquals(50, tmp.count());

		assertEquals(1, tmp.filter(row -> row.getDoi().equals("10.3390/s18072310")).count());
		assertEquals(
			"engineering and technology",
			tmp.filter(r -> r.getDoi().equals("10.3390/s18072310")).collect().get(0).getLevel1());
		assertEquals(
			"nano-technology",
			tmp.filter(r -> r.getDoi().equals("10.3390/s18072310")).collect().get(0).getLevel2());
		assertEquals(
			"nanoscience & nanotechnology",
			tmp.filter(r -> r.getDoi().equals("10.3390/s18072310")).collect().get(0).getLevel3());

		assertEquals(1, tmp.filter(row -> row.getDoi().equals("10.1111/1365-2656.12831")).count());
		assertEquals(
			"social sciences",
			tmp.filter(r -> r.getDoi().equals("10.1111/1365-2656.12831")).collect().get(0).getLevel1());
		assertEquals(
			"psychology and cognitive sciences",
			tmp.filter(r -> r.getDoi().equals("10.1111/1365-2656.12831")).collect().get(0).getLevel2());
		assertEquals(
			"NULL",
			tmp.filter(r -> r.getDoi().equals("10.1111/1365-2656.12831")).collect().get(0).getLevel3());

		// the record exercised above is packed in the fixture as:
		// {"doi":"10.1111/1365-2656.12831\u000210.17863/cam.24369","level1":"social sciences","level2":"psychology and cognitive sciences","level3":"NULL"}
	}

}
eu/dnetlib/dhp/bypassactionset/fos/fos.json
@@ -0,0 +1,38 @@
{"doi":"10.3390/s18072310","level1":"engineering and technology","level2":"nano-technology","level3":"nanoscience & nanotechnology"}
{"doi":"10.1111/1365-2656.12831\u000210.17863/cam.24369","level1":"social sciences","level2":"psychology and cognitive sciences","level3":"NULL"}
{"doi":"10.3929/ethz-b-000187584\u000210.1002/chem.201701644","level1":"natural sciences","level2":"NULL","level3":"NULL"}
{"doi":"10.1080/01913123.2017.1367361","level1":"medical and health sciences","level2":"clinical medicine","level3":"oncology & carcinogenesis"}
{"doi":"10.1051/e3sconf/20199207011","level1":"natural sciences","level2":"earth and related environmental sciences","level3":"environmental sciences"}
{"doi":"10.1038/onc.2015.333","level1":"medical and health sciences","level2":"clinical medicine","level3":"oncology & carcinogenesis"}
{"doi":"10.1093/mnras/staa256","level1":"natural sciences","level2":"physical sciences","level3":"NULL"}
{"doi":"10.1016/j.jclepro.2018.07.166","level1":"engineering and technology","level2":"other engineering and technologies","level3":"building & construction"}
{"doi":"10.1103/physrevlett.125.037403","level1":"natural sciences","level2":"physical sciences","level3":"nuclear & particles physics"}
{"doi":"10.1080/03602532.2017.1316285","level1":"natural sciences","level2":"NULL","level3":"NULL"}
{"doi":"10.1001/jamanetworkopen.2019.1868","level1":"medical and health sciences","level2":"other medical science","level3":"health policy & services"}
{"doi":"10.1128/mra.00874-18","level1":"natural sciences","level2":"biological sciences","level3":"plant biology & botany"}
{"doi":"10.1016/j.nancom.2018.03.001","level1":"engineering and technology","level2":"NULL","level3":"NULL"}
{"doi":"10.1112/topo.12174","level1":"natural sciences","level2":"NULL","level3":"NULL"}
{"doi":"10.12688/wellcomeopenres.15846.1","level1":"medical and health sciences","level2":"health sciences","level3":"NULL"}
{"doi":"10.21468/scipostphys.3.1.001","level1":"natural sciences","level2":"physical sciences","level3":"NULL"}
{"doi":"10.1088/1741-4326/ab6c77","level1":"natural sciences","level2":"physical sciences","level3":"nuclear & particles physics"}
{"doi":"10.1109/tpwrs.2019.2944747","level1":"engineering and technology","level2":"electrical engineering, electronic engineering, information engineering","level3":"electrical & electronic engineering"}
{"doi":"10.1016/j.expthermflusci.2019.109994\u000210.17863/cam.46212","level1":"engineering and technology","level2":"mechanical engineering","level3":"mechanical engineering & transports"}
{"doi":"10.1109/tc.2018.2860012","level1":"engineering and technology","level2":"electrical engineering, electronic engineering, information engineering","level3":"computer hardware & architecture"}
{"doi":"10.1002/mma.6622","level1":"natural sciences","level2":"mathematics","level3":"numerical & computational mathematics"}
{"doi":"10.1051/radiopro/2020020","level1":"natural sciences","level2":"chemical sciences","level3":"NULL"}
{"doi":"10.1007/s12268-019-1003-4","level1":"medical and health sciences","level2":"basic medicine","level3":"NULL"}
{"doi":"10.3390/cancers12010236","level1":"medical and health sciences","level2":"health sciences","level3":"biochemistry & molecular biology"}
{"doi":"10.6084/m9.figshare.9912614\u000210.6084/m9.figshare.9912614.v1\u000210.1080/00268976.2019.1665199","level1":"natural sciences","level2":"chemical sciences","level3":"physical chemistry"}
{"doi":"10.1175/jpo-d-17-0239.1","level1":"natural sciences","level2":"biological sciences","level3":"marine biology & hydrobiology"}
{"doi":"10.1007/s13218-020-00674-7","level1":"engineering and technology","level2":"industrial biotechnology","level3":"industrial engineering & automation"}
{"doi":"10.1016/j.psyneuen.2016.02.003\u000210.1016/j.psyneuen.2016.02.00310.7892/boris.78886\u000210.7892/boris.78886","level1":"medical and health sciences","level2":"basic medicine","level3":"NULL"}
{"doi":"10.1109/ted.2018.2813542","level1":"engineering and technology","level2":"electrical engineering, electronic engineering, information engineering","level3":"electrical & electronic engineering"}
{"doi":"10.3989/scimar.04739.25a","level1":"natural sciences","level2":"biological sciences","level3":"NULL"}
{"doi":"10.3390/su12187503","level1":"natural sciences","level2":"earth and related environmental sciences","level3":"NULL"}
{"doi":"10.1016/j.ccell.2018.08.017","level1":"medical and health sciences","level2":"basic medicine","level3":"biochemistry & molecular biology"}
{"doi":"10.1103/physrevresearch.2.023322","level1":"natural sciences","level2":"physical sciences","level3":"nuclear & particles physics"}
{"doi":"10.1039/c8cp03234c","level1":"natural sciences","level2":"NULL","level3":"NULL"}
{"doi":"10.5281/zenodo.3696557\u000210.5281/zenodo.3696556\u000210.1109/jsac.2016.2545384","level1":"engineering and technology","level2":"electrical engineering, electronic engineering, information engineering","level3":"networking & telecommunications"}
{"doi":"10.1038/ng.3667\u000210.1038/ng.3667.\u000210.17615/tct6-4m26\u000210.17863/cam.15649","level1":"medical and health sciences","level2":"health sciences","level3":"genetics & heredity"}
{"doi":"10.1016/j.jclepro.2019.119065","level1":"engineering and technology","level2":"other engineering and technologies","level3":"building & construction"}
{"doi":"10.1111/pce.13392","level1":"agricultural and veterinary sciences","level2":"agriculture, forestry, and fisheries","level3":"agronomy & agriculture"}
eu/dnetlib/dhp/bypassactionset/fos/h2020_fos_sbs.csv
@@ -0,0 +1,38 @@
dedup_wf_001::ddcc7a56fa13e49bcc59c6bdd19ad26c	10.3390/s18072310	engineering and technology	nano-technology	nanoscience & nanotechnology
dedup_wf_001::b76062d56e28224eac56111a4e1e5ecf	10.1111/1365-2656.1283110.17863/cam.24369	social sciences	psychology and cognitive sciences	NULL
dedup_wf_001::bb752acb8f403a25fa7851a302f7b7ac	10.3929/ethz-b-00018758410.1002/chem.201701644	natural sciences	NULL	NULL
dedup_wf_001::2f1435a9201ecf5cbbcb12c9b2d971cd	10.1080/01913123.2017.1367361	medical and health sciences	clinical medicine	oncology & carcinogenesis
dedup_wf_001::fc9e47ec16c67b101724320d4b030514	10.1051/e3sconf/20199207011	natural sciences	earth and related environmental sciences	environmental sciences
dedup_wf_001::caa1e5b4de387cb31751552f4f0f5d72	10.1038/onc.2015.333	medical and health sciences	clinical medicine	oncology & carcinogenesis
dedup_wf_001::c2a98df5637d69bf0524eaf40fe6bf11	10.1093/mnras/staa256	natural sciences	physical sciences	NULL
dedup_wf_001::c221262bdc77cbfd59859a402f0e3991	10.1016/j.jclepro.2018.07.166	engineering and technology	other engineering and technologies	building & construction
doiboost____::d56d9dc21f317b3e009d5b6c8ea87212	10.1103/physrevlett.125.037403	natural sciences	physical sciences	nuclear & particles physics
dedup_wf_001::8a7269c8ee6470b2fb4fd384bc389e08	10.1080/03602532.2017.1316285	natural sciences	NULL	NULL
dedup_wf_001::28342ebbc19833e4e1f4a2b23cf5ee20	10.1001/jamanetworkopen.2019.1868	medical and health sciences	other medical science	health policy & services
dedup_wf_001::c1e1daf2b55dd9ec8e1c7c7458bbc7bc	10.1128/mra.00874-18	natural sciences	biological sciences	plant biology & botany
dedup_wf_001::a2ef4a2720c71907180750e5871298ef	10.1016/j.nancom.2018.03.001	engineering and technology	NULL	NULL
dedup_wf_001::676f46a31519e83a89efcb1c626286fb	10.1112/topo.12174	natural sciences	NULL	NULL
dedup_wf_001::6f2761642f1e39313388e2c4060657dd	10.12688/wellcomeopenres.15846.1	medical and health sciences	health sciences	NULL
dedup_wf_001::e414c1dec599521a9635a60de0f6755b	10.21468/scipostphys.3.1.001	natural sciences	physical sciences	NULL
dedup_wf_001::f3395fe0f330164ea424dc61c86c9a3d	10.1088/1741-4326/ab6c77	natural sciences	physical sciences	nuclear & particles physics
dedup_wf_001::a4f32a97a783117012f1de11797e73f2	10.1109/tpwrs.2019.2944747	engineering and technology	electrical engineering, electronic engineering, information engineering	electrical & electronic engineering
dedup_wf_001::313ae1cd083ae1696d12dd1909f97df8	10.1016/j.expthermflusci.2019.10999410.17863/cam.46212	engineering and technology	mechanical engineering	mechanical engineering & transports
dedup_wf_001::2a300a7d3ca7347791ebcef986bc0682	10.1109/tc.2018.2860012	engineering and technology	electrical engineering, electronic engineering, information engineering	computer hardware & architecture
doiboost____::5b79bd7bd9f87361b4a4abc3cbb2df75	10.1002/mma.6622	natural sciences	mathematics	numerical & computational mathematics
dedup_wf_001::6a3f61f217a2519fbaddea1094e3bfc2	10.1051/radiopro/2020020	natural sciences	chemical sciences	NULL
dedup_wf_001::a3f0430309a639f4234a0e57b10f2dee	10.1007/s12268-019-1003-4	medical and health sciences	basic medicine	NULL
dedup_wf_001::b6b8a3a1cccbee459cf3343485efdb12	10.3390/cancers12010236	medical and health sciences	health sciences	biochemistry & molecular biology
dedup_wf_001::dd06ee7974730e7b09a4f03c83b3f9bd	10.6084/m9.figshare.991261410.6084/m9.figshare.9912614.v110.1080/00268976.2019.1665199	natural sciences	chemical sciences	physical chemistry
dedup_wf_001::027c78bef6f972b5e26dfea55d30fbe3	10.1175/jpo-d-17-0239.1	natural sciences	biological sciences	marine biology & hydrobiology
dedup_wf_001::43edc179aa9e1fbaf582c5203b18b519	10.1007/s13218-020-00674-7	engineering and technology	industrial biotechnology	industrial engineering & automation
dedup_wf_001::e7770e11cd6eb514bb52c07b5a8a80f0	10.1016/j.psyneuen.2016.02.00310.1016/j.psyneuen.2016.02.00310.7892/boris.7888610.7892/boris.78886	medical and health sciences	basic medicine	NULL
dedup_wf_001::80bc15d69bdc589149631f3439dde5aa	10.1109/ted.2018.2813542	engineering and technology	electrical engineering, electronic engineering, information engineering	electrical & electronic engineering
dedup_wf_001::42c1cfa33e7872944b920cff90f4d99e	10.3989/scimar.04739.25a	natural sciences	biological sciences	NULL
dedup_wf_001::9bacdbbaa9da3658b7243d5de8e3ce14	10.3390/su12187503	natural sciences	earth and related environmental sciences	NULL
dedup_wf_001::59e43d3527dcfecb6097fbd5740c8950	10.1016/j.ccell.2018.08.017	medical and health sciences	basic medicine	biochemistry & molecular biology
doiboost____::e024d1b738df3b24bc58fa0228542571	10.1103/physrevresearch.2.023322	natural sciences	physical sciences	nuclear & particles physics
dedup_wf_001::66e9a3237fa8178886d26d3c2d5b9e66	10.1039/c8cp03234c	natural sciences	NULL	NULL
dedup_wf_001::83737ab4205bae751571bb3b166efa18	10.5281/zenodo.369655710.5281/zenodo.369655610.1109/jsac.2016.2545384	engineering and technology	electrical engineering, electronic engineering, information engineering	networking & telecommunications
dedup_wf_001::e3f892db413a689e572dd256acad55fe	10.1038/ng.366710.1038/ng.3667.10.17615/tct6-4m2610.17863/cam.15649	medical and health sciences	health sciences	genetics & heredity
dedup_wf_001::14ba594e8fd081847bc3f50f56335003	10.1016/j.jclepro.2019.119065	engineering and technology	other engineering and technologies	building & construction
dedup_wf_001::08ac7b33a41bcea2d055ecd8585d632e	10.1111/pce.13392	agricultural and veterinary sciences	agriculture, forestry, and fisheries	agronomy & agriculture