forked from D-Net/dnet-hadoop
removed unneeded test class
This commit is contained in:
parent c80b768af0
commit a737dd47b6
@@ -6,7 +6,7 @@ import static eu.dnetlib.dhp.bulktag.community.TaggingConstants.ZENODO_COMMUNITY
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.HashMap;
import java.util.List;

import org.apache.commons.io.FileUtils;
@@ -1,90 +0,0 @@
package eu.dnetlib.dhp.resulttocommunityfromproject;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

import org.apache.commons.io.FileUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.bulktag.BulkTagJobTest;
import eu.dnetlib.dhp.resulttocommunityfromorganization.ResultCommunityList;

/**
 * @author miriam.baglioni
 * @Date 13/10/23
 */
public class PrepareAssocTest {

	private static SparkSession spark;

	private static Path workingDir;

	private static final Logger log = LoggerFactory.getLogger(PrepareAssocTest.class);

	@BeforeAll
	public static void beforeAll() throws IOException {
		workingDir = Files.createTempDirectory(BulkTagJobTest.class.getSimpleName());
		log.info("using work dir {}", workingDir);

		SparkConf conf = new SparkConf();
		conf.setAppName(BulkTagJobTest.class.getSimpleName());

		conf.setMaster("local[*]");
		conf.set("spark.driver.host", "localhost");
		conf.set("hive.metastore.local", "true");
		conf.set("spark.ui.enabled", "false");
		conf.set("spark.sql.warehouse.dir", workingDir.toString());
		conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());

		spark = SparkSession
			.builder()
			.appName(PrepareAssocTest.class.getSimpleName())
			.config(conf)
			.getOrCreate();
	}

	@AfterAll
	public static void afterAll() throws IOException {
		FileUtils.deleteDirectory(workingDir.toFile());
		spark.stop();
	}

	@Disabled
	@Test
	void test1() throws Exception {

		PrepareResultCommunitySet
			.main(
				new String[] {
					"-isSparkSessionManaged", Boolean.FALSE.toString(),
					"-sourcePath",
					getClass().getResource("/eu/dnetlib/dhp/resulttocommunityfromproject/relation/").getPath(),
					"-outputPath", workingDir.toString() + "/prepared",
					"-production", Boolean.TRUE.toString()
				});

		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

		JavaRDD<ResultProjectList> tmp = sc
			.textFile(workingDir.toString() + "/prepared")
			.map(item -> new ObjectMapper().readValue(item, ResultProjectList.class));

		Assertions.assertEquals(4, tmp.count());
		Assertions.assertEquals(2, tmp.filter(rpl -> rpl.getCommunityList().contains("aurora")).count());
		Assertions.assertEquals(1, tmp.filter(rpl -> rpl.getCommunityList().contains("sdsn-gr")).count());
		Assertions.assertEquals(1, tmp.filter(rpl -> rpl.getCommunityList().contains("netherlands")).count());
	}

}
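For reference, the deleted test reads back the JSON lines written by PrepareResultCommunitySet and maps each line onto a ResultProjectList bean with Jackson. A minimal sketch of what such a bean could look like follows; the field names and types here are assumptions for illustration, not the repository's actual definition.

package eu.dnetlib.dhp.resulttocommunityfromproject;

import java.io.Serializable;
import java.util.ArrayList;

// Hypothetical sketch of a serializable bean matching the accessor used in the
// removed test (getCommunityList). The real ResultProjectList in dnet-hadoop
// may declare different or additional fields.
public class ResultProjectList implements Serializable {

	private String resultId; // assumed identifier field, not confirmed by the diff
	private ArrayList<String> communityList = new ArrayList<>();

	public String getResultId() {
		return resultId;
	}

	public void setResultId(String resultId) {
		this.resultId = resultId;
	}

	public ArrayList<String> getCommunityList() {
		return communityList;
	}

	public void setCommunityList(ArrayList<String> communityList) {
		this.communityList = communityList;
	}
}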