package eu.dnetlib.dhp.resulttocommunityfromproject;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

import org.apache.commons.io.FileUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.ObjectMapper;

/**
 * @author miriam.baglioni
 * @Date 13/10/23
 */
public class PrepareAssocTest {
	private static SparkSession spark;

	private static Path workingDir;

	private static final Logger log = LoggerFactory.getLogger(PrepareAssocTest.class);

	@BeforeAll
	public static void beforeAll() throws IOException {
		// use this test's own name for the temp dir and app name
		// (the original referenced BulkTagJobTest, a copy-paste leftover)
		workingDir = Files.createTempDirectory(PrepareAssocTest.class.getSimpleName());
		log.info("using work dir {}", workingDir);

		SparkConf conf = new SparkConf();
		conf.setAppName(PrepareAssocTest.class.getSimpleName());

		conf.setMaster("local[*]");
		conf.set("spark.driver.host", "localhost");
		conf.set("hive.metastore.local", "true");
		conf.set("spark.ui.enabled", "false");
		conf.set("spark.sql.warehouse.dir", workingDir.toString());
		conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());

		spark = SparkSession
			.builder()
			.appName(PrepareAssocTest.class.getSimpleName())
			.config(conf)
			.getOrCreate();
	}

	@AfterAll
	public static void afterAll() throws IOException {
		FileUtils.deleteDirectory(workingDir.toFile());
		spark.stop();
	}

	@Test
	void test1() throws Exception {
		// run the preparation step over the test relation fixtures
		PrepareResultCommunitySet
			.main(
				new String[] {
					"-isSparkSessionManaged", Boolean.FALSE.toString(),
					"-sourcePath", getClass()
						.getResource("/eu/dnetlib/dhp/resulttocommunityfromproject/relation/")
						.getPath(),
					"-outputPath", workingDir.toString() + "/prepared",
					"-production", Boolean.TRUE.toString(),
					"-hive_metastore_uris", ""
				});

		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

		// read back the prepared output and deserialize each line
		// (typed JavaRDD instead of the original raw type)
		JavaRDD<ResultProjectList> tmp = sc
			.textFile(workingDir.toString() + "/prepared")
			.map(item -> new ObjectMapper().readValue(item, ResultProjectList.class));

		tmp.foreach(r -> System.out.println(new ObjectMapper().writeValueAsString(r)));
	}
}