package eu.dnetlib.dhp.oa.graph.dump.skgif;

import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

import java.io.Serializable;
import java.util.*;
import java.util.stream.Collectors;

import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.FilterFunction;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.api.java.function.MapGroupsFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.oa.graph.dump.skgif.beans.EmitPerManifestation;
import eu.dnetlib.dhp.schema.oaf.Datasource;
import eu.dnetlib.dhp.schema.oaf.Journal;
import eu.dnetlib.dhp.skgif.model.*;
import scala.Tuple2;

/**
 * Spark job that dumps "Journal archive" datasources of the graph as SKG-IF {@link Venue} entities.
 * <p>
 * Datasources are left-joined with the per-manifestation records emitted in a previous step
 * ({@code workingDir + "datasourcePublisher"}) to pick up the publisher, materialized to an
 * intermediate {@code workingDir + "Venues"} directory, and finally deduplicated by
 * {@code local_identifier} into {@code outputPath + "Venues"}.
 *
 * @author miriam.baglioni
 * @Date 21/02/24
 */
public class DumpVenue implements Serializable {
	private static final Logger log = LoggerFactory.getLogger(DumpVenue.class);

	public static void main(String[] args) throws Exception {
		String jsonConfiguration = IOUtils
			.toString(
				DumpVenue.class
					.getResourceAsStream(
						"/eu/dnetlib/dhp/oa/graph/dump/dump_datasource_parameters.json"));

		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
		parser.parseArgument(args);

		// when the flag is absent the session is managed by this job
		Boolean isSparkSessionManaged = Optional
			.ofNullable(parser.get("isSparkSessionManaged"))
			.map(Boolean::valueOf)
			.orElse(Boolean.TRUE);
		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

		final String inputPath = parser.get("sourcePath");
		log.info("inputPath: {}", inputPath);

		final String workingDir = parser.get("workingDir");
		log.info("workingDir: {}", workingDir);

		final String outputPath = parser.get("outputPath");
		log.info("outputPath: {}", outputPath);

		SparkConf conf = new SparkConf();

		runWithSparkSession(
			conf,
			isSparkSessionManaged,
			spark -> {
				// FIX: was outputPath + "Venue" — it never matched the "Venues" directory
				// actually written below, so the cleanup was a no-op on the real target
				Utils.removeOutputDir(spark, outputPath + "Venues");
				mapVenue(spark, inputPath, outputPath, workingDir);
			});
	}

	/**
	 * Builds the Venue dump: selects visible, non-deleted "Journal archive" datasources,
	 * enriches them with the publisher from the manifestation records (left join on
	 * datasource id == hostedby.key), writes an intermediate copy and then deduplicates
	 * by local_identifier into the final output.
	 */
	private static void mapVenue(SparkSession spark, String inputPath, String outputPath, String workingDir) {
		Dataset<EmitPerManifestation> manifestationDataset = Utils
			.readPath(spark, workingDir + "datasourcePublisher", EmitPerManifestation.class);
		Dataset<Datasource> datasourceDataset = Utils
			.readPath(spark, inputPath + "datasource", Datasource.class)
			.filter(
				(FilterFunction<Datasource>) d -> !d.getDataInfo().getInvisible()
					&& !d.getDataInfo().getDeletedbyinference()
					&& d.getEoscdatasourcetype().getClassid().equalsIgnoreCase("Journal archive"));

		datasourceDataset
			.joinWith(
				manifestationDataset,
				datasourceDataset.col("id").equalTo(manifestationDataset.col("hostedby.key")),
				"left")
			.map((MapFunction<Tuple2<Datasource, EmitPerManifestation>, Venue>) t2 -> {
				Venue venue = new Venue();
				Datasource d = t2._1();
				// prefer the printed ISSN as local identifier, fall back to the online one.
				// NOTE(review): assumes getJournal() is never null for "Journal archive"
				// datasources — confirm upstream guarantees, otherwise this NPEs
				if (d.getJournal().getIssnPrinted() != null)
					venue
						.setLocal_identifier(
							Utils.getIdentifier(Prefixes.VENUE, d.getJournal().getIssnPrinted()));
				else if (d.getJournal().getIssnOnline() != null)
					venue
						.setLocal_identifier(
							Utils.getIdentifier(Prefixes.VENUE, d.getJournal().getIssnOnline()));
				venue.setIdentifiers(getVenueIdentifier(d.getJournal()));
				venue.setName(d.getOfficialname().getValue());
				venue.setType(VenueType.JOURNAL.label);
				// publisher only available when the left join matched a manifestation
				if (t2._2() != null)
					venue.setPublisher(t2._2().getPublisher());
				// fields not derivable from the datasource are explicitly nulled
				venue.setAcronym(null);
				venue.setSeries(null);
				venue.setIs_currently_full_oa(null);
				venue.setCreation_date(null);
				venue.setContributions(null);
				return venue;
			}, Encoders.bean(Venue.class))
			.write()
			.mode(SaveMode.Overwrite)
			.option("compression", "gzip")
			.json(workingDir + "Venues");

		// deduplicate by local_identifier, keeping one arbitrary representative per key
		Utils
			.readPath(spark, workingDir + "Venues", Venue.class)
			.groupByKey((MapFunction<Venue, String>) Venue::getLocal_identifier, Encoders.STRING())
			.mapGroups((MapGroupsFunction<String, Venue, Venue>) (k, v) -> v.next(), Encoders.bean(Venue.class))
			.write()
			.mode(SaveMode.Overwrite)
			.option("compression", "gzip")
			.json(outputPath + "Venues");
	}

	/**
	 * Maps the journal's ISSNs to SKG-IF identifiers: issnOnline -> EISSN,
	 * issnPrinted -> ISSN, issnLinking -> LISSN. Missing values are skipped.
	 */
	private static List<Identifier> getVenueIdentifier(Journal journal) {
		List<Identifier> identifiers = new ArrayList<>();
		if (journal.getIssnOnline() != null)
			identifiers.add(Identifier.newInstance(VenueIdentifierType.EISSN.label, journal.getIssnOnline()));
		if (journal.getIssnPrinted() != null)
			identifiers.add(Identifier.newInstance(VenueIdentifierType.ISSN.label, journal.getIssnPrinted()));
		if (journal.getIssnLinking() != null)
			identifiers.add(Identifier.newInstance(VenueIdentifierType.LISSN.label, journal.getIssnLinking()));
		return identifiers;
	}

	/**
	 * Maps a list of database access type values to the distinct set of COAR access-policy
	 * labels, dropping unmapped values.
	 * NOTE(review): currently unused in this class — possibly shared with the datasource dump.
	 */
	private static List<String> getResearchProductAccessPolicy(List<String> value) {
		return value
			.stream()
			.map(v -> getResearchProductAccessPolicy(v))
			.filter(Objects::nonNull)
			.map(v -> v.get(0))
			.distinct()
			.collect(Collectors.toList());
	}

	/**
	 * Maps a single databaseaccesstype value to its COAR access-policy label:
	 * open -> open access (https://vocabularies.coar-repositories.org/access_rights/c_abf2/),
	 * restricted -> restricted access (https://vocabularies.coar-repositories.org/access_rights/c_16ec/),
	 * closed -> metadata only access (https://vocabularies.coar-repositories.org/access_rights/c_14cb/).
	 * Returns null for any other value.
	 */
	private static List<String> getResearchProductAccessPolicy(String value) {
		switch (value) {
			case "open":// (https://vocabularies.coar-repositories.org/access_rights/c_abf2/)
				return Arrays.asList("open access");
			case "restricted":// (https://vocabularies.coar-repositories.org/access_rights/c_16ec/)
				return Arrays.asList("restricted access");
			case "closed":// (https://vocabularies.coar-repositories.org/access_rights/c_14cb/)
				return Arrays.asList("metadata only access");
			default:
				return null;
		}
	}

	/**
	 * Maps graph research entity type names onto the EOSC product-type vocabulary.
	 * Returns an empty list when the input is null or nothing matches.
	 * NOTE(review): currently unused in this class — possibly shared with the datasource dump.
	 */
	private static List<String> getEoscProductType(List<String> researchentitytypes) {
		List<String> eoscProductType = new ArrayList<>();
		if (researchentitytypes != null) {
			if (researchentitytypes.contains("Software"))
				eoscProductType.add("Research Software");
			if (researchentitytypes.contains("Research Publications")
				|| researchentitytypes.contains("Literature"))
				eoscProductType.add("Research Literature");
			if (researchentitytypes.contains("Research Data"))
				eoscProductType.add("Research Data");
			if (researchentitytypes.contains("Organization")
				|| researchentitytypes.contains("Organizations")
				|| researchentitytypes.contains("Services")
				|| researchentitytypes.contains("Projects"))
				eoscProductType.add("Other research product");
		}
		return eoscProductType;
	}
}