package eu.dnetlib.dhp.oa.graph.dump.skgif;

import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

import java.io.Serializable;
import java.util.*;
import java.util.stream.Collectors;

import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.FilterFunction;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.schema.oaf.Datasource;
import eu.dnetlib.dhp.skgif.model.Identifier;
import eu.dnetlib.dhp.skgif.model.Prefixes;

/**
 * Dumps the OAF datasources of the graph as SKG-IF Datasource records.
 *
 * @author miriam.baglioni
 * @Date 21/02/24
 */
public class DumpDatasource implements Serializable {
	private static final Logger log = LoggerFactory.getLogger(DumpDatasource.class);

	public static void main(String[] args) throws Exception {
		String jsonConfiguration = IOUtils
			.toString(
				DumpDatasource.class
					.getResourceAsStream(
						"/eu/dnetlib/dhp/oa/graph/dump/dump_datasource_parameters.json"));

		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
		parser.parseArgument(args);

		Boolean isSparkSessionManaged = Optional
			.ofNullable(parser.get("isSparkSessionManaged"))
			.map(Boolean::valueOf)
			.orElse(Boolean.TRUE);
		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

		final String inputPath = parser.get("sourcePath");
		log.info("inputPath: {}", inputPath);

		final String workingDir = parser.get("workingDir");
		log.info("workingDir: {}", workingDir);

		final String outputPath = parser.get("outputPath");
		log.info("outputPath: {}", outputPath);

		SparkConf conf = new SparkConf();

		runWithSparkSession(
			conf,
			isSparkSessionManaged,
			spark -> {
				// clear the same directory the job writes to below
				Utils.removeOutputDir(spark, outputPath + "Datasource");
				mapDatasource(spark, inputPath, outputPath);
			});
	}

	private static void mapDatasource(SparkSession spark, String inputPath, String outputPath) {
		Utils
			.readPath(spark, inputPath + "datasource", Datasource.class)
			// keep only visible datasources that have not been deleted by inference
			.filter(
				(FilterFunction<Datasource>) d -> !d.getDataInfo().getInvisible()
					&& !d.getDataInfo().getDeletedbyinference())
			.map((MapFunction<Datasource, eu.dnetlib.dhp.skgif.model.Datasource>) d -> {
				eu.dnetlib.dhp.skgif.model.Datasource datasource = new eu.dnetlib.dhp.skgif.model.Datasource();
				datasource.setLocal_identifier(Utils.getIdentifier(Prefixes.DATASOURCE, d.getId()));
				datasource
					.setIdentifiers(
						d
							.getPid()
							.stream()
							.map(p -> Identifier.newInstance(p.getQualifier().getClassid(), p.getValue()))
							.collect(Collectors.toList()));
				datasource.setName(d.getOfficialname().getValue());
				datasource.setSubmission_policy_url(d.getSubmissionpolicyurl());
				datasource
					.setJurisdiction(
						Optional
							.ofNullable(d.getJurisdiction())
							.map(v -> v.getClassid())
							.orElse(""));
				datasource.setPreservation_policy_url(d.getPreservationpolicyurl());
				datasource.setVersion_control(d.getVersioncontrol());
				datasource
					.setData_source_classification(
						Optional
							.ofNullable(d.getEoscdatasourcetype())
							.map(v -> v.getClassname())
							.orElse(""));
				datasource.setResearch_product_type(getEoscProductType(d.getResearchentitytypes()));
				datasource.setThematic(d.getThematic());
				datasource
					.setResearch_product_access_policy(
						Optional
							.ofNullable(d.getDatabaseaccesstype())
							.map(v -> getResearchProductAccessPolicy(v.getValue()))
							.orElse(new ArrayList<>()));
				datasource
					.setResearch_product_metadata_access_policy(
						Optional
							.ofNullable(d.getResearchproductmetadataaccesspolicies())
							.map(v -> getResearchProductAccessPolicy(v))
							.orElse(new ArrayList<>()));
				return datasource;
			}, Encoders.bean(eu.dnetlib.dhp.skgif.model.Datasource.class))
			.write()
			.mode(SaveMode.Overwrite)
			.option("compression", "gzip")
			.json(outputPath + "Datasource");
	}
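	/**
	 * Maps each raw access-policy value in the input list to its COAR access-right label
	 * (see {@link #getResearchProductAccessPolicy(String)}), dropping values with no
	 * mapping and removing duplicates.
	 */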
	private static List<String> getResearchProductAccessPolicy(List<String> value) {
		return value
			.stream()
			.map(v -> getResearchProductAccessPolicy(v))
			.filter(Objects::nonNull)
			.map(v -> v.get(0))
			.distinct()
			.collect(Collectors.toList());
	}

	private static List<String> getResearchProductAccessPolicy(String value) {
		// databaseaccesstype:
		// if open => open access (https://vocabularies.coar-repositories.org/access_rights/c_abf2/)
		// if restricted => restricted access (https://vocabularies.coar-repositories.org/access_rights/c_16ec/)
		// if closed => metadata only access (https://vocabularies.coar-repositories.org/access_rights/c_14cb/)
		switch (value) {
			case "open": // https://vocabularies.coar-repositories.org/access_rights/c_abf2/
				return Arrays.asList("open access");
			case "restricted": // https://vocabularies.coar-repositories.org/access_rights/c_16ec/
				return Arrays.asList("restricted access");
			case "closed": // https://vocabularies.coar-repositories.org/access_rights/c_14cb/
				return Arrays.asList("metadata only access");
			default:
				return null;
		}
	}

	// maps the OAF research entity types onto the EOSC research product type vocabulary
	private static List<String> getEoscProductType(List<String> researchentitytypes) {

		List<String> eoscProductType = new ArrayList<>();
		if (researchentitytypes != null) {

			if (researchentitytypes.contains("Software"))
				eoscProductType.add("Research Software");
			if (researchentitytypes.contains("Research Publications") || researchentitytypes.contains("Literature"))
				eoscProductType.add("Research Literature");
			if (researchentitytypes.contains("Research Data"))
				eoscProductType.add("Research Data");
			if (researchentitytypes.contains("Organization") ||
				researchentitytypes.contains("Organizations") ||
				researchentitytypes.contains("Services") ||
				researchentitytypes.contains("Projects"))
				eoscProductType.add("Other research product");
		}
		return eoscProductType;
	}
}