Rearrange resources folder structure

Serafeim Chatzopoulos 2023-03-21 18:24:12 +02:00
parent f992ecb657
commit 3e8a4cf952
7 changed files with 64 additions and 4 deletions

View File

@@ -126,12 +126,19 @@ oa_objects_df = oa_objects_df.drop('deletedbyinference').drop('invisible').disti
 # Collect only valid citations i.e., invisible = false & deletedbyinference=false
 cites_df = spark.read.json(graph_folder + "/relation")\
-    .select(F.col('source').alias('citing'), F.col('target').alias('cited'), 'relClass', 'dataInfo.deletedbyinference', 'dataInfo.invisible')\
+    .select(F.col('source').alias('citing'), F.col('target').alias('cited'), 'collectedfrom.value', 'relClass', 'dataInfo.deletedbyinference', 'dataInfo.invisible')\
     .where( (F.col('relClass') == "Cites") \
         & (F.col('dataInfo.deletedbyinference') == "false")\
         & (F.col('dataInfo.invisible') == "false"))\
     .drop('dataInfo.deletedbyinference').drop('dataInfo.invisible')\
-    .repartition(num_partitions, 'citing').drop('relClass')
+    .repartition(num_partitions, 'citing').drop('relClass')\
+    .withColumn('collected_lower', F.expr('transform(collectedfrom.value, x -> lower(x))'))\
+    .drop('collectedfrom.value')\
+    .where(
+        (F.array_contains(F.col('collected_lower'), "opencitations"))
+        | (F.array_contains(F.col('collected_lower'), "crossref"))
+        | (F.array_contains(F.col('collected_lower'), "mag"))
+    ).drop('collected_lower')
 # print ("Cited df has: " + str(cites_df.count()) + " entries")
 # DEPRECATED
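
Note: the net effect of this change is to keep only citation links collected from OpenCitations, Crossref, or MAG, matching the collectedfrom.value entries case-insensitively. A minimal self-contained sketch of the same filter, with toy data standing in for the graph (session setup and column names here are illustrative, not part of the commit):

    from pyspark.sql import SparkSession
    import pyspark.sql.functions as F

    spark = SparkSession.builder.master("local[*]").getOrCreate()

    # Toy relations; 'collected' stands in for collectedfrom.value.
    rels = spark.createDataFrame(
        [("p1", "p2", ["OpenCitations"]),
         ("p3", "p4", ["Some Repository"]),
         ("p5", "p6", ["Crossref", "MAG"])],
        ["citing", "cited", "collected"])

    filtered = rels\
        .withColumn('collected_lower', F.expr('transform(collected, x -> lower(x))'))\
        .where((F.array_contains(F.col('collected_lower'), "opencitations"))
               | (F.array_contains(F.col('collected_lower'), "crossref"))
               | (F.array_contains(F.col('collected_lower'), "mag")))\
        .drop('collected_lower')

    filtered.show()  # keeps rows 1 and 3; "Some Repository" is dropped

Keep in mind that array_contains does exact element matching: "crossref" only matches an element that lower-cases to exactly "crossref", not a substring of a longer label.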

View File

@@ -63,6 +63,9 @@ oozieWorkflowPath=user/ilias.kanellos/workflow_example/
 # The directory where the workflow data is/should be stored
 workflowDataDir=user/ilias.kanellos/ranking_workflow
+
+# Directory where json data containing scores will be output
+bipScorePath=${workflowDataDir}/openaire_universe_scores/
 # Directory where dataframes are checkpointed
 checkpointDir=${nameNode}/${workflowDataDir}/check/

@@ -84,3 +87,6 @@ wfAppPath=${nameNode}/${oozieWorkflowPath}
 # The following is needed as a property of a workflow
 oozie.wf.application.path=${wfAppPath}
+
+# Path where the final actionset output should be written
+actionSetOutputPath=${workflowDataDir}/bip_actionsets/
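
Note: these two properties wire the ranking output into the new actionset steps below: bipScorePath is where the ranking scripts leave their score records, and actionSetOutputPath is where the actionset job writes. A quick sanity check that the score directory is readable before the actionset job runs (purely illustrative; assumes the scores are plain JSON records, which this diff does not show):

    from pyspark.sql import SparkSession

    spark = SparkSession.builder.getOrCreate()

    # Resolve ${workflowDataDir} by hand, mirroring the properties above.
    workflow_data_dir = "user/ilias.kanellos/ranking_workflow"
    bip_score_path = workflow_data_dir + "/openaire_universe_scores/"

    # If the ranking step wrote JSON score records here, a few should show up.
    scores_df = spark.read.json(bip_score_path)
    scores_df.show(5, truncate=False)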

View File

@@ -552,11 +552,50 @@
         </spark>
         <!-- Do this after finishing okay -->
-        <ok to="end" />
+        <ok to="deleteOutputPathForActionSet" />
         <!-- Go there if we have an error -->
         <error to="map-scores-fail" />
     </action>
<action name="deleteOutputPathForActionSet">
<fs>
<delete path="${actionSetOutputPath}"/>
<mkdir path="${actionSetOutputPath}"/>
<!--
<delete path="${workingDir}"/>
<mkdir path="${workingDir}"/>
-->
</fs>
<ok to="createActionSet"/>
<error to="actionset-delete-fail"/>
</action>
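
Note: the <delete>/<mkdir> pair makes this step idempotent: a re-run wipes any stale actionset output before the next action writes. A rough standalone equivalent in Python (hypothetical namenode host and port; pyarrow's HDFS binding is used here for illustration only, the workflow itself relies on the Oozie fs action):

    from pyarrow import fs

    # Hypothetical connection details; Oozie gets these from ${nameNode}.
    hdfs = fs.HadoopFileSystem(host="namenode.example.org", port=8020)

    output_path = "/user/ilias.kanellos/ranking_workflow/bip_actionsets"

    # Mirror the <delete>/<mkdir> pair: clear the previous run's output, then recreate.
    try:
        hdfs.delete_dir(output_path)
    except FileNotFoundError:
        pass  # first run: nothing to delete yet
    hdfs.create_dir(output_path, recursive=True)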
<action name="createActionSet">
<spark xmlns="uri:oozie:spark-action:0.2">
<master>yarn</master>
<mode>cluster</mode>
<name>Produces the atomic action with the bip finder scores for publications</name>
<class>eu.dnetlib.dhp.actionmanager.bipfinder.SparkAtomicActionScoreJob</class>
<jar>dhp-aggregation-${projectVersion}.jar</jar>
<spark-opts>
--executor-memory=${sparkExecutorMemory}
--executor-cores=${sparkExecutorCores}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
</spark-opts>
<arg>--inputPath</arg><arg>${bipScorePath}</arg>
<arg>--outputPath</arg><arg>${actionSetOutputPath}</arg>
</spark>
<ok to="end"/>
<error to="actionset-creation-fail"/>
</action>
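
Note: SparkAtomicActionScoreJob reads the score records from --inputPath and serializes them as atomic actions under --outputPath. Assuming the actionset comes out as a Text/Text sequence file, which is how such actionsets are commonly stored but is not stated in this diff, a quick inspection from PySpark could look like:

    from pyspark.sql import SparkSession

    spark = SparkSession.builder.getOrCreate()

    # Hypothetical inspection; the path mirrors ${actionSetOutputPath}.
    actions = spark.sparkContext.sequenceFile(
        "user/ilias.kanellos/ranking_workflow/bip_actionsets")
    for key, value in actions.take(2):
        print(key, str(value)[:200])  # truncated preview of each serialized action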
     <!-- TODO: end the workflow-->
@@ -597,4 +636,12 @@
         <message>Mapping scores to DOIs failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
     </kill>
+
+    <kill name="actionset-delete-fail">
+        <message>Deleting output path for actionsets failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
+    </kill>
+
+    <kill name="actionset-creation-fail">
+        <message>ActionSet creation failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
+    </kill>
+
 </workflow-app>