oct-update #1

Merged
sandro.labruzzo merged 42 commits from oct-update into master 2024-11-18 10:43:07 +01:00
6 changed files with 276 additions and 335 deletions
Showing only changes of commit 1bd836b88a

View File

@@ -1,6 +1,5 @@
from __future__ import annotations
import pendulum
from airflow.decorators import dag
from airflow.models.baseoperator import chain
from airflow.models.param import Param
@@ -8,42 +7,69 @@ from airflow.operators.trigger_dagrun import TriggerDagRunOperator
import dag_utils
@dag(
dag_id="build_openaire_graph",
dag_display_name="Build the OpenAIRE graph",
params={
"S3_CONN_ID": Param("s3_conn", type='string', description="Airflow connection for S3 endpoint")
"S3_CONN_ID": Param("s3_conn", type='string', description="Airflow connection for S3 endpoint"),
"GRAPH_PATH": Param("s3a://graph/tmp/prod_provision/graph", type='string', description=""),
"WRKDIR_PATH": Param("s3a://graph/tmp/prod_provision/working_dir", type='string', description=""),
"IS_LOOKUP_URL": Param("http://services.openaire.eu:8280/is/services/isLookUp?wsdl", type='string',
description=""),
"DEDUP_CONFIG_ID": Param("dedup-result-decisiontree-v4", type='string', description=""),
"ORCID_PATH": Param("s3a://graph/data/orcid_2023/tables", type='string', description="")
},
tags=["openaire"]
)
def build_new_graph():
chain(TriggerDagRunOperator(
chain(
TriggerDagRunOperator(
task_id="dedup",
trigger_dag_id="dedup_graph",
wait_for_completion=True),
task_display_name="Deduplicate Research Results",
trigger_dag_id="results_deduplication",
wait_for_completion=True,
conf={
"S3_CONN_ID": "{{ dag_run.conf.get('S3_CONN_ID') }}",
"INPUT_PATH": "{{ dag_run.conf.get('GRAPH_PATH') }}/" + dag_utils.BUILD_PHASES["inference"],
"OUTPUT_PATH": "{{ dag_run.conf.get('GRAPH_PATH') }}/" + dag_utils.BUILD_PHASES["dedup"],
"WRKDIR_PATH": "{{ dag_run.conf.get('WRKDIR_PATH') }}/dedup",
"IS_LOOKUP_URL": "{{ dag_run.conf.get('IS_LOOKUP_URL') }}",
"DEDUP_CONFIG_ID": "{{ dag_run.conf.get('DEDUP_CONFIG_ID') }}"
}
),
TriggerDagRunOperator(
task_id="consistency",
task_display_name="Enforce Consistency of Graph",
trigger_dag_id="consistency_graph",
wait_for_completion=True
wait_for_completion=True,
# conf={
# "file": "{{ task_instance.xcom_pull(task_ids='check_new_dump_availability', key='file_path') }}",
# "dst_bucket": "{{ dag_run.conf.get('S3_BUCKET') }}",
# }
conf={
"S3_CONN_ID": "{{ dag_run.conf.get('S3_CONN_ID') }}",
"INPUT_PATH": "{{ dag_run.conf.get('GRAPH_PATH') }}/" + dag_utils.BUILD_PHASES["dedup"],
"OUTPUT_PATH": "{{ dag_run.conf.get('GRAPH_PATH') }}/" + dag_utils.BUILD_PHASES["consistency"],
"WRKDIR_PATH": "{{ dag_run.conf.get('WRKDIR_PATH') }}/dedup",
"IS_LOOKUP_URL": "{{ dag_run.conf.get('IS_LOOKUP_URL') }}"
}
),
TriggerDagRunOperator(
task_id="orcid_enrichment",
task_display_name="Enrich Graph with ORCID data",
trigger_dag_id="orcid_enrichment_graph",
wait_for_completion=True
wait_for_completion=True,
# conf={
# "src_key": "/data/graph/{{ task_instance.xcom_pull(task_ids='check_new_dump_availability', key='file_path') }}",
# "src_bucket": "{{ dag_run.conf.get('S3_BUCKET') }}",
# "dst_key_prefix": "/data/graph/{{ task_instance.xcom_pull(task_ids='check_new_dump_availability', key='timestamp') }}",
# "dst_bucket": "{{ dag_run.conf.get('S3_BUCKET') }}"
# }
conf={
"S3_CONN_ID": "{{ dag_run.conf.get('S3_CONN_ID') }}",
"ORCID_PATH": "{{ dag_run.conf.get('ORCID_PATH') }}",
"INPUT_PATH": "{{ dag_run.conf.get('GRAPH_PATH') }}/" + dag_utils.BUILD_PHASES["consistency"],
"OUTPUT_PATH": "{{ dag_run.conf.get('GRAPH_PATH') }}/" + dag_utils.BUILD_PHASES["orcid_enhancement"],
"WRKDIR_PATH": "{{ dag_run.conf.get('WRKDIR_PATH') }}/orcid_enrichment"
}
)
)
build_new_graph()
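
For reference, the templated INPUT_PATH/OUTPUT_PATH values above are composed from the GRAPH_PATH param and the phase directories in dag_utils.BUILD_PHASES (see the dag_utils change further down). A minimal sketch of how the conf for the dedup trigger resolves, assuming the default GRAPH_PATH:

# Sketch only: resolution of the dedup trigger's paths, assuming the default
# GRAPH_PATH param and the BUILD_PHASES mapping defined in dag_utils.
BUILD_PHASES = {
    "inference": "05_graph_inferred",
    "dedup": "06_graph_dedup",
}

graph_path = "s3a://graph/tmp/prod_provision/graph"  # default GRAPH_PATH
dedup_conf = {
    "INPUT_PATH": f"{graph_path}/{BUILD_PHASES['inference']}",   # .../05_graph_inferred
    "OUTPUT_PATH": f"{graph_path}/{BUILD_PHASES['dedup']}",      # .../06_graph_dedup
    "WRKDIR_PATH": "s3a://graph/tmp/prod_provision/working_dir/dedup",
}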

View File

@@ -1,101 +1,71 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
This is an example DAG which uses SparkKubernetesOperator and SparkKubernetesSensor.
In this example, we create two tasks which execute sequentially.
The first task is to submit sparkApplication on Kubernetes cluster(the example uses spark-pi application).
and the second task is to check the final state of the sparkApplication that submitted in the first state.
import os
from datetime import timedelta
Spark-on-k8s operator is required to be already installed on Kubernetes
https://github.com/GoogleCloudPlatform/spark-on-k8s-operator
"""
# [START import_module]
# The DAG object; we'll need this to instantiate a DAG
from airflow import DAG
# Operators; we need this to operate!
from airflow.decorators import dag
from airflow.models.baseoperator import chain
from airflow.models.param import Param
from airflow.providers.cncf.kubernetes.operators.spark_kubernetes import SparkKubernetesOperator
from airflow.utils.dates import days_ago
from spark_configurator import SparkConfigurator
# [END import_module]
EXECUTION_TIMEOUT = int(os.getenv("EXECUTION_TIMEOUT", 6))
# [START default_args]
# These args will get passed on to each operator
# You can override them on a per-task basis during operator initialization
default_args = {
'owner': 'airflow',
'depends_on_past': False,
'start_date': days_ago(1),
'email': ['airflow@example.com'],
'email_on_failure': False,
'email_on_retry': False,
'max_active_runs': 1,
'retries': 3
"execution_timeout": timedelta(days=EXECUTION_TIMEOUT),
"retries": int(os.getenv("DEFAULT_TASK_RETRIES", 1)),
"retry_delay": timedelta(seconds=int(os.getenv("DEFAULT_RETRY_DELAY_SECONDS", 60)))
}
dag = DAG(
'consistency_graph',
default_args=default_args,
schedule_interval=None,
tags=['example', 'spark']
)
propagaterel = SparkKubernetesOperator(
@dag(
dag_id="consistency_graph",
dag_display_name="Enforce Consistency of Graph",
default_args=default_args,
params={
"S3_CONN_ID": Param("s3_conn", type='string', description="Airflow connection of S3 endpoint"),
"INPUT_PATH": Param("s3a://graph/tmp/prod_provision/graph/06_graph_dedup", type='string', description=""),
"OUTPUT_PATH": Param("s3a://graph/tmp/prod_provision/graph/07_graph_consistent", type='string', description=""),
"WRKDIR_PATH": Param("s3a://graph/tmp/prod_provision/working_dir/dedup", type='string', description=""),
"IS_LOOKUP_URL": Param("http://services.openaire.eu:8280/is/services/isLookUp?wsdl", type='string',
description="")
},
tags=["openaire"]
)
def consistency_graph_dag():
propagate_rel = SparkKubernetesOperator(
task_id='PropagateRelation',
task_display_name="Propagate Relations",
namespace='dnet-spark-jobs',
template_spec=SparkConfigurator(
name="propagaterels-{{ ds }}-{{ task_instance.try_number }}",
mainClass="eu.dnetlib.dhp.oa.dedup.SparkPropagateRelation",
jarLocation='s3a://binaries/dhp-shade-package-1.2.5-SNAPSHOT.jar',
arguments=["--graphBasePath", "s3a://graph/tmp/prod_provision/graph/06_graph_dedup",
"--graphOutputPath", "s3a://graph/tmp/prod_provision/graph/07_graph_consistent",
"--workingPath", "s3a://graph/tmp/prod_provision/working_dir/dedup"
],
executor_cores=8,
executor_memory="4G",
executor_instances=1,
executor_memoryOverhead="3G").get_configuration(),
kubernetes_conn_id="kubernetes_default",
dag=dag
arguments=["--graphBasePath", "{{ dag_run.conf.get('INPUT_PATH') }}",
"--graphOutputPath", "{{ dag_run.conf.get('OUTPUT_PATH') }}",
"--workingPath", "{{ dag_run.conf.get('WRKDIR_PATH') }}"
]).get_configuration(),
kubernetes_conn_id="kubernetes_default"
)
group_entities = SparkKubernetesOperator(
task_id='GroupEntities',
task_display_name="Group results by id",
namespace='dnet-spark-jobs',
template_spec=SparkConfigurator(
name="groupentities-{{ ds }}-{{ task_instance.try_number }}",
mainClass="eu.dnetlib.dhp.oa.merge.GroupEntitiesSparkJob",
jarLocation='s3a://binaries/dhp-shade-package-1.2.5-SNAPSHOT.jar',
arguments=["--graphInputPath", "s3a://graph/tmp/prod_provision/graph/06_graph_dedup",
"--checkpointPath", "s3a://graph/tmp/prod_provision/working_dir/dedup/grouped_entities",
"--outputPath", "s3a://graph/tmp/prod_provision/graph/07_graph_consistent",
"--isLookupUrl", "http://services.openaire.eu:8280/is/services/isLookUp?wsdl",
arguments=["--graphInputPath", "{{ dag_run.conf.get('INPUT_PATH') }}",
"--checkpointPath", "{{ dag_run.conf.get('WRKDIR_PATH') }}/grouped_entities",
"--outputPath", "{{ dag_run.conf.get('OUTPUT_PATH') }}",
"--isLookupUrl", "{{ dag_run.conf.get('IS_LOOKUP_URL') }}",
"--filterInvisible", "true"
],
#
executor_cores=8,
executor_memory="4G",
executor_instances=1,
executor_memoryOverhead="3G").get_configuration(),
kubernetes_conn_id="kubernetes_default",
dag=dag
]).get_configuration(),
kubernetes_conn_id="kubernetes_default"
)
propagaterel >> group_entities
chain(propagate_rel, group_entities)
consistency_graph_dag()
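
At run time the {{ dag_run.conf.get(...) }} expressions are filled from the conf passed by the parent build_openaire_graph DAG. A rough sketch of the rendered PropagateRelation arguments, assuming that conf is supplied as shown above:

# Sketch only: conf as passed by build_openaire_graph, and the arguments
# PropagateRelation would see after Jinja rendering.
dag_run_conf = {
    "INPUT_PATH": "s3a://graph/tmp/prod_provision/graph/06_graph_dedup",
    "OUTPUT_PATH": "s3a://graph/tmp/prod_provision/graph/07_graph_consistent",
    "WRKDIR_PATH": "s3a://graph/tmp/prod_provision/working_dir/dedup",
}
arguments = [
    "--graphBasePath", dag_run_conf["INPUT_PATH"],
    "--graphOutputPath", dag_run_conf["OUTPUT_PATH"],
    "--workingPath", dag_run_conf["WRKDIR_PATH"],
]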

View File

@@ -1,6 +1,14 @@
from airflow.hooks.base import BaseHook
from airflow.providers.amazon.aws.hooks.s3 import S3Hook
BUILD_PHASES = {
"inference": "05_graph_inferred",
"dedup": "06_graph_dedup",
"consistency": "07_graph_consistent",
"enrichment": "08_graph_dedup_enriched", # actionset
"orcid_enhancement": "09_graph_orcid_enriched"
}
def get_bucket_name(context: dict, hook: S3Hook, param_name: str):
bucket_name = context["params"][param_name]
if not bucket_name:

View File

@@ -1,228 +1,173 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
This is an example DAG which uses SparkKubernetesOperator and SparkKubernetesSensor.
In this example, we create two tasks which execute sequentially.
The first task is to submit sparkApplication on Kubernetes cluster(the example uses spark-pi application).
and the second task is to check the final state of the sparkApplication that submitted in the first state.
import os
from datetime import timedelta
Spark-on-k8s operator is required to be already installed on Kubernetes
https://github.com/GoogleCloudPlatform/spark-on-k8s-operator
"""
# [START import_module]
# The DAG object; we'll need this to instantiate a DAG
from airflow import DAG
# Operators; we need this to operate!
from airflow.decorators import dag
from airflow.models.baseoperator import chain
from airflow.models.param import Param
from airflow.providers.cncf.kubernetes.operators.spark_kubernetes import SparkKubernetesOperator
from airflow.utils.dates import days_ago
from spark_configurator import SparkConfigurator
# [END import_module]
EXECUTION_TIMEOUT = int(os.getenv("EXECUTION_TIMEOUT", 6))
# [START default_args]
# These args will get passed on to each operator
# You can override them on a per-task basis during operator initialization
default_args = {
'owner': 'airflow',
'depends_on_past': False,
'start_date': days_ago(1),
'email': ['airflow@example.com'],
'email_on_failure': False,
'email_on_retry': False,
'max_active_runs': 1,
'retries': 3
"execution_timeout": timedelta(days=EXECUTION_TIMEOUT),
"retries": int(os.getenv("DEFAULT_TASK_RETRIES", 1)),
"retry_delay": timedelta(seconds=int(os.getenv("DEFAULT_RETRY_DELAY_SECONDS", 60)))
}
dag = DAG(
'dedup_graph',
default_args=default_args,
schedule_interval=None,
tags=['example', 'spark']
)
@dag(
dag_id="results_deduplication",
dag_display_name="Deduplicate Research Results",
default_args=default_args,
params={
"S3_CONN_ID": Param("s3_conn", type='string', description="Airflow connection of S3 endpoint"),
"INPUT_PATH": Param("s3a://graph/tmp/prod_provision/graph/05_graph_inferred", type='string', description=""),
"OUTPUT_PATH": Param("s3a://graph/tmp/prod_provision/graph/06_graph_dedup", type='string', description=""),
"WRKDIR_PATH": Param("s3a://graph/tmp/prod_provision/working_dir/dedup", type='string', description=""),
"IS_LOOKUP_URL": Param("http://services.openaire.eu:8280/is/services/isLookUp?wsdl", type='string',
description=""),
"DEDUP_CONFIG_ID": Param("dedup-result-decisiontree-v4", type='string', description="")
},
tags=["openaire"]
)
def results_deduplication_dag():
simrel = SparkKubernetesOperator(
task_id='CreateSimRel',
task_display_name="Create Similarity Relations",
namespace='dnet-spark-jobs',
template_spec=SparkConfigurator(
name="createsimrels-{{ ds }}-{{ task_instance.try_number }}",
mainClass="eu.dnetlib.dhp.oa.dedup.SparkCreateSimRels",
jarLocation='s3a://binaries/dhp-shade-package-1.2.5-SNAPSHOT.jar',
arguments=["--graphBasePath", "s3a://graph/tmp/prod_provision/graph/05_graph_inferred",
"--isLookUpUrl", "http://services.openaire.eu:8280/is/services/isLookUp?wsdl",
"--actionSetId", "dedup-result-decisiontree-v4",
"--workingPath", "s3a://graph/tmp/prod_provision/working_dir/dedup",
arguments=["--graphBasePath", "{{ dag_run.conf.get('INPUT_PATH') }}",
"--isLookUpUrl", "{{ dag_run.conf.get('IS_LOOKUP_URL') }}",
"--actionSetId", "{{ dag_run.conf.get('DEDUP_CONFIG_ID') }}",
"--workingPath", "{{ dag_run.conf.get('WRKDIR_PATH') }}",
"--numPartitions", "64"
],
executor_cores=8,
executor_memory="4G",
executor_instances=1,
executor_memoryOverhead="3G").get_configuration(),
kubernetes_conn_id="kubernetes_default",
dag=dag
]).get_configuration(),
kubernetes_conn_id="kubernetes_default"
)
whitelist = SparkKubernetesOperator(
task_id='WhitelistSimRels',
task_display_name="Add Whitelist Similarity Relations",
namespace='dnet-spark-jobs',
template_spec=SparkConfigurator(
name="whitelistsimrels-{{ ds }}-{{ task_instance.try_number }}",
mainClass="eu.dnetlib.dhp.oa.dedup.SparkWhitelistSimRels",
jarLocation='s3a://binaries/dhp-shade-package-1.2.5-SNAPSHOT.jar',
arguments=["--graphBasePath", "s3a://graph/tmp/prod_provision/graph/05_graph_inferred",
"--isLookUpUrl", "http://services.openaire.eu:8280/is/services/isLookUp?wsdl",
"--actionSetId", "dedup-result-decisiontree-v4",
"--workingPath", "s3a://graph/tmp/prod_provision/working_dir/dedup",
arguments=["--graphBasePath", "{{ dag_run.conf.get('INPUT_PATH') }}",
"--isLookUpUrl", "{{ dag_run.conf.get('IS_LOOKUP_URL') }}",
"--actionSetId", "{{ dag_run.conf.get('DEDUP_CONFIG_ID') }}",
"--workingPath", "{{ dag_run.conf.get('WRKDIR_PATH') }}",
"--whiteListPath", "s3a://graph/data/dedup/whitelist_prod", # TODO: copy!
"--numPartitions", "64"
],
executor_cores=8,
executor_memory="4G",
executor_instances=1,
executor_memoryOverhead="3G").get_configuration(),
kubernetes_conn_id="kubernetes_default",
dag=dag
]).get_configuration(),
kubernetes_conn_id="kubernetes_default"
)
createmergerel = SparkKubernetesOperator(
task_id='CreateMergeRels',
task_display_name="Create Merge Relations",
namespace='dnet-spark-jobs',
template_spec=SparkConfigurator(
name="createmergerels-{{ ds }}-{{ task_instance.try_number }}",
mainClass="eu.dnetlib.dhp.oa.dedup.SparkCreateMergeRels",
jarLocation='s3a://binaries/dhp-shade-package-1.2.5-SNAPSHOT.jar',
arguments=["--graphBasePath", "s3a://graph/tmp/prod_provision/graph/05_graph_inferred",
"--isLookUpUrl", "http://services.openaire.eu:8280/is/services/isLookUp?wsdl",
"--actionSetId", "dedup-result-decisiontree-v4",
"--workingPath", "s3a://graph/tmp/prod_provision/working_dir/dedup",
arguments=["--graphBasePath", "{{ dag_run.conf.get('INPUT_PATH') }}",
"--isLookUpUrl", "{{ dag_run.conf.get('IS_LOOKUP_URL') }}",
"--actionSetId", "{{ dag_run.conf.get('DEDUP_CONFIG_ID') }}",
"--workingPath", "{{ dag_run.conf.get('WRKDIR_PATH') }}",
"--cutConnectedComponent", "200",
"--hiveMetastoreUris", "",
"--pivotHistoryDatabase", ""
],
executor_cores=8,
executor_memory="4G",
executor_instances=1,
executor_memoryOverhead="3G").get_configuration(),
kubernetes_conn_id="kubernetes_default",
dag=dag
]).get_configuration(),
kubernetes_conn_id="kubernetes_default"
)
creatededuprecord = SparkKubernetesOperator(
task_id='CreateDedupRecord',
task_display_name="Create Dedup Record",
namespace='dnet-spark-jobs',
template_spec=SparkConfigurator(
name="creatededuprecord-{{ ds }}-{{ task_instance.try_number }}",
mainClass="eu.dnetlib.dhp.oa.dedup.SparkCreateDedupRecord",
jarLocation='s3a://binaries/dhp-shade-package-1.2.5-SNAPSHOT.jar',
arguments=["--graphBasePath", "s3a://graph/tmp/prod_provision/graph/05_graph_inferred",
"--isLookUpUrl", "http://services.openaire.eu:8280/is/services/isLookUp?wsdl",
"--actionSetId", "dedup-result-decisiontree-v4",
"--workingPath", "s3a://graph/tmp/prod_provision/working_dir/dedup"
],
executor_cores=8,
executor_memory="4G",
executor_instances=1,
executor_memoryOverhead="3G").get_configuration(),
kubernetes_conn_id="kubernetes_default",
dag=dag
arguments=["--graphBasePath", "{{ dag_run.conf.get('INPUT_PATH') }}",
"--isLookUpUrl", "{{ dag_run.conf.get('IS_LOOKUP_URL') }}",
"--actionSetId", "{{ dag_run.conf.get('DEDUP_CONFIG_ID') }}",
"--workingPath", "{{ dag_run.conf.get('WRKDIR_PATH') }}"
]).get_configuration(),
kubernetes_conn_id="kubernetes_default"
)
copyopenorgsmergerel = SparkKubernetesOperator(
task_id='CopyOpenorgsMergeRels',
task_display_name="Copy Openorgs Merge Relations",
namespace='dnet-spark-jobs',
template_spec=SparkConfigurator(
name="copyopenorgsmergerels-{{ ds }}-{{ task_instance.try_number }}",
mainClass="eu.dnetlib.dhp.oa.dedup.SparkCopyOpenorgsMergeRels",
jarLocation='s3a://binaries/dhp-shade-package-1.2.5-SNAPSHOT.jar',
arguments=["--graphBasePath", "s3a://graph/tmp/prod_provision/graph/05_graph_inferred",
"--isLookUpUrl", "http://services.openaire.eu:8280/is/services/isLookUp?wsdl",
"--actionSetId", "dedup-result-decisiontree-v4",
"--workingPath", "s3a://graph/tmp/prod_provision/working_dir/dedup",
arguments=["--graphBasePath", "{{ dag_run.conf.get('INPUT_PATH') }}",
"--isLookUpUrl", "{{ dag_run.conf.get('IS_LOOKUP_URL') }}",
"--actionSetId", "{{ dag_run.conf.get('DEDUP_CONFIG_ID') }}",
"--workingPath", "{{ dag_run.conf.get('WRKDIR_PATH') }}",
"--numPartitions", "64"
],
executor_cores=8,
executor_memory="4G",
executor_instances=1,
executor_memoryOverhead="3G").get_configuration(),
kubernetes_conn_id="kubernetes_default",
dag=dag
]).get_configuration(),
kubernetes_conn_id="kubernetes_default"
)
createorgsdeduprecord = SparkKubernetesOperator(
task_id='CreateOrgsDedupRecord',
task_display_name="Create Organizations Dedup Records",
namespace='dnet-spark-jobs',
template_spec=SparkConfigurator(
name="createorgsdeduprecord-{{ ds }}-{{ task_instance.try_number }}",
mainClass="eu.dnetlib.dhp.oa.dedup.SparkCreateOrgsDedupRecord",
jarLocation='s3a://binaries/dhp-shade-package-1.2.5-SNAPSHOT.jar',
arguments=["--graphBasePath", "s3a://graph/tmp/prod_provision/graph/05_graph_inferred",
"--isLookUpUrl", "http://services.openaire.eu:8280/is/services/isLookUp?wsdl",
"--actionSetId", "dedup-result-decisiontree-v4",
"--workingPath", "s3a://graph/tmp/prod_provision/working_dir/dedup"
],
executor_cores=8,
executor_memory="4G",
executor_instances=1,
executor_memoryOverhead="3G").get_configuration(),
kubernetes_conn_id="kubernetes_default",
dag=dag
arguments=["--graphBasePath", "{{ dag_run.conf.get('INPUT_PATH') }}",
"--isLookUpUrl", "{{ dag_run.conf.get('IS_LOOKUP_URL') }}",
"--actionSetId", "{{ dag_run.conf.get('DEDUP_CONFIG_ID') }}",
"--workingPath", "{{ dag_run.conf.get('WRKDIR_PATH') }}"
]).get_configuration(),
kubernetes_conn_id="kubernetes_default"
)
updateentity = SparkKubernetesOperator(
task_id='UpdateEntity',
task_display_name="Update Entity",
namespace='dnet-spark-jobs',
template_spec=SparkConfigurator(
name="updateentity-{{ ds }}-{{ task_instance.try_number }}",
mainClass="eu.dnetlib.dhp.oa.dedup.SparkUpdateEntity",
jarLocation='s3a://binaries/dhp-shade-package-1.2.5-SNAPSHOT.jar',
arguments=["--graphBasePath", "s3a://graph/tmp/prod_provision/graph/05_graph_inferred",
"--workingPath", "s3a://graph/tmp/prod_provision/working_dir/dedup",
"--dedupGraphPath", "s3a://graph/tmp/prod_provision/graph/06_graph_dedup"
],
executor_cores=8,
executor_memory="4G",
executor_instances=1,
executor_memoryOverhead="3G").get_configuration(),
kubernetes_conn_id="kubernetes_default",
dag=dag
arguments=["--graphBasePath", "{{ dag_run.conf.get('INPUT_PATH') }}",
"--workingPath", "{{ dag_run.conf.get('WRKDIR_PATH') }}",
"--dedupGraphPath", "{{ dag_run.conf.get('OUTPUT_PATH') }}"
]).get_configuration(),
kubernetes_conn_id="kubernetes_default"
)
copyrelations = SparkKubernetesOperator(
task_id='copyRelations',
task_display_name="Copy Non-Openorgs Relations",
namespace='dnet-spark-jobs',
template_spec=SparkConfigurator(
name="copyrelations-{{ ds }}-{{ task_instance.try_number }}",
mainClass="eu.dnetlib.dhp.oa.dedup.SparkCopyRelationsNoOpenorgs",
jarLocation='s3a://binaries/dhp-shade-package-1.2.5-SNAPSHOT.jar',
arguments=["--graphBasePath", "s3a://graph/tmp/prod_provision/graph/05_graph_inferred",
"--workingPath", "s3a://graph/tmp/prod_provision/working_dir/dedup",
"--dedupGraphPath", "s3a://graph/tmp/prod_provision/graph/06_graph_dedup"
],
executor_cores=8,
executor_memory="4G",
executor_instances=1,
executor_memoryOverhead="3G").get_configuration(),
kubernetes_conn_id="kubernetes_default",
dag=dag
arguments=["--graphBasePath", "{{ dag_run.conf.get('INPUT_PATH') }}",
"--workingPath", "{{ dag_run.conf.get('WRKDIR_PATH') }}",
"--dedupGraphPath", "{{ dag_run.conf.get('OUTPUT_PATH') }}"
]).get_configuration(),
kubernetes_conn_id="kubernetes_default"
)
simrel >> whitelist >> createmergerel \
>> creatededuprecord >> copyopenorgsmergerel \
>> createorgsdeduprecord \
>> updateentity >> copyrelations
chain(simrel, whitelist, createmergerel, creatededuprecord, copyopenorgsmergerel, createorgsdeduprecord, updateentity, copyrelations)
results_deduplication_dag()
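
The explicit >> expression at the end is replaced by chain(), which builds the same linear dependency graph. A minimal, self-contained sketch (not part of this PR) illustrating the equivalence:

import pendulum
from airflow import DAG
from airflow.models.baseoperator import chain
from airflow.operators.empty import EmptyOperator

# Demo DAG only: chain(a, b, c) wires the same dependencies as a >> b >> c.
with DAG(dag_id="chain_equivalence_demo",
         start_date=pendulum.datetime(2024, 1, 1),
         schedule=None):
    a, b, c = (EmptyOperator(task_id=t) for t in ("a", "b", "c"))
    chain(a, b, c)  # equivalent to: a >> b >> c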

View File

@@ -1,16 +1,9 @@
import os
import tarfile
import time
from datetime import timedelta
import pendulum
from airflow.decorators import dag
from airflow.decorators import task
from airflow.models.param import Param
from airflow.operators.python import get_current_context
from airflow.providers.cncf.kubernetes.operators.spark_kubernetes import SparkKubernetesOperator
from airflow.utils.dates import days_ago
from spark_configurator import SparkConfigurator
@@ -19,7 +12,7 @@ EXECUTION_TIMEOUT = int(os.getenv("EXECUTION_TIMEOUT", 6))
default_args = {
"execution_timeout": timedelta(days=EXECUTION_TIMEOUT),
"retries": int(os.getenv("DEFAULT_TASK_RETRIES", 1)),
"retry_delay": timedelta(seconds=int(os.getenv("DEFAULT_RETRY_DELAY_SECONDS", 60))),
"retry_delay": timedelta(seconds=int(os.getenv("DEFAULT_RETRY_DELAY_SECONDS", 60)))
}
@@ -31,14 +24,17 @@ default_args = {
"S3_CONN_ID": Param("s3_conn", type='string', description="Airflow connection of S3 endpoint"),
"ORCID_PATH": Param("s3a://graph/data/orcid_2023/tables", type='string', description=""),
"INPUT_PATH": Param("s3a://graph/tmp/prod_provision/graph/07_graph_consistent", type='string', description=""),
"OUTPUT_PATH": Param("s3a://graph/tmp/prod_provision/graph/09_graph_orcid_enriched", type='string', description=""),
"WRKDIR_PATH": Param("s3a://graph/tmp/prod_provision/working_dir/orcid_enrichment", type='string', description=""),
"OUTPUT_PATH": Param("s3a://graph/tmp/prod_provision/graph/09_graph_orcid_enriched", type='string',
description=""),
"WRKDIR_PATH": Param("s3a://graph/tmp/prod_provision/working_dir/orcid_enrichment", type='string',
description="")
},
tags=["openaire"],
tags=["openaire"]
)
def orcid_enrichment_dag():
orcid_enrich = SparkKubernetesOperator(
task_id='EnrichGraphWithOrcidAuthors',
task_display_name='Enrich Authors with ORCID',
namespace='dnet-spark-jobs',
template_spec=SparkConfigurator(
name="orcidenrich-{{ ds }}-{{ task_instance.try_number }}",
@@ -49,16 +45,11 @@ def orcid_enrichment_dag():
"--targetPath", "{{ dag_run.conf.get('OUTPUT_PATH') }}",
"--workingDir", "{{ dag_run.conf.get('WRKDIR_PATH') }}",
"--master", ""
],
executor_cores=8,
executor_memory="16G",
executor_instances=1,
executor_memoryOverhead="8G").get_configuration(),
kubernetes_conn_id="kubernetes_default",
dag=dag
]).get_configuration(),
kubernetes_conn_id="kubernetes_default"
)
orcid_enrich()
orcid_enrich
orcid_enrichment_dag()
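
The per-task executor settings dropped here (16G memory, 8G overhead) match the new SparkConfigurator defaults introduced further down in this diff, so tasks that no longer pass them are presumably expected to inherit:

# New SparkConfigurator defaults (see the SparkConfigurator change below);
# assumption: tasks that omit executor settings now fall back to these values.
executor_cores = 8
executor_memory = "16G"
executor_memoryOverhead = "8G"
executor_instances = 1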

View File

@@ -1,3 +1,4 @@
class SparkConfigurator:
def __init__(self,
name,
@@ -9,9 +10,9 @@ class SparkConfigurator:
image= "dnet-spark:1.0.0",
driver_cores=1,
driver_memory='1G',
executor_cores=1,
executor_memory="1G",
executor_memoryOverhead= "1G",
executor_cores=8,
executor_memory="16G",
executor_memoryOverhead="8G",
executor_instances=1
) -> None:
if apiVersion:
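
The diff only shows the constructor defaults, not what get_configuration() emits; based on those defaults and the spark-on-k8s-operator CRD that the operators above target, the returned template_spec presumably resembles the following (field layout is an assumption, not taken from this PR):

# Hypothetical sketch of the SparkApplication spec assembled by
# SparkConfigurator.get_configuration(); field names follow the
# spark-on-k8s-operator CRD, and the exact output of this class is not shown in the diff.
spec_sketch = {
    "apiVersion": "sparkoperator.k8s.io/v1beta2",
    "kind": "SparkApplication",
    "metadata": {"name": "createsimrels-2024-11-18-1", "namespace": "dnet-spark-jobs"},
    "spec": {
        "type": "Scala",
        "mode": "cluster",
        "image": "dnet-spark:1.0.0",
        "mainClass": "eu.dnetlib.dhp.oa.dedup.SparkCreateSimRels",
        "mainApplicationFile": "s3a://binaries/dhp-shade-package-1.2.5-SNAPSHOT.jar",
        "arguments": ["--graphBasePath", "..."],
        "driver": {"cores": 1, "memory": "1G"},
        "executor": {"cores": 8, "instances": 1, "memory": "16G", "memoryOverhead": "8G"},
    },
}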