From 657a40536b68a027493e85e754ebda4e98a3f928 Mon Sep 17 00:00:00 2001 From: Spyros Zoupanos Date: Sat, 13 Jun 2020 13:35:53 +0300 Subject: [PATCH] Corrections by Spyros: Scipt cleanup, corrections and re-arrangement --- .../graph/stats/oozie_app/config-default.xml | 22 +- .../oa/graph/stats/oozie_app/impala-shell.sh | 12 +- .../oa/graph/stats/oozie_app/impala-stats.sql | 77 ----- .../graph/stats/oozie_app/scripts/step1.sql | 6 +- .../graph/stats/oozie_app/scripts/step10.sql | 9 +- .../graph/stats/oozie_app/scripts/step11.sql | 10 +- .../graph/stats/oozie_app/scripts/step12.sql | 46 ++- .../graph/stats/oozie_app/scripts/step13.sql | 45 ++- .../graph/stats/oozie_app/scripts/step14.sql | 47 ++++ .../graph/stats/oozie_app/scripts/step2.sql | 30 +- .../graph/stats/oozie_app/scripts/step3.sql | 4 +- .../graph/stats/oozie_app/scripts/step4.sql | 40 ++- .../graph/stats/oozie_app/scripts/step5.sql | 40 +-- .../graph/stats/oozie_app/scripts/step6.sql | 44 +-- .../graph/stats/oozie_app/scripts/step7.sql | 21 +- .../graph/stats/oozie_app/scripts/step8.sql | 33 ++- .../graph/stats/oozie_app/scripts/step9.sql | 43 +-- .../graph/stats/oozie_app/scripts/step9_6.sql | 2 - .../dhp/oa/graph/stats/oozie_app/workflow.xml | 266 ++++++------------ 19 files changed, 350 insertions(+), 447 deletions(-) delete mode 100644 dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/impala-stats.sql create mode 100644 dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step14.sql delete mode 100644 dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step9_6.sql diff --git a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/config-default.xml b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/config-default.xml index 74dbb6f77..2cd53a37b 100644 --- a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/config-default.xml +++ b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/config-default.xml @@ -1,12 +1,11 @@ - jobTracker - yarnRM + ${jobTracker} nameNode - hdfs://nameservice1 + ${nameNode} oozie.use.system.libpath @@ -16,16 +15,6 @@ oozie.action.sharelib.for.spark spark2 - - hive_db_name - - stats_wf_db_galexiou_oozie_beta - - - hive_source_db_name - - openaire2 - hive_metastore_uris thrift://iis-cdh5-test-m3.ocean.icm.edu.pl:9083 @@ -34,5 +23,8 @@ hive_jdbc_url jdbc:hive2://iis-cdh5-test-m3.ocean.icm.edu.pl:10000 - - + + oozie.wf.workflow.notification.url + {serviceUrl}/v1/oozieNotification/jobUpdate?jobId=$jobId%26status=$status + + \ No newline at end of file diff --git a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/impala-shell.sh b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/impala-shell.sh index a5a58493b..70112dc7b 100644 --- a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/impala-shell.sh +++ b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/impala-shell.sh @@ -1,4 +1,4 @@ -export PYTHON_EGG_CACHE=/home/giorgos.alexiou/.python-eggs +export PYTHON_EGG_CACHE=/home/$(whoami)/.python-eggs export link_folder=/tmp/impala-shell-python-egg-cache-$(whoami) if ! 
[ -L $link_folder ] then @@ -6,11 +6,13 @@ then ln -sfn ${PYTHON_EGG_CACHE}${link_folder} ${link_folder} fi -echo "getting file from " $3 - +echo "Getting file from " $3 hdfs dfs -copyToLocal $3 -echo "running impala shell now" +echo "Running impala shell to make the new database visible" +impala-shell -q "INVALIDATE METADATA;" + +echo "Running impala shell to compute new table stats" impala-shell -d $1 -f $2 -echo "impala shell finished" +echo "Impala shell finished" rm $2 diff --git a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/impala-stats.sql b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/impala-stats.sql deleted file mode 100644 index b8833352d..000000000 --- a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/impala-stats.sql +++ /dev/null @@ -1,77 +0,0 @@ - -INVALIDATE METADATA; --- CREATE TABLE chart_country_year AS SELECT org.country AS country, r.year AS year, count(distinct r.id) AS publications FROM result r, result_datasources rd, datasource d, datasource_organizations dor, organization org WHERE r.id=rd.id AND rd.datasource=d.id AND rd.datasource=dor.id AND dor.organization=org.id AND r.type='publication' AND r.bestlicence='Open Access' and r.year>='1990' AND r.year<=CAST(date_part('year',now()) AS STRING) group by org.country, r.year ORDER BY org.country, r.year; --- CREATE TABLE chart_country_datasources AS SELECT org.country AS country, d.name AS datasource, count(distinct r.id) AS publications FROM result r, result_datasources rd, datasource d, datasource_organizations dor, organization org WHERE r.id=rd.id AND rd.datasource=d.id AND d.id=dor.id AND dor.organization=org.id AND r.type='publication' AND r.bestlicence='Open Access' GROUP BY org.country, d.name ORDER BY org.country, publications DESC; --- CREATE TABLE chart_country_type AS SELECT org.country AS country, rc.type AS type, count(distinct r.id) AS publications FROM result r, result_datasources rd, datasource d, result_classifications rc, datasource_organizations dor, organization org WHERE r.id=rd.id AND r.id=rc.id AND rd.datasource=d.id AND rd.datasource=dor.id AND dor.organization=org.id AND r.type='publication' AND r.bestlicence='Open Access' GROUP BY org.country, rc.type; --- CREATE TABLE chart_country_fp7 AS SELECT org.country AS country, r.year AS year, count(distinct r.id) AS publications FROM result r, result_datasources rd, datasource d, datasource_organizations dor, organization org, project_results pr, project p WHERE r.id=rd.id AND rd.datasource=d.id AND rd.datasource=dor.id AND dor.organization=org.id AND r.id=pr.result AND pr.id=p.id AND p.funding_lvl0='FP7' AND r.type='publication' AND r.year>='1990' AND r.year<=CAST(date_part('year',now()) AS STRING) GROUP BY org.country, r.year ORDER BY org.country, r.year; --- CREATE TABLE chart_datasource_type AS SELECT rd.datasource, rc.type, count(distinct rd.id) FROM result_datasources rd, result_classifications rc WHERE rd.id=rc.id GROUP BY rd.datasource, rc.type; --- CREATE TABLE chart_datasource_year AS SELECT rd.datasource, r.year, count(distinct rd.id) FROM result r, result_datasources rd WHERE rd.id=r.id GROUP By rd.datasource, r.year; --- CREATE TABLE chart_datasource_funders AS SELECT rd.datasource, p.funder, count(distinct rd.id) FROM result_datasources rd, project p, project_results pr WHERE p.id=pr.id AND pr.result=rd.id GROUP BY rd.datasource, p.funder; --- CREATE TABLE chart_datasource_projects_pubs AS SELECT 
rd.datasource, p.title, count(distinct rd.id) FROM result_datasources rd, project p, project_results pr, result r WHERE p.id=pr.id AND pr.result=rd.id AND pr.result=r.id AND r.type='publication' GROUP BY rd.datasource, p.title; --- CREATE TABLE chart_datasource_projects_data AS SELECT rd.datasource, p.title, count(distinct rd.id) FROM result_datasources rd, project p, project_results pr, result r WHERE p.id=pr.id AND pr.result=rd.id AND pr.result=r.id and r.type='dataset' GROUP BY rd.datasource, p.title; --- CREATE TABLE chart_project_year AS SELECT p.id, r.year, count( distinct r.id) FROM result r, project_results pr, project p WHERE r.id=pr.result AND p.id=pr.id AND r.year>=p.start_year GROUP BY p.id, r.year; --- CREATE TABLE chart_project_license AS SELECT pr.id, r.bestlicence, count(distinct r.id) FROM result r, project_results pr WHERE r.id=pr.result AND r.type='publication' GROUP BY pr.id, r.bestlicence; --- CREATE TABLE chart_project_repos AS SELECT pr.id, d.name, count (distinct r.id) FROM result r, project_results pr, datasource d, datasource_results dr WHERE r.id=dr.result AND d.id=dr.id AND r.id=pr.result AND r.type='publication' GROUP BY pr.id, d.name; --- CREATE TABLE rd_distinct AS SELECT DISTINCT * FROM result_datasources; --- DROP VIEW result_datasources; --- ALTER TABLE rd_distinct RENAME TO result_datasources; --- COMPUTE STATS chart_country_datasources; --- COMPUTE STATS chart_country_fp7; --- COMPUTE STATS chart_country_type; --- COMPUTE STATS chart_country_year; --- COMPUTE STATS chart_datasource_funders; --- COMPUTE STATS chart_datasource_projects_data; --- COMPUTE STATS chart_datasource_projects_pubs; --- COMPUTE STATS chart_datasource_type; --- COMPUTE STATS chart_datasource_year; --- COMPUTE STATS chart_project_license; --- COMPUTE STATS chart_project_repos; --- COMPUTE STATS chart_project_year; -COMPUTE STATS dataset; -COMPUTE STATS dataset_citations; -COMPUTE STATS dataset_classifications; -COMPUTE STATS dataset_concepts; -COMPUTE STATS dataset_datasources; -COMPUTE STATS dataset_languages; -COMPUTE STATS dataset_oids; -COMPUTE STATS dataset_pids; -COMPUTE STATS dataset_topics; -COMPUTE STATS datasource; -COMPUTE STATS datasource_languages; -COMPUTE STATS datasource_oids; -COMPUTE STATS datasource_organizations; -COMPUTE STATS numbers_country; -COMPUTE STATS organization; -COMPUTE STATS otherresearchproduct; -COMPUTE STATS otherresearchproduct_citations; -COMPUTE STATS otherresearchproduct_classifications; -COMPUTE STATS otherresearchproduct_concepts; -COMPUTE STATS otherresearchproduct_datasources; -COMPUTE STATS otherresearchproduct_languages; -COMPUTE STATS otherresearchproduct_oids; -COMPUTE STATS otherresearchproduct_pids; -COMPUTE STATS otherresearchproduct_topics; -COMPUTE STATS project; -COMPUTE STATS project_oids; -COMPUTE STATS project_organizations; -COMPUTE STATS project_results; -COMPUTE STATS publication; -COMPUTE STATS publication_citations; -COMPUTE STATS publication_classifications; -COMPUTE STATS publication_concepts; -COMPUTE STATS publication_datasources; -COMPUTE STATS publication_languages; -COMPUTE STATS publication_oids; -COMPUTE STATS publication_pids; -COMPUTE STATS publication_topics; -COMPUTE STATS result_organization; -COMPUTE STATS result_projects; -COMPUTE STATS software; -COMPUTE STATS software_citations; -COMPUTE STATS software_classifications; -COMPUTE STATS software_concepts; -COMPUTE STATS software_datasources; -COMPUTE STATS software_languages; -COMPUTE STATS software_oids; -COMPUTE STATS software_pids; -COMPUTE STATS 
software_topic; diff --git a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step1.sql b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step1.sql index 1290155e6..92e3be7a5 100644 --- a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step1.sql +++ b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step1.sql @@ -1,11 +1,11 @@ -- DROP database if EXISTS ${hive_db_name} cascade; -- CREATE database ${hive_db_name}; --- +-- -- CREATE TABLE ${hive_db_name}.Persons ( -- PersonID int, -- LastName varchar(255)); -- -- INSERT INTO ${hive_db_name}.Persons VALUES (1, "test_db_spyros_rec_111"); -drop database if exists ${hive_db_name} cascade; -create database ${hive_db_name}; +DROP database IF EXISTS ${stats_db_name} CASCADE; +CREATE database ${stats_db_name}; diff --git a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step10.sql b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step10.sql index e51892771..792c8cbdd 100644 --- a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step10.sql +++ b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step10.sql @@ -3,8 +3,9 @@ -- Organization table/view and Organization related tables/views ---------------------------------------------------------------- ---------------------------------------------------------------- -DROP TABLE IF EXISTS ${hive_db_name}.organization; -CREATE TABLE ${hive_db_name}.organization AS SELECT substr(o.id, 4) as id, o.legalname.value as name, o.country.classid as country from ${hive_source_db_name}.organization o WHERE o.datainfo.deletedbyinference=false; +DROP TABLE IF EXISTS ${stats_db_name}.organization; +CREATE TABLE ${stats_db_name}.organization AS SELECT substr(o.id, 4) as id, o.legalname.value AS name, o.country.classid AS country FROM ${openaire_db_name}.organization o WHERE o.datainfo.deletedbyinference=false; -CREATE OR REPLACE VIEW ${hive_db_name}.organization_datasources AS SELECT organization AS id, id AS datasource FROM ${hive_db_name}.datasource_organizations; -CREATE OR REPLACE VIEW ${hive_db_name}.organization_projects AS SELECT id AS project, organization as id FROM ${hive_db_name}.project_organizations; +CREATE OR REPLACE VIEW ${stats_db_name}.organization_datasources AS SELECT organization AS id, id AS datasource FROM ${stats_db_name}.datasource_organizations; + +CREATE OR REPLACE VIEW ${stats_db_name}.organization_projects AS SELECT id AS project, organization as id FROM ${stats_db_name}.project_organizations; diff --git a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step11.sql b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step11.sql index e586f0cd6..145be77b0 100644 --- a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step11.sql +++ b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step11.sql @@ -3,8 +3,8 @@ -- Tables/views from external tables/views (Fundref, Country, CountyGDP, roarmap, rndexpediture) ------------------------------------------------------------------------------------------------ 
------------------------------------------------------------------------------------------------ -CREATE OR REPLACE VIEW ${hive_db_name}.fundref AS SELECT * FROM stats_ext.fundref; -CREATE OR REPLACE VIEW ${hive_db_name}.country AS SELECT * FROM stats_ext.country; -CREATE OR REPLACE VIEW ${hive_db_name}.countrygdp AS SELECT * FROM stats_ext.countrygdp; -CREATE OR REPLACE VIEW ${hive_db_name}.roarmap AS SELECT * FROM stats_ext.roarmap; -CREATE OR REPLACE VIEW ${hive_db_name}.rndexpediture AS SELECT * FROM stats_ext.rndexpediture; +CREATE OR REPLACE VIEW ${stats_db_name}.fundref AS SELECT * FROM ${external_stats_db_name}.fundref; +CREATE OR REPLACE VIEW ${stats_db_name}.country AS SELECT * FROM ${external_stats_db_name}.country; +CREATE OR REPLACE VIEW ${stats_db_name}.countrygdp AS SELECT * FROM ${external_stats_db_name}.countrygdp; +CREATE OR REPLACE VIEW ${stats_db_name}.roarmap AS SELECT * FROM ${external_stats_db_name}.roarmap; +CREATE OR REPLACE VIEW ${stats_db_name}.rndexpediture AS SELECT * FROM ${external_stats_db_name}.rndexpediture; diff --git a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step12.sql b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step12.sql index ba0d52aca..c18e2e1f8 100644 --- a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step12.sql +++ b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step12.sql @@ -3,42 +3,40 @@ ---------------------------- --Datasource temporary table updates -UPDATE ${hive_db_name}.datasource_tmp set harvested ='true' WHERE datasource_tmp.id IN (SELECT DISTINCT d.id FROM ${hive_db_name}.datasource_tmp d, ${hive_db_name}.result_datasources rd where d.id=rd.datasource); +UPDATE ${stats_db_name}.datasource_tmp SET harvested='true' WHERE datasource_tmp.id IN (SELECT DISTINCT d.id FROM ${stats_db_name}.datasource_tmp d, ${stats_db_name}.result_datasources rd WHERE d.id=rd.datasource); -- Project temporary table update and final project table creation with final updates that can not be applied to ORC tables --- UPDATE ${hive_db_name}.project_tmp SET haspubs='yes' WHERE project_tmp.id IN (SELECT pr.id FROM ${hive_db_name}.project_results pr, ${hive_db_name}.result r WHERE pr.result=r.id AND r.type='publication'); +UPDATE ${stats_db_name}.project_tmp SET haspubs='yes' WHERE project_tmp.id IN (SELECT pr.id FROM ${stats_db_name}.project_results pr, ${stats_db_name}.result r WHERE pr.result=r.id AND r.type='publication'); - -CREATE TABLE ${hive_db_name}.project stored as parquet as -SELECT p.id , p.acronym, p.title, p.funder, p.funding_lvl0, p.funding_lvl1, p.funding_lvl2, p.ec39, p.type, p.startdate, p.enddate, p.start_year, p.end_year, p.duration, -CASE WHEN prr1.id IS NULL THEN 'no' ELSE 'yes' END as haspubs, -CASE WHEN prr1.id IS NULL THEN 0 ELSE prr1.np END as numpubs, -CASE WHEN prr2.id IS NULL THEN 0 ELSE prr2.daysForlastPub END as daysforlastpub, -CASE WHEN prr2.id IS NULL THEN 0 ELSE prr2.dp END as delayedpubs, +DROP TABLE IF EXISTS ${stats_db_name}.project; +CREATE TABLE ${stats_db_name}.project stored as parquet as +SELECT p.id , p.acronym, p.title, p.funder, p.funding_lvl0, p.funding_lvl1, p.funding_lvl2, p.ec39, p.type, p.startdate, p.enddate, p.start_year, p.end_year, p.duration, +CASE WHEN prr1.id IS NULL THEN 'no' ELSE 'yes' END AS haspubs, +CASE WHEN prr1.id IS NULL THEN 0 ELSE prr1.np END AS numpubs, +CASE WHEN prr2.id IS NULL 
THEN 0 ELSE prr2.daysForlastPub END AS daysforlastpub, +CASE WHEN prr2.id IS NULL THEN 0 ELSE prr2.dp END AS delayedpubs, p.callidentifier, p.code -FROM ${hive_db_name}.project_tmp p +FROM ${stats_db_name}.project_tmp p LEFT JOIN (SELECT pr.id, count(distinct pr.result) AS np - FROM ${hive_db_name}.project_results pr INNER JOIN ${hive_db_name}.result r ON pr.result=r.id - WHERE r.type='publication' + FROM ${stats_db_name}.project_results pr INNER JOIN ${stats_db_name}.result r ON pr.result=r.id + WHERE r.type='publication' GROUP BY pr.id) AS prr1 on prr1.id = p.id -LEFT JOIN (SELECT pp.id, max(datediff(to_date(r.date), to_date(pp.enddate)) ) as daysForlastPub , count(distinct r.id) as dp - FROM ${hive_db_name}.project_tmp pp, ${hive_db_name}.project_results pr, ${hive_db_name}.result r - WHERE pp.id=pr.id AND pr.result=r.id AND r.type='publication' AND datediff(to_date(r.date), to_date(pp.enddate)) > 0 +LEFT JOIN (SELECT pp.id, max(datediff(to_date(r.date), to_date(pp.enddate)) ) AS daysForlastPub , count(distinct r.id) AS dp + FROM ${stats_db_name}.project_tmp pp, ${stats_db_name}.project_results pr, ${stats_db_name}.result r + WHERE pp.id=pr.id AND pr.result=r.id AND r.type='publication' AND datediff(to_date(r.date), to_date(pp.enddate)) > 0 GROUP BY pp.id) AS prr2 - on prr2.id = p.id; - - + ON prr2.id = p.id; + -- Publication temporary table updates -UPDATE ${hive_db_name}.publication_tmp SET delayed = 'yes' WHERE publication_tmp.id IN (SELECT distinct r.id FROM ${hive_db_name}.result r, ${hive_db_name}.project_results pr, ${hive_db_name}.project_tmp p WHERE r.id=pr.result AND pr.id=p.id AND to_date(r.date)-to_date(p.enddate) > 0); +UPDATE ${stats_db_name}.publication_tmp SET delayed = 'yes' WHERE publication_tmp.id IN (SELECT distinct r.id FROM stats_wf_db_obs.result r, ${stats_db_name}.project_results pr, ${stats_db_name}.project_tmp p WHERE r.id=pr.result AND pr.id=p.id AND to_date(r.date)-to_date(p.enddate) > 0); -- Dataset temporary table updates -UPDATE ${hive_db_name}.dataset_tmp SET delayed = 'yes' WHERE dataset_tmp.id IN (SELECT distinct r.id FROM ${hive_db_name}.result r, ${hive_db_name}.project_results pr, ${hive_db_name}.project_tmp p WHERE r.id=pr.result AND pr.id=p.id AND to_date(r.date)-to_date(p.enddate) > 0); +UPDATE ${stats_db_name}.dataset_tmp SET delayed = 'yes' WHERE dataset_tmp.id IN (SELECT distinct r.id FROM stats_wf_db_obs.result r, ${stats_db_name}.project_results pr, ${stats_db_name}.project_tmp p WHERE r.id=pr.result AND pr.id=p.id AND to_date(r.date)-to_date(p.enddate) > 0); -- Software temporary table updates -UPDATE ${hive_db_name}.software_tmp SET delayed = 'yes' WHERE software_tmp.id IN (SELECT distinct r.id FROM ${hive_db_name}.result r, ${hive_db_name}.project_results pr, ${hive_db_name}.project_tmp p WHERE r.id=pr.result AND pr.id=p.id AND to_date(r.date)-to_date(p.enddate) > 0); +UPDATE ${stats_db_name}.software_tmp SET delayed = 'yes' WHERE software_tmp.id IN (SELECT distinct r.id FROM ${stats_db_name}.result r, ${stats_db_name}.project_results pr, ${stats_db_name}.project_tmp p WHERE r.id=pr.result AND pr.id=p.id AND to_date(r.date)-to_date(p.enddate) > 0); -- Oherresearchproduct temporary table updates -UPDATE ${hive_db_name}.otherresearchproduct_tmp SET delayed = 'yes' WHERE otherresearchproduct_tmp.id IN (SELECT distinct r.id FROM ${hive_db_name}.result r, ${hive_db_name}.project_results pr, ${hive_db_name}.project_tmp p WHERE r.id=pr.result AND pr.id=p.id AND to_date(r.date)-to_date(p.enddate) > 0); +UPDATE 
${stats_db_name}.otherresearchproduct_tmp SET delayed = 'yes' WHERE otherresearchproduct_tmp.id IN (SELECT distinct r.id FROM ${stats_db_name}.result r, ${stats_db_name}.project_results pr, ${stats_db_name}.project_tmp p WHERE r.id=pr.result AND pr.id=p.id AND to_date(r.date)-to_date(p.enddate) > 0); - -CREATE OR REPLACE VIEW ${hive_db_name}.project_results_publication AS SELECT result_projects.id AS result, result_projects.project AS project_results, result.date as resultdate, project.enddate as projectenddate, result_projects.daysfromend as daysfromend FROM ${hive_db_name}.result_projects, ${hive_db_name}.result, ${hive_db_name}.project WHERE result_projects.id=result.id and result.type='publication' and project.id=result_projects.project; +CREATE OR REPLACE VIEW ${stats_db_name}.project_results_publication AS SELECT result_projects.id AS result, result_projects.project AS project_results, result.date as resultdate, project.enddate as projectenddate, result_projects.daysfromend AS daysfromend FROM ${stats_db_name}.result_projects, ${stats_db_name}.result, ${stats_db_name}.project WHERE result_projects.id=result.id AND result.type='publication' AND project.id=result_projects.project; diff --git a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step13.sql b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step13.sql index e28424dec..25439852e 100644 --- a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step13.sql +++ b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step13.sql @@ -1,39 +1,38 @@ ------------------------------------------------------------------------------------------------------ -- Creating parquet tables from the updated temporary tables and removing unnecessary temporary tables ------------------------------------------------------------------------------------------------------ -DROP TABLE IF EXISTS ${hive_db_name}.datasource; -CREATE TABLE ${hive_db_name}.datasource stored as parquet as select * from ${hive_db_name}.datasource_tmp; -DROP TABLE IF EXISTS ${hive_db_name}.publication; -CREATE TABLE ${hive_db_name}.publication stored as parquet as select * from ${hive_db_name}.publication_tmp; +DROP TABLE IF EXISTS ${stats_db_name}.datasource; +CREATE TABLE ${stats_db_name}.datasource stored AS parquet AS SELECT * FROM ${stats_db_name}.datasource_tmp; -DROP TABLE IF EXISTS ${hive_db_name}.dataset; -CREATE TABLE ${hive_db_name}.dataset stored as parquet as select * from ${hive_db_name}.dataset_tmp; +DROP TABLE IF EXISTS ${stats_db_name}.publication; +CREATE TABLE ${stats_db_name}.publication stored AS parquet AS SELECT * FROM ${stats_db_name}.publication_tmp; -DROP TABLE IF EXISTS ${hive_db_name}.software; -CREATE TABLE ${hive_db_name}.software stored as parquet as select * from ${hive_db_name}.software_tmp; +DROP TABLE IF EXISTS ${stats_db_name}.dataset; +CREATE TABLE ${stats_db_name}.dataset stored AS parquet AS SELECT * FROM ${stats_db_name}.dataset_tmp; -DROP TABLE IF EXISTS ${hive_db_name}.otherresearchproduct; -CREATE TABLE ${hive_db_name}.otherresearchproduct stored as parquet as select * from ${hive_db_name}.otherresearchproduct_tmp; +DROP TABLE IF EXISTS ${stats_db_name}.software; +CREATE TABLE ${stats_db_name}.software stored AS parquet AS SELECT * FROM ${stats_db_name}.software_tmp; -DROP TABLE ${hive_db_name}.project_tmp; -DROP TABLE ${hive_db_name}.datasource_tmp; 
-DROP TABLE ${hive_db_name}.publication_tmp; -DROP TABLE ${hive_db_name}.dataset_tmp; -DROP TABLE ${hive_db_name}.software_tmp; -DROP TABLE ${hive_db_name}.otherresearchproduct_tmp; +DROP TABLE IF EXISTS ${stats_db_name}.otherresearchproduct; +CREATE TABLE ${stats_db_name}.otherresearchproduct stored AS parquet AS SELECT * FROM ${stats_db_name}.otherresearchproduct_tmp; + +DROP TABLE ${stats_db_name}.project_tmp; +DROP TABLE ${stats_db_name}.datasource_tmp; +DROP TABLE ${stats_db_name}.publication_tmp; +DROP TABLE ${stats_db_name}.dataset_tmp; +DROP TABLE ${stats_db_name}.software_tmp; +DROP TABLE ${stats_db_name}.otherresearchproduct_tmp; ---------------------------------------------- -- Re-creating views from final parquet tables --------------------------------------------- -- Result -CREATE OR REPLACE VIEW ${hive_db_name}.result as SELECT *, bestlicence as access_mode FROM ${hive_db_name}.publication UNION ALL SELECT *, bestlicence as access_mode FROM ${hive_db_name}.software UNION ALL SELECT *, bestlicence as access_mode FROM ${hive_db_name}.dataset UNION ALL SELECT *, bestlicence as access_mode FROM ${hive_db_name}.otherresearchproduct; +CREATE OR REPLACE VIEW ${stats_db_name}.result AS SELECT *, bestlicence AS access_mode FROM ${stats_db_name}.publication UNION ALL SELECT *, bestlicence as access_mode FROM ${stats_db_name}.software UNION ALL SELECT *, bestlicence AS access_mode FROM ${stats_db_name}.dataset UNION ALL SELECT *, bestlicence AS access_mode FROM ${stats_db_name}.otherresearchproduct; --- cleanup -drop view if exists ${hive_db_name}.delayedpubs; -drop view if exists ${hive_db_name}.project_pub_count; -drop view if exists ${hive_db_name}.delayedpubs; -drop view if exists ${hive_db_name}.project_results_publication; - CREATE TABLE ${hive_db_name}.numbers_country AS SELECT org.country AS country, count(distinct rd.datasource) AS datasources, count(distinct r.id) AS publications FROM ${hive_db_name}.result r, ${hive_db_name}.result_datasources rd, ${hive_db_name}.datasource d, ${hive_db_name}.datasource_organizations dor, ${hive_db_name}.organization org WHERE r.id=rd.id AND rd.datasource=d.id AND d.id=dor.id AND dor.organization=org.id AND r.type='publication' AND r.bestlicence='Open Access' GROUP BY org.country; +------------------------------------------------------------------------------- +-- To see with Antonis if the following is needed and where it should be placed +------------------------------------------------------------------------------- +CREATE TABLE ${stats_db_name}.numbers_country AS SELECT org.country AS country, count(distinct rd.datasource) AS datasources, count(distinct r.id) AS publications FROM ${stats_db_name}.result r, ${stats_db_name}.result_datasources rd, ${stats_db_name}.datasource d, ${stats_db_name}.datasource_organizations dor, ${stats_db_name}.organization org WHERE r.id=rd.id AND rd.datasource=d.id AND d.id=dor.id AND dor.organization=org.id AND r.type='publication' AND r.bestlicence='Open Access' GROUP BY org.country; diff --git a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step14.sql b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step14.sql new file mode 100644 index 000000000..06d58fd58 --- /dev/null +++ b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step14.sql @@ -0,0 +1,47 @@ +COMPUTE STATS dataset; +COMPUTE STATS dataset_citations; +COMPUTE STATS dataset_classifications; 
+COMPUTE STATS dataset_concepts; +COMPUTE STATS dataset_datasources; +COMPUTE STATS dataset_languages; +COMPUTE STATS dataset_oids; +COMPUTE STATS dataset_pids; +COMPUTE STATS dataset_topics; +COMPUTE STATS datasource; +COMPUTE STATS datasource_languages; +COMPUTE STATS datasource_oids; +COMPUTE STATS datasource_organizations; +COMPUTE STATS numbers_country; +COMPUTE STATS organization; +COMPUTE STATS otherresearchproduct; +COMPUTE STATS otherresearchproduct_citations; +COMPUTE STATS otherresearchproduct_classifications; +COMPUTE STATS otherresearchproduct_concepts; +COMPUTE STATS otherresearchproduct_datasources; +COMPUTE STATS otherresearchproduct_languages; +COMPUTE STATS otherresearchproduct_oids; +COMPUTE STATS otherresearchproduct_pids; +COMPUTE STATS otherresearchproduct_topics; +COMPUTE STATS project; +COMPUTE STATS project_oids; +COMPUTE STATS project_organizations; +COMPUTE STATS project_results; +COMPUTE STATS publication; +COMPUTE STATS publication_citations; +COMPUTE STATS publication_classifications; +COMPUTE STATS publication_concepts; +COMPUTE STATS publication_datasources; +COMPUTE STATS publication_languages; +COMPUTE STATS publication_oids; +COMPUTE STATS publication_pids; +COMPUTE STATS publication_topics; +COMPUTE STATS result_organization; +COMPUTE STATS result_projects; +COMPUTE STATS software; +COMPUTE STATS software_citations; +COMPUTE STATS software_classifications; +COMPUTE STATS software_concepts; +COMPUTE STATS software_datasources; +COMPUTE STATS software_languages; +COMPUTE STATS software_oids; +COMPUTE STATS software_pids; diff --git a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step2.sql b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step2.sql index cddf83e2b..7e0a07184 100644 --- a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step2.sql +++ b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step2.sql @@ -5,28 +5,28 @@ -------------------------------------------------------------- -- Publication temporary table -DROP TABLE IF EXISTS ${hive_db_name}.publication_tmp; -CREATE TABLE ${hive_db_name}.publication_tmp (id STRING, title STRING, publisher STRING, journal STRING, date STRING, year STRING, bestlicence STRING, embargo_end_date STRING, delayed BOOLEAN, authors INT, source STRING, abstract BOOLEAN, type STRING ) clustered by (id) into 100 buckets stored as orc tblproperties('transactional'='true'); +DROP TABLE IF EXISTS ${stats_db_name}.publication_tmp; --- The following fails --- -INSERT INTO ${hive_db_name}.publication_tmp SELECT substr(p.id, 4) as id, p.title[0].value as title, p.publisher.value as publisher, p.journal.name as journal, +CREATE TABLE ${stats_db_name}.publication_tmp (id STRING, title STRING, publisher STRING, journal STRING, date STRING, year STRING, bestlicence STRING, embargo_end_date STRING, delayed BOOLEAN, authors INT, source STRING, abstract BOOLEAN, type STRING ) clustered by (id) into 100 buckets stored as orc tblproperties('transactional'='true'); + +INSERT INTO ${stats_db_name}.publication_tmp SELECT substr(p.id, 4) as id, p.title[0].value as title, p.publisher.value as publisher, p.journal.name as journal , p.dateofacceptance.value as date, date_format(p.dateofacceptance.value,'yyyy') as year, p.bestaccessright.classname as bestlicence, p.embargoenddate.value as embargo_end_date, false as delayed, size(p.author) as 
authors , concat_ws('\u003B',p.source.value) as source, case when size(p.description) > 0 then true else false end as abstract, 'publication' as type -from ${hive_source_db_name}.publication p +from ${openaire_db_name}.publication p where p.datainfo.deletedbyinference=false; +CREATE TABLE ${stats_db_name}.publication_classifications AS SELECT substr(p.id, 4) as id, instancetype.classname as type from ${openaire_db_name}.publication p LATERAL VIEW explode(p.instance.instancetype) instances as instancetype; -CREATE TABLE ${hive_db_name}.publication_classifications AS SELECT substr(p.id, 4) as id, instancetype.classname as type from ${hive_source_db_name}.publication p LATERAL VIEW explode(p.instance.instancetype) instances as instancetype; -CREATE TABLE ${hive_db_name}.publication_concepts AS SELECT substr(p.id, 4) as id, contexts.context.id as concept from ${hive_source_db_name}.publication p LATERAL VIEW explode(p.context) contexts as context; +CREATE TABLE ${stats_db_name}.publication_concepts AS SELECT substr(p.id, 4) as id, contexts.context.id as concept from ${openaire_db_name}.publication p LATERAL VIEW explode(p.context) contexts as context; -CREATE TABLE ${hive_db_name}.publication_datasources as SELECT p.id, case when d.id is null then 'other' else p.datasource end as datasource FROM (SELECT substr(p.id, 4) as id, substr(instances.instance.hostedby.key, 4) as datasource -from ${hive_source_db_name}.publication p lateral view explode(p.instance) instances as instance) p LEFT OUTER JOIN -(SELECT substr(d.id, 4) id from ${hive_source_db_name}.datasource d WHERE d.datainfo.deletedbyinference=false) d on p.datasource = d.id; +CREATE TABLE ${stats_db_name}.publication_datasources as SELECT p.id, case when d.id is null then 'other' else p.datasource end as datasource FROM (SELECT substr(p.id, 4) as id, substr(instances.instance.hostedby.key, 4) as datasource from ${openaire_db_name}.publication p lateral view explode(p.instance) instances as instance) p LEFT OUTER JOIN (SELECT substr(d.id, 4) id from ${openaire_db_name}.datasource d WHERE d.datainfo.deletedbyinference=false) d on p.datasource = d.id; -CREATE TABLE ${hive_db_name}.publication_languages AS select substr(p.id, 4) as id, p.language.classname as language from ${hive_source_db_name}.publication p; -CREATE TABLE ${hive_db_name}.publication_oids AS SELECT substr(p.id, 4) as id, oids.ids as oid from ${hive_source_db_name}.publication p LATERAL VIEW explode(p.originalid) oids as ids; -create table ${hive_db_name}.publication_pids as select substr(p.id, 4) as id, ppid.qualifier.classname as type, ppid.value as pid from ${hive_source_db_name}.publication p lateral view explode(p.pid) pids as ppid; -create table ${hive_db_name}.publication_topics as select substr(p.id, 4) as id, subjects.subject.qualifier.classname as type, subjects.subject.value as topic from ${hive_source_db_name}.publication p lateral view explode(p.subject) subjects as subject; +CREATE TABLE ${stats_db_name}.publication_languages AS select substr(p.id, 4) as id, p.language.classname as language FROM ${openaire_db_name}.publication p; + +CREATE TABLE ${stats_db_name}.publication_oids AS SELECT substr(p.id, 4) AS id, oids.ids AS oid FROM ${openaire_db_name}.publication p LATERAL VIEW explode(p.originalid) oids AS ids; + +CREATE TABLE ${stats_db_name}.publication_pids AS SELECT substr(p.id, 4) AS id, ppid.qualifier.classname AS type, ppid.value as pid FROM ${openaire_db_name}.publication p LATERAL VIEW explode(p.pid) pids AS ppid; + +CREATE TABLE 
${stats_db_name}.publication_topics as select substr(p.id, 4) AS id, subjects.subject.qualifier.classname AS TYPE, subjects.subject.value AS topic FROM ${openaire_db_name}.publication p LATERAL VIEW explode(p.subject) subjects AS subject; diff --git a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step3.sql b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step3.sql index 95d167b2d..c1345d1ba 100644 --- a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step3.sql +++ b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step3.sql @@ -1,4 +1,2 @@ --- The following fails --- -- 3. Publication_citations -CREATE TABLE ${hive_db_name}.publication_citations AS SELECT substr(p.id, 4) AS id, xpath_string(citation.value, "//citation/id[@type='openaire']/@value") AS result from ${hive_source_db_name}.publication p lateral view explode(p.extrainfo) citations AS citation WHERE xpath_string(citation.value, "//citation/id[@type='openaire']/@value") !=""; +CREATE TABLE ${stats_db_name}.publication_citations AS SELECT substr(p.id, 4) AS id, xpath_string(citation.value, "//citation/id[@type='openaire']/@value") AS result FROM ${openaire_db_name}.publication p lateral view explode(p.extrainfo) citations AS citation WHERE xpath_string(citation.value, "//citation/id[@type='openaire']/@value") !=""; \ No newline at end of file diff --git a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step4.sql b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step4.sql index de842d6df..d7202c2de 100644 --- a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step4.sql +++ b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step4.sql @@ -5,22 +5,32 @@ ------------------------------------------------------ -- Dataset temporary table supporting updates -DROP TABLE IF EXISTS ${hive_db_name}.dataset_tmp; -CREATE TABLE ${hive_db_name}.dataset_tmp (id STRING, title STRING, publisher STRING, journal STRING, date STRING, year STRING, bestlicence STRING, embargo_end_date STRING, delayed BOOLEAN, authors INT, source STRING, abstract BOOLEAN, type STRING ) clustered by (id) into 100 buckets stored as orc tblproperties('transactional'='true'); - -INSERT INTO ${hive_db_name}.dataset_tmp SELECT substr(d.id, 4) as id, d.title[0].value as title, d.publisher.value as publisher, cast(null as string) as journal, d.dateofacceptance.value as date, -date_format(d.dateofacceptance.value,'yyyy') as year, d.bestaccessright.classname as bestlicence, d.embargoenddate.value as embargo_end_date, false as delayed, size(d.author) as authors, -concat_ws ('\u003B',d.source.value) as source, -case when size(d.description) > 0 then true else false end as abstract, 'dataset' as type from ${hive_source_db_name}.dataset d where d.datainfo.deletedbyinference=false; +DROP TABLE IF EXISTS ${stats_db_name}.dataset_tmp; +CREATE TABLE ${stats_db_name}.dataset_tmp (id STRING, title STRING, publisher STRING, journal STRING, date STRING, year STRING, bestlicence STRING, embargo_end_date STRING, delayed BOOLEAN, authors INT, source STRING, abstract BOOLEAN, type STRING ) clustered by (id) into 100 buckets stored AS orc tblproperties('transactional'='true'); +INSERT INTO 
${stats_db_name}.dataset_tmp SELECT substr(d.id, 4) AS id, d.title[0].value AS title, d.publisher.value AS publisher, cast(null AS string) AS journal, +d.dateofacceptance.value as date, date_format(d.dateofacceptance.value,'yyyy') AS year, d.bestaccessright.classname AS bestlicence, +d.embargoenddate.value AS embargo_end_date, false AS delayed, size(d.author) AS authors , concat_ws('\u003B',d.source.value) AS source, + CASE WHEN SIZE(d.description) > 0 THEN TRUE ELSE FALSE end AS abstract, +'dataset' AS type +FROM ${openaire_db_name}.dataset d +WHERE d.datainfo.deletedbyinference=FALSE; -- Dataset_citations -CREATE TABLE ${hive_db_name}.dataset_citations as select substr(d.id, 4) as id, xpath_string(citation.value, "//citation/id[@type='openaire']/@value") as result from ${hive_source_db_name}.dataset d lateral view explode(d.extrainfo) citations as citation where xpath_string(citation.value, "//citation/id[@type='openaire']/@value") !=""; +CREATE TABLE ${stats_db_name}.dataset_citations AS SELECT substr(d.id, 4) AS id, xpath_string(citation.value, "//citation/id[@type='openaire']/@value") AS result FROM ${openaire_db_name}.dataset d LATERAL VIEW explode(d.extrainfo) citations AS citation WHERE xpath_string(citation.value, "//citation/id[@type='openaire']/@value") !=""; -CREATE TABLE ${hive_db_name}.dataset_classifications AS SELECT substr(p.id, 4) as id, instancetype.classname as type from ${hive_source_db_name}.dataset p LATERAL VIEW explode(p.instance.instancetype) instances as instancetype; -CREATE TABLE ${hive_db_name}.dataset_concepts AS SELECT substr(p.id, 4) as id, contexts.context.id as concept from ${hive_source_db_name}.dataset p LATERAL VIEW explode(p.context) contexts as context; -CREATE TABLE ${hive_db_name}.dataset_datasources as SELECT p.id, case when d.id is null then 'other' else p.datasource end as datasource FROM (SELECT substr(p.id, 4) as id, substr(instances.instance.hostedby.key, 4) as datasource from ${hive_source_db_name}.dataset p lateral view explode(p.instance) instances as instance) p LEFT OUTER JOIN (SELECT substr(d.id, 4) id from ${hive_source_db_name}.datasource d WHERE d.datainfo.deletedbyinference=false) d on p.datasource = d.id; -CREATE TABLE ${hive_db_name}.dataset_languages AS SELECT substr(p.id, 4) as id, p.language.classname as language from ${hive_source_db_name}.dataset p; -CREATE TABLE ${hive_db_name}.dataset_oids AS SELECT substr(p.id, 4) as id, oids.ids as oid from ${hive_source_db_name}.dataset p LATERAL VIEW explode(p.originalid) oids as ids; -CREATE TABLE ${hive_db_name}.dataset_pids AS SELECT substr(p.id, 4) as id, ppid.qualifier.classname as type, ppid.value as pid from ${hive_source_db_name}.dataset p lateral view explode(p.pid) pids as ppid; -CREATE TABLE ${hive_db_name}.dataset_topics AS SELECT substr(p.id, 4) as id, subjects.subject.qualifier.classname as type, subjects.subject.value as topic from ${hive_source_db_name}.dataset p lateral view explode(p.subject) subjects as subject; +CREATE TABLE ${stats_db_name}.dataset_classifications AS SELECT substr(p.id, 4) AS id, instancetype.classname AS type FROM ${openaire_db_name}.dataset p LATERAL VIEW explode(p.instance.instancetype) instances AS instancetype; + +CREATE TABLE ${stats_db_name}.dataset_concepts AS SELECT substr(p.id, 4) as id, contexts.context.id as concept from ${openaire_db_name}.dataset p LATERAL VIEW explode(p.context) contexts as context; + +CREATE TABLE ${stats_db_name}.dataset_datasources AS SELECT p.id, case when d.id IS NULL THEN 'other' ELSE p.datasource END AS 
datasource FROM (SELECT substr(p.id, 4) as id, substr(instances.instance.hostedby.key, 4) AS datasource +FROM ${openaire_db_name}.dataset p LATERAL VIEW explode(p.instance) instances AS instance) p LEFT OUTER JOIN +(SELECT substr(d.id, 4) id FROM ${openaire_db_name}.datasource d WHERE d.datainfo.deletedbyinference=false) d ON p.datasource = d.id; + +CREATE TABLE ${stats_db_name}.dataset_languages AS SELECT substr(p.id, 4) AS id, p.language.classname AS language FROM ${openaire_db_name}.dataset p; + +CREATE TABLE ${stats_db_name}.dataset_oids AS SELECT substr(p.id, 4) AS id, oids.ids AS oid FROM ${openaire_db_name}.dataset p LATERAL VIEW explode(p.originalid) oids AS ids; + +CREATE TABLE ${stats_db_name}.dataset_pids AS SELECT substr(p.id, 4) AS id, ppid.qualifier.classname AS type, ppid.value AS pid FROM ${openaire_db_name}.dataset p LATERAL VIEW explode(p.pid) pids AS ppid; + +CREATE TABLE ${stats_db_name}.dataset_topics AS SELECT substr(p.id, 4) AS id, subjects.subject.qualifier.classname AS type, subjects.subject.value AS topic FROM ${openaire_db_name}.dataset p LATERAL VIEW explode(p.subject) subjects AS subject; diff --git a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step5.sql b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step5.sql index 21175a5da..b2b3c4c75 100644 --- a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step5.sql +++ b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step5.sql @@ -5,28 +5,32 @@ -------------------------------------------------------- -- Software temporary table supporting updates -DROP TABLE IF EXISTS ${hive_db_name}.software_tmp; -CREATE TABLE ${hive_db_name}.software_tmp ( id STRING, title STRING, publisher STRING, journal STRING, date STRING, year STRING, bestlicence STRING, embargo_end_date STRING, delayed BOOLEAN, authors INT, source STRING, abstract BOOLEAN, type STRING ) clustered by (id) into 100 buckets stored as orc tblproperties('transactional'='true'); -INSERT INTO ${hive_db_name}.software_tmp select substr(s.id, 4) as id, s.title[0].value as title, s.publisher.value as publisher, cast(null as string) as journal, -s.dateofacceptance.value as date, date_format(s.dateofacceptance.value,'yyyy') as year, s.bestaccessright.classname as bestlicence, -s.embargoenddate.value as embargo_end_date, false as delayed, size(s.author) as authors , concat_ws('\u003B',s.source.value) as source, - case when size(s.description) > 0 then true else false end as abstract, +DROP TABLE IF EXISTS ${stats_db_name}.software_tmp; +CREATE TABLE ${stats_db_name}.software_tmp (id STRING, title STRING, publisher STRING, journal STRING, date STRING, year STRING, bestlicence STRING, embargo_end_date STRING, delayed BOOLEAN, authors INT, source STRING, abstract BOOLEAN, type STRING ) clustered by (id) INTO 100 buckets stored AS orc tblproperties('transactional'='true'); + +INSERT INTO ${stats_db_name}.software_tmp SELECT substr(s.id, 4) as id, s.title[0].value AS title, s.publisher.value AS publisher, CAST(NULL AS string) AS journal, +s.dateofacceptance.value AS DATE, date_format(s.dateofacceptance.value,'yyyy') AS YEAR, s.bestaccessright.classname AS bestlicence, +s.embargoenddate.value AS embargo_end_date, FALSE AS delayed, SIZE(s.author) AS authors , concat_ws('\u003B',s.source.value) AS source, + CASE WHEN SIZE(s.description) > 0 THEN TRUE ELSE FALSE END AS abstract, 
'software' as type -from ${hive_source_db_name}.software s +from ${openaire_db_name}.software s where s.datainfo.deletedbyinference=false; - -- Software_citations -Create table ${hive_db_name}.software_citations as select substr(s.id, 4) as id, xpath_string(citation.value, "//citation/id[@type='openaire']/@value") as result from ${hive_source_db_name}.software s lateral view explode(s.extrainfo) citations as citation where xpath_string(citation.value, "//citation/id[@type='openaire']/@value") !=""; +CREATE TABLE ${stats_db_name}.software_citations AS SELECT substr(s.id, 4) as id, xpath_string(citation.value, "//citation/id[@type='openaire']/@value") AS RESULT FROM ${openaire_db_name}.software s LATERAL VIEW explode(s.extrainfo) citations as citation where xpath_string(citation.value, "//citation/id[@type='openaire']/@value") !=""; -CREATE TABLE ${hive_db_name}.software_classifications AS SELECT substr(p.id, 4) as id, instancetype.classname as type from ${hive_source_db_name}.software p LATERAL VIEW explode(p.instance.instancetype) instances as instancetype; -CREATE TABLE ${hive_db_name}.software_concepts AS SELECT substr(p.id, 4) as id, contexts.context.id as concept from ${hive_source_db_name}.software p LATERAL VIEW explode(p.context) contexts as context; +CREATE TABLE ${stats_db_name}.software_classifications AS SELECT substr(p.id, 4) AS id, instancetype.classname AS type FROM ${openaire_db_name}.software p LATERAL VIEW explode(p.instance.instancetype) instances AS instancetype; -CREATE TABLE ${hive_db_name}.software_datasources as SELECT p.id, case when d.id is null then 'other' else p.datasource end as datasource FROM (SELECT substr(p.id, 4) as id, substr(instances.instance.hostedby.key, 4) as datasource -from ${hive_source_db_name}.software p lateral view explode(p.instance) instances as instance) p LEFT OUTER JOIN -(SELECT substr(d.id, 4) id from ${hive_source_db_name}.datasource d WHERE d.datainfo.deletedbyinference=false) d on p.datasource = d.id; +CREATE TABLE ${stats_db_name}.software_concepts AS SELECT substr(p.id, 4) AS id, contexts.context.id AS concept FROM ${openaire_db_name}.software p LATERAL VIEW explode(p.context) contexts AS context; -CREATE TABLE ${hive_db_name}.software_languages AS select substr(p.id, 4) as id, p.language.classname as language from ${hive_source_db_name}.software p; -CREATE TABLE ${hive_db_name}.software_oids AS SELECT substr(p.id, 4) as id, oids.ids as oid from ${hive_source_db_name}.software p LATERAL VIEW explode(p.originalid) oids as ids; -create table ${hive_db_name}.software_pids as select substr(p.id, 4) as id, ppid.qualifier.classname as type, ppid.value as pid from ${hive_source_db_name}.software p lateral view explode(p.pid) pids as ppid; -create table ${hive_db_name}.software_topics as select substr(p.id, 4) as id, subjects.subject.qualifier.classname as type, subjects.subject.value as topic from ${hive_source_db_name}.software p lateral view explode(p.subject) subjects as subject; +CREATE TABLE ${stats_db_name}.software_datasources AS SELECT p.id, CASE WHEN d.id IS NULL THEN 'other' ELSE p.datasource end as datasource FROM (SELECT substr(p.id, 4) AS id, substr(instances.instance.hostedby.key, 4) AS datasource +FROM ${openaire_db_name}.software p LATERAL VIEW explode(p.instance) instances AS instance) p LEFT OUTER JOIN +(SELECT substr(d.id, 4) id FROM ${openaire_db_name}.datasource d WHERE d.datainfo.deletedbyinference=false) d ON p.datasource = d.id; + +CREATE TABLE ${stats_db_name}.software_languages AS select substr(p.id, 4) AS id, 
p.language.classname AS language FROM ${openaire_db_name}.software p; + +CREATE TABLE ${stats_db_name}.software_oids AS SELECT substr(p.id, 4) AS id, oids.ids AS oid FROM ${openaire_db_name}.software p LATERAL VIEW explode(p.originalid) oids AS ids; + +CREATE TABLE ${stats_db_name}.software_pids AS SELECT substr(p.id, 4) AS id, ppid.qualifier.classname AS type, ppid.value AS pid FROM ${openaire_db_name}.software p LATERAL VIEW explode(p.pid) pids AS ppid; + +CREATE TABLE ${stats_db_name}.software_topics AS SELECT substr(p.id, 4) AS id, subjects.subject.qualifier.classname AS type, subjects.subject.value AS topic FROM ${openaire_db_name}.software p LATERAL VIEW explode(p.subject) subjects AS subject; diff --git a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step6.sql b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step6.sql index 58967a840..cb2c67c9e 100644 --- a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step6.sql +++ b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step6.sql @@ -5,27 +5,33 @@ -------------------------------------------------------------------------------- -- Otherresearchproduct temporary table supporting updates -DROP TABLE IF EXISTS ${hive_db_name}.otherresearchproduct_tmp; -CREATE TABLE ${hive_db_name}.otherresearchproduct_tmp ( id STRING, title STRING, publisher STRING, journal STRING, date STRING, year STRING, bestlicence STRING, embargo_end_date STRING, delayed BOOLEAN, authors INT, source STRING, abstract BOOLEAN, type STRING ) clustered by (id) into 100 buckets stored as orc tblproperties('transactional'='true'); -INSERT INTO ${hive_db_name}.otherresearchproduct_tmp select substr(o.id, 4) as id, o.title[0].value as title, o.publisher.value as publisher, cast(null as string) as journal, -o.dateofacceptance.value as date, date_format(o.dateofacceptance.value,'yyyy') as year, o.bestaccessright.classname as bestlicence, -o.embargoenddate.value as embargo_end_date, false as delayed, size(o.author) as authors , concat_ws('\u003B',o.source.value) as source, -case when size(o.description) > 0 then true else false end as abstract, -'other' as type -from ${hive_source_db_name}.otherresearchproduct o -where o.datainfo.deletedbyinference=false; +DROP TABLE IF EXISTS ${stats_db_name}.otherresearchproduct_tmp; +CREATE TABLE ${stats_db_name}.otherresearchproduct_tmp ( id STRING, title STRING, publisher STRING, journal STRING, date STRING, year STRING, bestlicence STRING, embargo_end_date STRING, delayed BOOLEAN, authors INT, source STRING, abstract BOOLEAN, type STRING ) CLUSTERED BY (id) INTO 100 buckets stored AS orc tblproperties('transactional'='true'); + +INSERT INTO ${stats_db_name}.otherresearchproduct_tmp SELECT substr(o.id, 4) AS id, o.title[0].value AS title, o.publisher.value AS publisher, CAST(NULL AS string) AS journal, +o.dateofacceptance.value AS DATE, date_format(o.dateofacceptance.value,'yyyy') AS year, o.bestaccessright.classname AS bestlicence, +o.embargoenddate.value as embargo_end_date, FALSE AS delayed, SIZE(o.author) AS authors , concat_ws('\u003B',o.source.value) AS source, +CASE WHEN SIZE(o.description) > 0 THEN TRUE ELSE FALSE END AS abstract, +'other' AS type +FROM ${openaire_db_name}.otherresearchproduct o +WHERE o.datainfo.deletedbyinference=FALSE; -- Otherresearchproduct_citations -Create table ${hive_db_name}.otherresearchproduct_citations 
as select substr(o.id, 4) as id, xpath_string(citation.value, "//citation/id[@type='openaire']/@value") as result from ${hive_source_db_name}.otherresearchproduct o lateral view explode(o.extrainfo) citations as citation where xpath_string(citation.value, "//citation/id[@type='openaire']/@value") !=""; +CREATE TABLE ${stats_db_name}.otherresearchproduct_citations AS SELECT substr(o.id, 4) AS id, xpath_string(citation.value, "//citation/id[@type='openaire']/@value") AS RESULT FROM ${openaire_db_name}.otherresearchproduct o LATERAL VIEW explode(o.extrainfo) citations AS citation WHERE xpath_string(citation.value, "//citation/id[@type='openaire']/@value") !=""; -CREATE TABLE ${hive_db_name}.otherresearchproduct_classifications AS SELECT substr(p.id, 4) as id, instancetype.classname as type from ${hive_source_db_name}.otherresearchproduct p LATERAL VIEW explode(p.instance.instancetype) instances as instancetype; -CREATE TABLE ${hive_db_name}.otherresearchproduct_concepts AS SELECT substr(p.id, 4) as id, contexts.context.id as concept from ${hive_source_db_name}.otherresearchproduct p LATERAL VIEW explode(p.context) contexts as context; +CREATE TABLE ${stats_db_name}.otherresearchproduct_classifications AS SELECT substr(p.id, 4) AS id, instancetype.classname AS type FROM ${openaire_db_name}.otherresearchproduct p LATERAL VIEW explode(p.instance.instancetype) instances AS instancetype; -CREATE TABLE ${hive_db_name}.otherresearchproduct_datasources as SELECT p.id, case when d.id is null then 'other' else p.datasource end as datasource FROM (SELECT substr(p.id, 4) as id, substr(instances.instance.hostedby.key, 4) as datasource -from ${hive_source_db_name}.otherresearchproduct p lateral view explode(p.instance) instances as instance) p LEFT OUTER JOIN -(SELECT substr(d.id, 4) id from ${hive_source_db_name}.datasource d WHERE d.datainfo.deletedbyinference=false) d on p.datasource = d.id; +CREATE TABLE ${stats_db_name}.otherresearchproduct_concepts AS SELECT substr(p.id, 4) AS id, contexts.context.id AS concept FROM ${openaire_db_name}.otherresearchproduct p LATERAL VIEW explode(p.context) contexts AS context; -CREATE TABLE ${hive_db_name}.otherresearchproduct_languages AS select substr(p.id, 4) as id, p.language.classname as language from ${hive_source_db_name}.otherresearchproduct p; -CREATE TABLE ${hive_db_name}.otherresearchproduct_oids AS SELECT substr(p.id, 4) as id, oids.ids as oid from ${hive_source_db_name}.otherresearchproduct p LATERAL VIEW explode(p.originalid) oids as ids; -create table ${hive_db_name}.otherresearchproduct_pids as select substr(p.id, 4) as id, ppid.qualifier.classname as type, ppid.value as pid from ${hive_source_db_name}.otherresearchproduct p lateral view explode(p.pid) pids as ppid; -create table ${hive_db_name}.otherresearchproduct_topics as select substr(p.id, 4) as id, subjects.subject.qualifier.classname as type, subjects.subject.value as topic from ${hive_source_db_name}.otherresearchproduct p lateral view explode(p.subject) subjects as subject; + +CREATE TABLE ${stats_db_name}.otherresearchproduct_datasources AS SELECT p.id, CASE WHEN d.id IS NULL THEN 'other' ELSE p.datasource END AS datasource FROM (SELECT substr(p.id, 4) AS id, substr(instances.instance.hostedby.key, 4) AS datasource +from ${openaire_db_name}.otherresearchproduct p lateral view explode(p.instance) instances as instance) p LEFT OUTER JOIN +(SELECT substr(d.id, 4) id from ${openaire_db_name}.datasource d WHERE d.datainfo.deletedbyinference=false) d on p.datasource = d.id; + +CREATE TABLE 
${stats_db_name}.otherresearchproduct_languages AS SELECT substr(p.id, 4) AS id, p.language.classname AS language FROM ${openaire_db_name}.otherresearchproduct p; + +CREATE TABLE ${stats_db_name}.otherresearchproduct_oids AS SELECT substr(p.id, 4) AS id, oids.ids AS oid FROM ${openaire_db_name}.otherresearchproduct p LATERAL VIEW explode(p.originalid) oids AS ids; + +CREATE TABLE ${stats_db_name}.otherresearchproduct_pids AS SELECT substr(p.id, 4) AS id, ppid.qualifier.classname AS type, ppid.value AS pid FROM ${openaire_db_name}.otherresearchproduct p LATERAL VIEW explode(p.pid) pids AS ppid; + +CREATE TABLE ${stats_db_name}.otherresearchproduct_topics AS SELECT substr(p.id, 4) AS id, subjects.subject.qualifier.classname AS type, subjects.subject.value AS topic FROM ${openaire_db_name}.otherresearchproduct p LATERAL VIEW explode(p.subject) subjects AS subject; diff --git a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step7.sql b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step7.sql index fa5838b61..927543f5a 100644 --- a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step7.sql +++ b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step7.sql @@ -4,18 +4,21 @@ ------------------------------------------------------ ------------------------------------------------------ -- Project_oids Table -DROP TABLE IF EXISTS ${hive_db_name}.project_oids; -CREATE TABLE ${hive_db_name}.project_oids AS SELECT substr(p.id, 4) as id, oids.ids as oid from ${hive_source_db_name}.project p LATERAL VIEW explode(p.originalid) oids as ids; +DROP TABLE IF EXISTS ${stats_db_name}.project_oids; +CREATE TABLE ${stats_db_name}.project_oids AS SELECT substr(p.id, 4) AS id, oids.ids AS oid FROM ${openaire_db_name}.project p LATERAL VIEW explode(p.originalid) oids AS ids; + -- Project_organizations Table -DROP TABLE IF EXISTS ${hive_db_name}.project_organizations; -CREATE TABLE ${hive_db_name}.project_organizations AS SELECT substr(r.source, 4) as id, substr(r.target, 4) AS organization from ${hive_source_db_name}.relation r WHERE r.reltype='projectOrganization'; +DROP TABLE IF EXISTS ${stats_db_name}.project_organizations; +CREATE TABLE ${stats_db_name}.project_organizations AS SELECT substr(r.source, 4) AS id, substr(r.target, 4) AS organization from ${openaire_db_name}.relation r WHERE r.reltype='projectOrganization'; + -- Project_results Table -DROP TABLE IF EXISTS ${hive_db_name}.project_results; -CREATE TABLE ${hive_db_name}.project_results AS SELECT substr(r.target, 4) as id, substr(r.source, 4) AS result from ${hive_source_db_name}.relation r WHERE r.reltype='resultProject'; +DROP TABLE IF EXISTS ${stats_db_name}.project_results; +CREATE TABLE ${stats_db_name}.project_results AS SELECT substr(r.target, 4) AS id, substr(r.source, 4) AS result FROM ${openaire_db_name}.relation r WHERE r.reltype='resultProject'; -- Project table ---------------- -- Creating and populating temporary Project table -DROP TABLE IF EXISTS ${hive_db_name}.project_tmp; -CREATE TABLE ${hive_db_name}.project_tmp (id STRING, acronym STRING, title STRING, funder STRING, funding_lvl0 STRING, funding_lvl1 STRING, funding_lvl2 STRING, ec39 STRING, type STRING, startdate STRING, enddate STRING, start_year STRING, end_year STRING, duration INT, haspubs STRING, numpubs INT, daysforlastpub INT, delayedpubs INT, callidentifier STRING, code STRING) 
diff --git a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step7.sql b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step7.sql
index fa5838b61..927543f5a 100644
--- a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step7.sql
+++ b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step7.sql
@@ -4,18 +4,21 @@
 ------------------------------------------------------
 ------------------------------------------------------
 -- Project_oids Table
-DROP TABLE IF EXISTS ${hive_db_name}.project_oids;
-CREATE TABLE ${hive_db_name}.project_oids AS SELECT substr(p.id, 4) as id, oids.ids as oid from ${hive_source_db_name}.project p LATERAL VIEW explode(p.originalid) oids as ids;
+DROP TABLE IF EXISTS ${stats_db_name}.project_oids;
+CREATE TABLE ${stats_db_name}.project_oids AS SELECT substr(p.id, 4) AS id, oids.ids AS oid FROM ${openaire_db_name}.project p LATERAL VIEW explode(p.originalid) oids AS ids;
+
 -- Project_organizations Table
-DROP TABLE IF EXISTS ${hive_db_name}.project_organizations;
-CREATE TABLE ${hive_db_name}.project_organizations AS SELECT substr(r.source, 4) as id, substr(r.target, 4) AS organization from ${hive_source_db_name}.relation r WHERE r.reltype='projectOrganization';
+DROP TABLE IF EXISTS ${stats_db_name}.project_organizations;
+CREATE TABLE ${stats_db_name}.project_organizations AS SELECT substr(r.source, 4) AS id, substr(r.target, 4) AS organization FROM ${openaire_db_name}.relation r WHERE r.reltype='projectOrganization';
+
 -- Project_results Table
-DROP TABLE IF EXISTS ${hive_db_name}.project_results;
-CREATE TABLE ${hive_db_name}.project_results AS SELECT substr(r.target, 4) as id, substr(r.source, 4) AS result from ${hive_source_db_name}.relation r WHERE r.reltype='resultProject';
+DROP TABLE IF EXISTS ${stats_db_name}.project_results;
+CREATE TABLE ${stats_db_name}.project_results AS SELECT substr(r.target, 4) AS id, substr(r.source, 4) AS result FROM ${openaire_db_name}.relation r WHERE r.reltype='resultProject';
 -- Project table
 ----------------
 -- Creating and populating temporary Project table
-DROP TABLE IF EXISTS ${hive_db_name}.project_tmp;
-CREATE TABLE ${hive_db_name}.project_tmp (id STRING, acronym STRING, title STRING, funder STRING, funding_lvl0 STRING, funding_lvl1 STRING, funding_lvl2 STRING, ec39 STRING, type STRING, startdate STRING, enddate STRING, start_year STRING, end_year STRING, duration INT, haspubs STRING, numpubs INT, daysforlastpub INT, delayedpubs INT, callidentifier STRING, code STRING) clustered by (id) into 100 buckets stored as orc tblproperties('transactional'='true');
-INSERT INTO ${hive_db_name}.project_tmp SELECT substr(p.id, 4) as id, p.acronym.value as acronym, p.title.value as title, xpath_string(p.fundingtree[0].value, '//funder/name') as funder, xpath_string(p.fundingtree[0].value, '//funding_level_0/name') as funding_lvl0, xpath_string(p.fundingtree[0].value, '//funding_level_1/name') as funding_lvl1, xpath_string(p.fundingtree[0].value, '//funding_level_2/name') as funding_lvl2, p.ecsc39.value as ec39, p.contracttype.classname as type, p.startdate.value as startdate, p.enddate.value as enddate, date_format(p.startdate.value, 'yyyy') as start_year, date_format(p.enddate.value, 'yyyy') as end_year, 0 as duration, 'no' as haspubs, 0 as numpubs, 0 as daysforlastpub, 0 as delayedpubs, p.callidentifier.value as callidentifier, p.code.value as code from ${hive_source_db_name}.project p WHERE p.datainfo.deletedbyinference=false;
+DROP TABLE IF EXISTS ${stats_db_name}.project_tmp;
+CREATE TABLE ${stats_db_name}.project_tmp (id STRING, acronym STRING, title STRING, funder STRING, funding_lvl0 STRING, funding_lvl1 STRING, funding_lvl2 STRING, ec39 STRING, type STRING, startdate STRING, enddate STRING, start_year STRING, end_year STRING, duration INT, haspubs STRING, numpubs INT, daysforlastpub INT, delayedpubs INT, callidentifier STRING, code STRING) CLUSTERED BY (id) INTO 100 BUCKETS STORED AS ORC TBLPROPERTIES('transactional'='true');
+
+INSERT INTO ${stats_db_name}.project_tmp SELECT substr(p.id, 4) AS id, p.acronym.value AS acronym, p.title.value AS title, xpath_string(p.fundingtree[0].value, '//funder/name') AS funder, xpath_string(p.fundingtree[0].value, '//funding_level_0/name') AS funding_lvl0, xpath_string(p.fundingtree[0].value, '//funding_level_1/name') AS funding_lvl1, xpath_string(p.fundingtree[0].value, '//funding_level_2/name') AS funding_lvl2, p.ecsc39.value AS ec39, p.contracttype.classname AS type, p.startdate.value AS startdate, p.enddate.value AS enddate, date_format(p.startdate.value, 'yyyy') AS start_year, date_format(p.enddate.value, 'yyyy') AS end_year, 0 AS duration, 'no' AS haspubs, 0 AS numpubs, 0 AS daysforlastpub, 0 AS delayedpubs, p.callidentifier.value AS callidentifier, p.code.value AS code FROM ${openaire_db_name}.project p WHERE p.datainfo.deletedbyinference=false;
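Note: project_tmp is declared CLUSTERED BY (id) INTO 100 BUCKETS, STORED AS ORC, with 'transactional'='true' because Hive only accepts row-level UPDATE/DELETE on bucketed ORC tables with ACID enabled; the haspubs/numpubs/daysforlastpub/delayedpubs columns are seeded with placeholder values here, presumably so that a later step can UPDATE them in place. A minimal sketch of the same pattern, using a hypothetical table name:

    CREATE TABLE demo_tmp (id STRING, numpubs INT)
    CLUSTERED BY (id) INTO 100 BUCKETS
    STORED AS ORC TBLPROPERTIES('transactional'='true');
    -- row-level updates are only allowed on such ACID tables
    UPDATE demo_tmp SET numpubs = 0 WHERE numpubs IS NULL;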
diff --git a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step8.sql b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step8.sql
index 3d346fb95..de87b0073 100644
--- a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step8.sql
+++ b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step8.sql
@@ -5,20 +5,27 @@
 ----------------------------------------------------
 -- Views on temporary tables that should be re-created in the end
-CREATE OR REPLACE VIEW ${hive_db_name}.result as SELECT *, bestlicence as access_mode FROM ${hive_db_name}.publication_tmp UNION ALL SELECT *,bestlicence as access_mode FROM ${hive_db_name}.software_tmp UNION ALL SELECT *,bestlicence as access_mode FROM ${hive_db_name}.dataset_tmp UNION ALL SELECT *,bestlicence as access_mode FROM ${hive_db_name}.otherresearchproduct_tmp;
+CREATE OR REPLACE VIEW ${stats_db_name}.result AS SELECT *, bestlicence AS access_mode FROM ${stats_db_name}.publication_tmp UNION ALL SELECT *, bestlicence AS access_mode FROM ${stats_db_name}.software_tmp UNION ALL SELECT *, bestlicence AS access_mode FROM ${stats_db_name}.dataset_tmp UNION ALL SELECT *, bestlicence AS access_mode FROM ${stats_db_name}.otherresearchproduct_tmp;
 -- Views on final tables
-CREATE OR REPLACE VIEW ${hive_db_name}.result_datasources as SELECT * FROM ${hive_db_name}.publication_datasources UNION ALL SELECT * FROM ${hive_db_name}.software_datasources UNION ALL SELECT * FROM ${hive_db_name}.dataset_datasources UNION ALL SELECT * FROM ${hive_db_name}.otherresearchproduct_datasources;
-CREATE OR REPLACE VIEW ${hive_db_name}.result_citations as SELECT * FROM ${hive_db_name}.publication_citations UNION ALL SELECT * FROM ${hive_db_name}.software_citations UNION ALL SELECT * FROM ${hive_db_name}.dataset_citations UNION ALL SELECT * FROM ${hive_db_name}.otherresearchproduct_citations;
-CREATE OR REPLACE VIEW ${hive_db_name}.result_classifications as SELECT * FROM ${hive_db_name}.publication_classifications UNION ALL SELECT * FROM ${hive_db_name}.software_classifications UNION ALL SELECT * FROM ${hive_db_name}.dataset_classifications UNION ALL SELECT * FROM ${hive_db_name}.otherresearchproduct_classifications;
-CREATE OR REPLACE VIEW ${hive_db_name}.result_concepts as SELECT * FROM ${hive_db_name}.publication_concepts UNION ALL SELECT * FROM ${hive_db_name}.software_concepts UNION ALL SELECT * FROM ${hive_db_name}.dataset_concepts UNION ALL SELECT * FROM ${hive_db_name}.otherresearchproduct_concepts;
-CREATE OR REPLACE VIEW ${hive_db_name}.result_languages as SELECT * FROM ${hive_db_name}.publication_languages UNION ALL SELECT * FROM ${hive_db_name}.software_languages UNION ALL SELECT * FROM ${hive_db_name}.dataset_languages UNION ALL SELECT * FROM ${hive_db_name}.otherresearchproduct_languages;
-CREATE OR REPLACE VIEW ${hive_db_name}.result_oids as SELECT * FROM ${hive_db_name}.publication_oids UNION ALL SELECT * FROM ${hive_db_name}.software_oids UNION ALL SELECT * FROM ${hive_db_name}.dataset_oids UNION ALL SELECT * FROM ${hive_db_name}.otherresearchproduct_oids;
-CREATE OR REPLACE VIEW ${hive_db_name}.result_pids as SELECT * FROM ${hive_db_name}.publication_pids UNION ALL SELECT * FROM ${hive_db_name}.software_pids UNION ALL SELECT * FROM ${hive_db_name}.dataset_pids UNION ALL SELECT * FROM ${hive_db_name}.otherresearchproduct_pids;
-CREATE OR REPLACE VIEW ${hive_db_name}.result_topics as SELECT * FROM ${hive_db_name}.publication_topics UNION ALL SELECT * FROM ${hive_db_name}.software_topics UNION ALL SELECT * FROM ${hive_db_name}.dataset_topics UNION ALL SELECT * FROM ${hive_db_name}.otherresearchproduct_topics;
+CREATE OR REPLACE VIEW ${stats_db_name}.result_datasources AS SELECT * FROM ${stats_db_name}.publication_datasources UNION ALL SELECT * FROM ${stats_db_name}.software_datasources UNION ALL SELECT * FROM ${stats_db_name}.dataset_datasources UNION ALL SELECT * FROM ${stats_db_name}.otherresearchproduct_datasources;
-DROP TABLE IF EXISTS ${hive_db_name}.result_organization;
-CREATE TABLE ${hive_db_name}.result_organization AS SELECT substr(r.target, 4) as id, substr(r.source, 4) as organization from ${hive_source_db_name}.relation r where r.reltype='resultOrganization';
+CREATE OR REPLACE VIEW ${stats_db_name}.result_citations AS SELECT * FROM ${stats_db_name}.publication_citations UNION ALL SELECT * FROM ${stats_db_name}.software_citations UNION ALL SELECT * FROM ${stats_db_name}.dataset_citations UNION ALL SELECT * FROM ${stats_db_name}.otherresearchproduct_citations;
-DROP TABLE IF EXISTS ${hive_db_name}.result_projects;
-CREATE TABLE ${hive_db_name}.result_projects AS select pr.result as id, pr.id as project, datediff(p.enddate, p.startdate) as daysfromend from ${hive_db_name}.result r join ${hive_db_name}.project_results pr on r.id=pr.result join ${hive_db_name}.project_tmp p on p.id=pr.id;
+CREATE OR REPLACE VIEW ${stats_db_name}.result_classifications AS SELECT * FROM ${stats_db_name}.publication_classifications UNION ALL SELECT * FROM ${stats_db_name}.software_classifications UNION ALL SELECT * FROM ${stats_db_name}.dataset_classifications UNION ALL SELECT * FROM ${stats_db_name}.otherresearchproduct_classifications;
+
+CREATE OR REPLACE VIEW ${stats_db_name}.result_concepts AS SELECT * FROM ${stats_db_name}.publication_concepts UNION ALL SELECT * FROM ${stats_db_name}.software_concepts UNION ALL SELECT * FROM ${stats_db_name}.dataset_concepts UNION ALL SELECT * FROM ${stats_db_name}.otherresearchproduct_concepts;
+
+CREATE OR REPLACE VIEW ${stats_db_name}.result_languages AS SELECT * FROM ${stats_db_name}.publication_languages UNION ALL SELECT * FROM ${stats_db_name}.software_languages UNION ALL SELECT * FROM ${stats_db_name}.dataset_languages UNION ALL SELECT * FROM ${stats_db_name}.otherresearchproduct_languages;
+
+CREATE OR REPLACE VIEW ${stats_db_name}.result_oids AS SELECT * FROM ${stats_db_name}.publication_oids UNION ALL SELECT * FROM ${stats_db_name}.software_oids UNION ALL SELECT * FROM ${stats_db_name}.dataset_oids UNION ALL SELECT * FROM ${stats_db_name}.otherresearchproduct_oids;
+
+CREATE OR REPLACE VIEW ${stats_db_name}.result_pids AS SELECT * FROM ${stats_db_name}.publication_pids UNION ALL SELECT * FROM ${stats_db_name}.software_pids UNION ALL SELECT * FROM ${stats_db_name}.dataset_pids UNION ALL SELECT * FROM ${stats_db_name}.otherresearchproduct_pids;
+
+CREATE OR REPLACE VIEW ${stats_db_name}.result_topics AS SELECT * FROM ${stats_db_name}.publication_topics UNION ALL SELECT * FROM ${stats_db_name}.software_topics UNION ALL SELECT * FROM ${stats_db_name}.dataset_topics UNION ALL SELECT * FROM ${stats_db_name}.otherresearchproduct_topics;
+
+DROP TABLE IF EXISTS ${stats_db_name}.result_organization;
+CREATE TABLE ${stats_db_name}.result_organization AS SELECT substr(r.target, 4) AS id, substr(r.source, 4) AS organization FROM ${openaire_db_name}.relation r WHERE r.reltype='resultOrganization';
+
+DROP TABLE IF EXISTS ${stats_db_name}.result_projects;
+CREATE TABLE ${stats_db_name}.result_projects AS SELECT pr.result AS id, pr.id AS project, datediff(p.enddate, p.startdate) AS daysfromend FROM ${stats_db_name}.result r JOIN ${stats_db_name}.project_results pr ON r.id=pr.result JOIN ${stats_db_name}.project_tmp p ON p.id=pr.id;
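Note: after this step, ${stats_db_name}.result is a single UNION ALL view over the four *_tmp tables, so downstream statements can treat publications, datasets, software and other research products uniformly. An illustrative query against the view (not part of the workflow), assuming the type and access_mode columns it exposes:

    SELECT rd.datasource, count(DISTINCT r.id) AS open_access_pubs
    FROM ${stats_db_name}.result r
    JOIN ${stats_db_name}.result_datasources rd ON rd.id = r.id
    WHERE r.type = 'publication' AND r.access_mode = 'Open Access'
    GROUP BY rd.datasource;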
diff --git a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step9.sql b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step9.sql
index 114eeeeaa..5a120d920 100644
--- a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step9.sql
+++ b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step9.sql
@@ -6,28 +6,31 @@
 -- Datasource table creation & update
 -------------------------------------
 -- Creating and populating temporary datasource table
-DROP TABLE IF EXISTS ${hive_db_name}.datasource_tmp;
-create table ${hive_db_name}.datasource_tmp(`id` string, `name` string, `type` string, `dateofvalidation` string, `yearofvalidation` string, `harvested` boolean, `piwik_id` int, `latitude` string, `longitude` string, `websiteurl` string, `compatibility` string) clustered by (id) into 100 buckets stored as orc tblproperties('transactional'='true');
-insert into ${hive_db_name}.datasource_tmp SELECT substr(d.id, 4) as id, officialname.value as name, datasourcetype.classname as type, dateofvalidation.value as dateofvalidation, date_format(d.dateofvalidation.value,'yyyy') as yearofvalidation, false as harvested, 0 as piwik_id, d.latitude.value as latitude, d.longitude.value as longitude, d.websiteurl.value as websiteurl, d.openairecompatibility.classid as compatibility
-from ${hive_source_db_name}.datasource d
-WHERE d.datainfo.deletedbyinference=false;
+DROP TABLE IF EXISTS ${stats_db_name}.datasource_tmp;
+CREATE TABLE ${stats_db_name}.datasource_tmp(`id` STRING, `name` STRING, `type` STRING, `dateofvalidation` STRING, `yearofvalidation` STRING, `harvested` BOOLEAN, `piwik_id` INT, `latitude` STRING, `longitude` STRING, `websiteurl` STRING, `compatibility` STRING) CLUSTERED BY (id) INTO 100 BUCKETS STORED AS ORC TBLPROPERTIES('transactional'='true');
--- Updating temporary table with everything that is not based on results -> This is done with the following "dual" table. To see if default values are there
+INSERT INTO ${stats_db_name}.datasource_tmp SELECT substr(d.id, 4) AS id, officialname.value AS name, datasourcetype.classname AS type, dateofvalidation.value AS dateofvalidation, date_format(d.dateofvalidation.value,'yyyy') AS yearofvalidation, FALSE AS harvested, 0 AS piwik_id, d.latitude.value AS latitude, d.longitude.value AS longitude, d.websiteurl.value AS websiteurl, d.openairecompatibility.classid AS compatibility
+FROM ${openaire_db_name}.datasource d
+WHERE d.datainfo.deletedbyinference=FALSE;
+
+-- Updating temporary table with everything that is not based on results -> This is done with the following "dual" table.
 -- Creating a temporary dual table that will be removed after the following insert
-CREATE TABLE ${hive_db_name}.dual(dummy char(1));
-INSERT INTO ${hive_db_name}.dual values('X');
-INSERT INTO ${hive_db_name}.datasource_tmp (`id`, `name`, `type`, `dateofvalidation`, `yearofvalidation`, `harvested`, `piwik_id`, `latitude`, `longitude`, `websiteurl`, `compatibility`)
-SELECT 'other', 'Other', 'Repository', NULL, NULL, false, 0, NULL, NULL, NULL, 'unknown' FROM ${hive_db_name}.dual WHERE 'other' not in (SELECT id FROM ${hive_db_name}.datasource_tmp WHERE name='Unknown Repository');
-DROP TABLE ${hive_db_name}.dual;
+CREATE TABLE ${stats_db_name}.dual(dummy CHAR(1));
+INSERT INTO ${stats_db_name}.dual VALUES('X');
+INSERT INTO ${stats_db_name}.datasource_tmp (`id`, `name`, `type`, `dateofvalidation`, `yearofvalidation`, `harvested`, `piwik_id`, `latitude`, `longitude`, `websiteurl`, `compatibility`)
+SELECT 'other', 'Other', 'Repository', NULL, NULL, FALSE, 0, NULL, NULL, NULL, 'unknown' FROM ${stats_db_name}.dual WHERE 'other' NOT IN (SELECT id FROM ${stats_db_name}.datasource_tmp WHERE name='Unknown Repository');
+DROP TABLE ${stats_db_name}.dual;
-UPDATE ${hive_db_name}.datasource_tmp SET name='Other' where name='Unknown Repository';
-UPDATE ${hive_db_name}.datasource_tmp SET yearofvalidation=null WHERE yearofvalidation='-1';
+UPDATE ${stats_db_name}.datasource_tmp SET name='Other' WHERE name='Unknown Repository';
+UPDATE ${stats_db_name}.datasource_tmp SET yearofvalidation=NULL WHERE yearofvalidation='-1';
-DROP TABLE IF EXISTS ${hive_db_name}.datasource_languages;
-CREATE TABLE ${hive_db_name}.datasource_languages AS SELECT substr(d.id, 4) as id, langs.languages as language from ${hive_source_db_name}.datasource d LATERAL VIEW explode(d.odlanguages.value) langs as languages;
-DROP TABLE IF EXISTS ${hive_db_name}.datasource_oids;
-CREATE TABLE ${hive_db_name}.datasource_oids AS SELECT substr(d.id, 4) as id, oids.ids as oid from ${hive_source_db_name}.datasource d LATERAL VIEW explode(d.originalid) oids as ids;
-DROP TABLE IF EXISTS ${hive_db_name}.datasource_organizations;
-CREATE TABLE ${hive_db_name}.datasource_organizations AS select substr(r.target, 4) as id, substr(r.source, 4) as organization from ${hive_source_db_name}.relation r where r.reltype='datasourceOrganization';
+DROP TABLE IF EXISTS ${stats_db_name}.datasource_languages;
+CREATE TABLE ${stats_db_name}.datasource_languages AS SELECT substr(d.id, 4) AS id, langs.languages AS language FROM ${openaire_db_name}.datasource d LATERAL VIEW explode(d.odlanguages.value) langs AS languages;
-CREATE OR REPLACE VIEW ${hive_db_name}.datasource_results AS SELECT datasource AS id, id AS result FROM ${hive_db_name}.result_datasources;
+DROP TABLE IF EXISTS ${stats_db_name}.datasource_oids;
+CREATE TABLE ${stats_db_name}.datasource_oids AS SELECT substr(d.id, 4) AS id, oids.ids AS oid FROM ${openaire_db_name}.datasource d LATERAL VIEW explode(d.originalid) oids AS ids;
+
+DROP TABLE IF EXISTS ${stats_db_name}.datasource_organizations;
+CREATE TABLE ${stats_db_name}.datasource_organizations AS SELECT substr(r.target, 4) AS id, substr(r.source, 4) AS organization FROM ${openaire_db_name}.relation r WHERE r.reltype='datasourceOrganization';
+
+CREATE OR REPLACE VIEW ${stats_db_name}.datasource_results AS SELECT datasource AS id, id AS result FROM ${stats_db_name}.result_datasources;
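Note: the dual helper table exists only to give the guarded INSERT a FROM clause; the WHERE 'other' NOT IN (...) predicate keeps the fallback 'other' datasource row from being inserted twice, and the helper is dropped immediately afterwards. On Hive versions that accept an inline FROM-less subquery, an equivalent guard could be written without the helper table (sketch only, not part of this patch):

    INSERT INTO ${stats_db_name}.datasource_tmp
    SELECT 'other', 'Other', 'Repository', NULL, NULL, FALSE, 0, NULL, NULL, NULL, 'unknown'
    FROM (SELECT 1 AS one) t
    WHERE 'other' NOT IN (SELECT id FROM ${stats_db_name}.datasource_tmp WHERE name = 'Unknown Repository');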
diff --git a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step9_6.sql b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step9_6.sql
deleted file mode 100644
index 461f48bfc..000000000
--- a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/scripts/step9_6.sql
+++ /dev/null
@@ -1,2 +0,0 @@
-DROP TABLE IF EXISTS ${stats_db_name}.datasource_languages;
-CREATE TABLE ${stats_db_name}.datasource_languages AS SELECT substr(d.id, 4) as id, langs.languages as language from ${openaire_db_name}.datasource d LATERAL VIEW explode(d.odlanguages.value) langs as languages;
diff --git a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/workflow.xml b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/workflow.xml
index 269525c0f..b5f3158f7 100644
--- a/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/workflow.xml
+++ b/dhp-workflows/dhp-stats-update/src/main/resources/eu/dnetlib/dhp/oa/graph/stats/oozie_app/workflow.xml
@@ -1,12 +1,25 @@
-
+
- hive_db_name
- the target hive database name
+ stats_db_name
+ the target stats database name
- hive_source_db_name
- the source hive database name
+ openaire_db_name
+ the original graph database name
+
+ external_stats_db_name
+ stats_ext
+ the external stats that should be added since they are not included in the graph database
+
+ hive_metastore_uris
+ hive server metastore URIs
+
+ hive_jdbc_url
+ hive server jdbc url
 hive_timeout
@@ -20,7 +33,7 @@
 hive.metastore.uris
- ${hiveMetastoreUris}
+ ${hive_metastore_uris}
 hive.txn.timeout
@@ -29,271 +42,170 @@
-
+
 Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]
-
+
- ${jobTracker}
- ${nameNode}
-
-
- hive.metastore.uris
- ${hive_metastore_uris}
-
-
- ${hive_jdbc_url}/${hive_db_name}
+ ${hive_jdbc_url}
- hive_db_name=${hive_db_name}
- hive_source_db_name=${hive_source_db_name}
+ stats_db_name=${stats_db_name}
+ openaire_db_name=${openaire_db_name}
+
- ${jobTracker}
- ${nameNode}
-
-
- hive.metastore.uris
- ${hive_metastore_uris}
-
-
- ${hive_jdbc_url}/${hive_db_name}
+ ${hive_jdbc_url}
- hive_db_name=${hive_db_name}
- hive_source_db_name=${hive_source_db_name}
+ stats_db_name=${stats_db_name}
+ openaire_db_name=${openaire_db_name}
+
- ${jobTracker}
- ${nameNode}
-
-
- hive.metastore.uris
- ${hive_metastore_uris}
-
-
- ${hive_jdbc_url}/${hive_db_name}
+ ${hive_jdbc_url}
- hive_db_name=${hive_db_name}
- hive_source_db_name=${hive_source_db_name}
+ stats_db_name=${stats_db_name}
+ openaire_db_name=${openaire_db_name}
-
+
- ${jobTracker}
- ${nameNode}
-
-
- hive.metastore.uris
- ${hive_metastore_uris}
-
-
- hive.variable.substitute
- true
-
-
- ${hive_jdbc_url}/${hive_db_name}
+ ${hive_jdbc_url}
- hive_db_name=${hive_db_name}
- hive_source_db_name=${hive_source_db_name}
+ stats_db_name=${stats_db_name}
+ openaire_db_name=${openaire_db_name}
+
-
- ${jobTracker}
- ${nameNode}
-
-
- hive.metastore.uris
- ${hive_metastore_uris}
-
-
- ${hive_jdbc_url}/${hive_db_name}
+
+ ${hive_jdbc_url}
- hive_db_name=${hive_db_name}
- hive_source_db_name=${hive_source_db_name}
+ stats_db_name=${stats_db_name}
+ openaire_db_name=${openaire_db_name}
+
-
- ${jobTracker}
- ${nameNode}
-
-
- hive.metastore.uris
- ${hive_metastore_uris}
-
-
- ${hive_jdbc_url}/${hive_db_name}
+
+ ${hive_jdbc_url}
- hive_db_name=${hive_db_name}
- hive_source_db_name=${hive_source_db_name}
+ stats_db_name=${stats_db_name}
+ openaire_db_name=${openaire_db_name}
+
-
- ${jobTracker}
- ${nameNode}
-
-
- hive.metastore.uris
- ${hive_metastore_uris}
-
-
- ${hive_jdbc_url}/${hive_db_name}
+
+ ${hive_jdbc_url}
- hive_db_name=${hive_db_name}
- hive_source_db_name=${hive_source_db_name}
+ stats_db_name=${stats_db_name}
+ openaire_db_name=${openaire_db_name}
+
-
- ${jobTracker}
- ${nameNode}
-
-
- hive.metastore.uris
- ${hive_metastore_uris}
-
-
- ${hive_jdbc_url}/${hive_db_name}
+
+ ${hive_jdbc_url}
- hive_db_name=${hive_db_name}
- hive_source_db_name=${hive_source_db_name}
+ stats_db_name=${stats_db_name}
+ openaire_db_name=${openaire_db_name}
+
-
- ${jobTracker}
- ${nameNode}
-
-
- hive.metastore.uris
- ${hive_metastore_uris}
-
-
- ${hive_jdbc_url}/${hive_db_name}
+
+ ${hive_jdbc_url}
- hive_db_name=${hive_db_name}
- hive_source_db_name=${hive_source_db_name}
+ stats_db_name=${stats_db_name}
+ openaire_db_name=${openaire_db_name}
+
-
- ${jobTracker}
- ${nameNode}
-
-
- hive.metastore.uris
- ${hive_metastore_uris}
-
-
- ${hive_jdbc_url}/${hive_db_name}
+
+ ${hive_jdbc_url}
- hive_db_name=${hive_db_name}
- hive_source_db_name=${hive_source_db_name}
+ stats_db_name=${stats_db_name}
+ openaire_db_name=${openaire_db_name}
+
-
- ${jobTracker}
- ${nameNode}
-
-
- hive.metastore.uris
- ${hive_metastore_uris}
-
-
- ${hive_jdbc_url}/${hive_db_name}
+
+ ${hive_jdbc_url}
- hive_db_name=${hive_db_name}
- hive_source_db_name=${hive_source_db_name}
+ stats_db_name=${stats_db_name}
+ openaire_db_name=${openaire_db_name}
+ external_stats_db_name=${external_stats_db_name}
-
+
+
-
- ${jobTracker}
- ${nameNode}
-
-
- hive.metastore.uris
- ${hive_metastore_uris}
-
-
- ${hive_jdbc_url}/${hive_db_name}
+
+ ${hive_jdbc_url}
- hive_db_name=${hive_db_name}
- hive_source_db_name=${hive_source_db_name}
+ stats_db_name=${stats_db_name}
+ openaire_db_name=${openaire_db_name}
+ external_stats_db_name=${external_stats_db_name}
-
+
+
- ${jobTracker}
- ${nameNode}
-
-
- hive.metastore.uris
- ${hive_metastore_uris}
-
-
- ${hive_jdbc_url}/${hive_db_name}
+ ${hive_jdbc_url}
- hive_db_name=${hive_db_name}
- hive_source_db_name=${hive_source_db_name}
+ stats_db_name=${stats_db_name}
+ openaire_db_name=${openaire_db_name}
+
 ${jobTracker}
 ${nameNode}
- impala-shell.sh
- ${hive_db_name}
- impala-stats.sql
- /user/giorgos.alexiou/graph/oozie_app/impala-stats.sql
+ ${stats_db_name}
+ step14.sql
+ /user/${wf:user()}/oa/graph/stats/oozie_app/scripts/step14.sql
 impala-shell.sh
-
-
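Note: the final shell action now hands impala-shell.sh the stats database name, the new step14.sql script and its HDFS location under the submitting user's home (replacing the removed impala-stats.sql), presumably so Impala picks up and analyses the freshly built tables. Since step14.sql itself is not shown in this hunk, the following is only a hypothetical shape for such a script:

    -- illustrative only; the real step14.sql added by this patch may differ
    COMPUTE STATS result_organization;
    COMPUTE STATS result_projects;
    COMPUTE STATS datasource_organizations;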