2020-06-15 18:57:40 +02:00
|
|
|
------------------------------------------------------
|
|
|
|
------------------------------------------------------
|
|
|
|
-- Dataset table/view and Dataset related tables/views
|
|
|
|
------------------------------------------------------
|
|
|
|
------------------------------------------------------
|
|
|
|
|
|
|
|
-- Dataset temporary table supporting updates
|
2021-02-14 02:14:24 +01:00
|
|
|
-- Temporary dataset table used while (re)building the stats DB.
-- Bucketed ORC with ACID enabled ('transactional' = 'true') so the table
-- supports the transactional INSERT/UPDATE flow of the update pipeline.
CREATE TABLE ${stats_db_name}.dataset_tmp
(
    id               STRING,
    title            STRING,
    publisher        STRING,
    journal          STRING,
    date             STRING,
    year             STRING,
    bestlicence      STRING,
    embargo_end_date STRING,
    delayed          BOOLEAN,
    authors          INT,
    source           STRING,
    abstract         BOOLEAN,
    type             STRING
)
    CLUSTERED BY (id) INTO 100 BUCKETS
    STORED AS ORC
    TBLPROPERTIES ('transactional' = 'true');
|
2020-06-15 18:57:40 +02:00
|
|
|
|
2021-02-14 02:14:24 +01:00
|
|
|
-- Populate dataset_tmp from the graph's dataset entities.
-- The 'xx|' id prefix is stripped with substr(..., 4); records deleted by
-- inference or marked invisible are excluded.
INSERT INTO ${stats_db_name}.dataset_tmp
SELECT substr(d.id, 4)                                            AS id,
       d.title[0].value                                           AS title,
       d.publisher.value                                          AS publisher,
       cast(null AS string)                                       AS journal,
       d.dateofacceptance.value                                   AS date,
       date_format(d.dateofacceptance.value, 'yyyy')              AS year,
       d.bestaccessright.classname                                AS bestlicence,
       d.embargoenddate.value                                     AS embargo_end_date,
       false                                                      AS delayed,
       size(d.author)                                             AS authors,
       -- '\u003B' is the semicolon separator for multi-valued sources
       concat_ws('\u003B', d.source.value)                        AS source,
       CASE WHEN size(d.description) > 0 THEN true ELSE false END AS abstract,
       'dataset'                                                  AS type
FROM ${openaire_db_name}.dataset d
WHERE d.datainfo.deletedbyinference = false
  AND d.datainfo.invisible = false;
|
2020-06-15 18:57:40 +02:00
|
|
|
|
2022-03-22 15:16:08 +01:00
|
|
|
-- Citations extracted from the dataset extrainfo XML payloads.
-- Fix: the original evaluated the xpath_string() UDF twice per exploded row
-- (once in SELECT, once in WHERE); the derived table computes it once and
-- the outer query filters on the alias. Result set is identical.
CREATE TABLE ${stats_db_name}.dataset_citations STORED AS PARQUET AS
SELECT c.id, c.cites
FROM (
         SELECT substr(d.id, 4)                                                     AS id,
                xpath_string(citation.value, "//citation/id[@type='openaire']/@value") AS cites
         FROM ${openaire_db_name}.dataset d
             LATERAL VIEW explode(d.extrainfo) citations AS citation
         WHERE d.datainfo.deletedbyinference = false
           AND d.datainfo.invisible = false
     ) c
WHERE c.cites != "";
|
2020-06-15 18:57:40 +02:00
|
|
|
|
2022-03-22 15:16:08 +01:00
|
|
|
-- One row per (dataset, instance type) pair, exploded from the nested
-- instance array.
CREATE TABLE ${stats_db_name}.dataset_classifications STORED AS PARQUET AS
SELECT substr(p.id, 4)        AS id,
       instancetype.classname AS type
FROM ${openaire_db_name}.dataset p
    LATERAL VIEW explode(p.instance.instancetype) instances AS instancetype
WHERE p.datainfo.deletedbyinference = false
  AND p.datainfo.invisible = false;
|
2020-06-15 18:57:40 +02:00
|
|
|
|
2022-03-22 15:16:08 +01:00
|
|
|
-- Normalise context ids to the three-level form a::b::c, padding missing
-- levels with '::other'.
-- NOTE(review): '[^::]' in the patterns is equivalent to '[^:]' (a character
-- class, not a sequence) — presumably intentional shorthand; left untouched.
CREATE TABLE ${stats_db_name}.dataset_concepts STORED AS PARQUET AS
SELECT substr(p.id, 4) AS id,
       CASE
           -- already three (or more) levels: keep as-is
           WHEN contexts.context.id RLIKE '^[^::]+::[^::]+::.+$' THEN contexts.context.id
           -- two levels: pad one
           WHEN contexts.context.id RLIKE '^[^::]+::[^::]+$' THEN concat(contexts.context.id, '::other')
           -- one level: pad two
           WHEN contexts.context.id RLIKE '^[^::]+$' THEN concat(contexts.context.id, '::other::other')
       END             AS concept
FROM ${openaire_db_name}.dataset p
    LATERAL VIEW explode(p.context) contexts AS context
WHERE p.datainfo.deletedbyinference = false
  AND p.datainfo.invisible = false;
|
2020-06-15 18:57:40 +02:00
|
|
|
|
2022-03-22 15:16:08 +01:00
|
|
|
-- Map each dataset instance to its hosting datasource; hostedby keys that do
-- not resolve to a known (non-deleted, visible) datasource fall back to
-- the literal 'other'.
CREATE TABLE ${stats_db_name}.dataset_datasources STORED AS PARQUET AS
SELECT p.id,
       CASE WHEN d.id IS NULL THEN 'other' ELSE p.datasource END AS datasource
FROM (
         SELECT substr(p.id, 4)                            AS id,
                substr(instances.instance.hostedby.key, 4) AS datasource
         FROM ${openaire_db_name}.dataset p
             LATERAL VIEW explode(p.instance) instances AS instance
         WHERE p.datainfo.deletedbyinference = false
           AND p.datainfo.invisible = false
     ) p
    LEFT OUTER JOIN (
        SELECT substr(d.id, 4) AS id
        FROM ${openaire_db_name}.datasource d
        WHERE d.datainfo.deletedbyinference = false
          AND d.datainfo.invisible = false
    ) d ON p.datasource = d.id;
|
2020-06-15 18:57:40 +02:00
|
|
|
|
2022-03-22 15:16:08 +01:00
|
|
|
-- Language classname per dataset.
CREATE TABLE ${stats_db_name}.dataset_languages STORED AS PARQUET AS
SELECT substr(p.id, 4)      AS id,
       p.language.classname AS language
FROM ${openaire_db_name}.dataset p
WHERE p.datainfo.deletedbyinference = false
  AND p.datainfo.invisible = false;
|
2020-06-15 18:57:40 +02:00
|
|
|
|
2022-03-22 15:16:08 +01:00
|
|
|
-- One row per (dataset, original id) pair, exploded from the originalid array.
CREATE TABLE ${stats_db_name}.dataset_oids STORED AS PARQUET AS
SELECT substr(p.id, 4) AS id,
       oids.ids        AS oid
FROM ${openaire_db_name}.dataset p
    LATERAL VIEW explode(p.originalid) oids AS ids
WHERE p.datainfo.deletedbyinference = false
  AND p.datainfo.invisible = false;
|
2020-06-15 18:57:40 +02:00
|
|
|
|
2022-03-22 15:16:08 +01:00
|
|
|
-- One row per (dataset, persistent id) pair, exploded from the pid array;
-- keeps both the pid scheme (classname) and the value.
CREATE TABLE ${stats_db_name}.dataset_pids STORED AS PARQUET AS
SELECT substr(p.id, 4)           AS id,
       ppid.qualifier.classname  AS type,
       ppid.value                AS pid
FROM ${openaire_db_name}.dataset p
    LATERAL VIEW explode(p.pid) pids AS ppid
WHERE p.datainfo.deletedbyinference = false
  AND p.datainfo.invisible = false;
|
2020-06-15 18:57:40 +02:00
|
|
|
|
2022-03-22 15:16:08 +01:00
|
|
|
-- One row per (dataset, subject) pair, exploded from the subject array;
-- keeps the subject scheme (classname) and the subject value as the topic.
CREATE TABLE ${stats_db_name}.dataset_topics STORED AS PARQUET AS
SELECT substr(p.id, 4)                     AS id,
       subjects.subject.qualifier.classname AS type,
       subjects.subject.value               AS topic
FROM ${openaire_db_name}.dataset p
    LATERAL VIEW explode(p.subject) subjects AS subject
WHERE p.datainfo.deletedbyinference = false
  AND p.datainfo.invisible = false;
|