This commit is contained in:
Michele Artini 2022-02-07 10:09:18 +01:00
parent cf612424c9
commit 9f2d5c6c24
43 changed files with 1695 additions and 1160 deletions

View File

@@ -1,31 +1,114 @@
package eu.dnetlib;
import java.io.IOException;
import javax.servlet.http.HttpServletResponse;
import static springfox.documentation.builders.RequestHandlerSelectors.basePackage;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.solr.SolrAutoConfiguration;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.context.annotation.Bean;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import eu.dnetlib.DnetOpenaireExporterProperties.Swagger;
import eu.dnetlib.common.app.AbstractDnetApp;
import eu.dnetlib.openaire.community.CommunityApiController;
import eu.dnetlib.openaire.context.ContextApiController;
import eu.dnetlib.openaire.dsm.DsmApiController;
import eu.dnetlib.openaire.funders.FundersApiController;
import eu.dnetlib.openaire.info.InfoController;
import eu.dnetlib.openaire.project.ProjectsController;
import springfox.documentation.builders.ApiInfoBuilder;
import springfox.documentation.service.ApiInfo;
import springfox.documentation.service.Contact;
import springfox.documentation.spi.DocumentationType;
import springfox.documentation.spring.web.plugins.Docket;
import springfox.documentation.swagger2.annotations.EnableSwagger2;
@EnableCaching
@RestController
@EnableScheduling
@EnableSwagger2
@SpringBootApplication
@EnableAutoConfiguration(exclude = { SolrAutoConfiguration.class })
public class DNetOpenaireExporterApplication {
@EnableAutoConfiguration(exclude = {
SolrAutoConfiguration.class
})
public class DNetOpenaireExporterApplication extends AbstractDnetApp {
@RequestMapping(value = { "/", "/docs" })
public void index(final HttpServletResponse response) throws IOException {
response.sendRedirect("swagger-ui.html");
}
public static final String V1 = "1.0.0";
public static void main(String[] args) throws Exception {
public static void main(final String[] args) throws Exception {
SpringApplication.run(DNetOpenaireExporterApplication.class, args);
}
@Autowired
private DnetOpenaireExporterProperties config;
@Bean
public Docket dsm() {
return _docket("Datasource Manager", DsmApiController.class.getPackage().getName(), config.getSwaggerDsm(), V1);
}
@Bean
public Docket projects() {
return _docket("OpenAIRE Projects", ProjectsController.class.getPackage().getName(), config.getSwaggerProjects(), V1);
}
@Bean
public Docket funders() {
return _docket("OpenAIRE Funders", FundersApiController.class.getPackage().getName(), config.getSwaggerFunders(), V1);
}
@Bean
public Docket communities() {
return _docket("OpenAIRE Communities", CommunityApiController.class.getPackage().getName(), config.getSwaggerCommunities(), V1);
}
@Bean
public Docket contexts() {
return _docket("OpenAIRE Contexts", ContextApiController.class.getPackage().getName(), config.getSwaggerCommunities(), V1);
}
@Bean
public Docket info() {
return _docket("OpenAIRE Info", InfoController.class.getPackage().getName(), config.getSwaggerInfo(), V1);
}
private Docket _docket(final String groupName, final String controllerPackage, final Swagger swag, final String version) {
final Docket d = new Docket(DocumentationType.SWAGGER_2);
configSwagger(d, groupName, controllerPackage, swag, version);
return d;
}
@Override
protected void configSwagger(final Docket docket) {
configSwagger(docket, "OpenAIRE Info", InfoController.class.getPackage().getName(), config.getSwaggerInfo(), V1);
}
private void configSwagger(final Docket docket, final String groupName, final String controllerPackage, final Swagger swag, final String version) {
docket
.groupName(groupName)
.select()
.apis(basePackage(controllerPackage))
.build()
.directModelSubstitute(org.joda.time.LocalDate.class, java.sql.Date.class)
.directModelSubstitute(org.joda.time.DateTime.class, java.util.Date.class)
.apiInfo(apiInfo(swag, version));
}
private ApiInfo apiInfo(final Swagger swag, final String version) {
return new ApiInfoBuilder()
.title(swag.getApiTitle())
.description(swag.getApiDescription())
.license(swag.getApiLicense())
.licenseUrl(swag.getApiLicenseUrl())
.termsOfServiceUrl("")
.version(version)
.contact(new Contact(
swag.getApiContactName(),
swag.getApiContactUrl(),
swag.getApiContactEmail()))
.build();
}
}

View File

@ -1,141 +0,0 @@
package eu.dnetlib;
import com.google.common.collect.Lists;
import com.mongodb.MongoClient;
import com.mongodb.MongoClientOptions;
import com.mongodb.ServerAddress;
import eu.dnetlib.OpenaireExporterConfig.Jdbc;
import eu.dnetlib.data.objectstore.rmi.ObjectStoreService;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
import eu.dnetlib.enabling.is.registry.rmi.ISRegistryService;
import io.micrometer.core.instrument.ImmutableTag;
import io.micrometer.core.instrument.Metrics;
import org.apache.commons.dbcp2.BasicDataSource;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.cxf.endpoint.Client;
import org.apache.cxf.frontend.ClientProxy;
import org.apache.cxf.jaxws.JaxWsProxyFactoryBean;
import org.apache.cxf.transport.http.HTTPConduit;
import org.apache.cxf.transports.http.configuration.HTTPClientPolicy;
import org.apache.maven.model.Model;
import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.ResourceLoader;
import javax.annotation.PostConstruct;
import javax.sql.DataSource;
import java.io.IOException;
import java.io.InputStreamReader;
/**
* Created by claudio on 07/07/2017.
*/
// Legacy bean factory for the dnet exporter (replaced in this commit by
// DNetOpenaireExporterConfiguration): wires JAX-WS service stubs, the JDBC
// datasource, the MongoDB client and a build-info micrometer gauge.
@Configuration
public class DNetOpenaireExporterBeanFactory {
private static final Log log = LogFactory.getLog(DNetOpenaireExporterBeanFactory.class);
@Autowired
private ResourceLoader resourceLoader;
// The project pom.xml, resolved from the classpath to read build coordinates.
@Value("pom.xml")
private ClassPathResource pom;
// Registers a constant "micrometer_info" gauge tagged with the artifact
// coordinates, version and SCM tag parsed from pom.xml; parse/IO failures
// are logged and otherwise ignored.
@PostConstruct
public void init() {
final MavenXpp3Reader reader = new MavenXpp3Reader();
try {
final Model model = reader.read(new InputStreamReader(pom.getInputStream()));
log.info(String.format("registering metric for %s", model.getArtifactId()));
Metrics.gauge("micrometer_info", Lists.newArrayList(
new ImmutableTag("component", model.getGroupId()+":"+model.getArtifactId()),
new ImmutableTag("version", model.getVersion()),
new ImmutableTag("scmtag", model.getScm().getTag())), 1);
} catch (IOException | XmlPullParserException e) {
log.error(e);
}
}
@Autowired
private OpenaireExporterConfig config;
// JAX-WS client stub for the IS lookup service.
@Bean
public ISLookUpService getLookUpService() {
return getServiceStub(ISLookUpService.class, config.getIsLookupUrl());
}
// JAX-WS client stub for the object store service.
@Bean
public ObjectStoreService getObjectStoreService() {
return getServiceStub(ObjectStoreService.class, config.getObjectStoreServiceUrl());
}
// JAX-WS client stub for the IS registry service.
@Bean
public ISRegistryService getRegistryService() {
return getServiceStub(ISRegistryService.class, config.getIsRegistryServiceUrl());
}
// Builds a CXF JAX-WS proxy for the given service interface and endpoint,
// applying the configured connect/receive timeouts to its HTTP conduit
// (only when a CXF client can be obtained for the proxy).
@SuppressWarnings("unchecked")
private <T> T getServiceStub(final Class<T> clazz, final String endpoint) {
log.info(String.format("Initializing service stub %s, endpoint %s", clazz.toString(),endpoint));
final JaxWsProxyFactoryBean jaxWsProxyFactory = new JaxWsProxyFactoryBean();
jaxWsProxyFactory.setServiceClass(clazz);
jaxWsProxyFactory.setAddress(endpoint);
final T service = (T) jaxWsProxyFactory.create();
Client client = ClientProxy.getClient(service);
if (client != null) {
HTTPConduit conduit = (HTTPConduit) client.getConduit();
HTTPClientPolicy policy = new HTTPClientPolicy();
log.info(String.format("setting connectTimeout to %s, receiveTimeout to %s for service %s",
config.getCxfClientConnectTimeout(),
config.getCxfClientReceiveTimeout(),
clazz.getCanonicalName()));
policy.setConnectionTimeout(config.getCxfClientConnectTimeout());
policy.setReceiveTimeout(config.getCxfClientReceiveTimeout());
conduit.setClient(policy);
}
return service;
}
// DBCP2 datasource built from the configured JDBC properties.
// NOTE(review): jdbc.getMaxRows() is passed into the jdbcMaxIdle parameter
// (i.e. BasicDataSource.setMaxIdle) — looks like a property mismatch;
// confirm whether a dedicated max-idle setting was intended.
@Bean
public DataSource getSqlDataSource() {
final Jdbc jdbc = config.getJdbc();
return getDatasource(
jdbc.getDriverClassName(),
jdbc.getUrl(),
jdbc.getUser(),
jdbc.getPwd(),
jdbc.getMinIdle(),
jdbc.getMaxRows());
}
// Assembles a BasicDataSource from the given connection parameters.
private BasicDataSource getDatasource(String driverClassName, String jdbcUrl, String jdbcUser, String jdbcPwd, int jdbcMinIdle, int jdbcMaxIdle) {
final BasicDataSource d = new BasicDataSource();
d.setDriverClassName(driverClassName);
d.setUrl(jdbcUrl);
d.setUsername(jdbcUser);
d.setPassword(jdbcPwd);
d.setMinIdle(jdbcMinIdle);
d.setMaxIdle(jdbcMaxIdle);
return d;
}
// Mongo client pointing at the configured host/port, with the configured
// connections-per-host pool limit.
@Bean
public MongoClient getMongoClient() {
return new MongoClient(
new ServerAddress(config.getDatasource().getMongoHost(), config.getDatasource().getMongoPort()),
MongoClientOptions.builder().connectionsPerHost(config.getDatasource().getMongoConnectionsPerHost()).build());
}
}

View File

@@ -0,0 +1,106 @@
package eu.dnetlib;
import javax.sql.DataSource;
import org.apache.commons.dbcp2.BasicDataSource;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.cxf.endpoint.Client;
import org.apache.cxf.frontend.ClientProxy;
import org.apache.cxf.jaxws.JaxWsProxyFactoryBean;
import org.apache.cxf.transport.http.HTTPConduit;
import org.apache.cxf.transports.http.configuration.HTTPClientPolicy;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import com.mongodb.MongoClient;
import com.mongodb.MongoClientOptions;
import com.mongodb.ServerAddress;
import eu.dnetlib.DnetOpenaireExporterProperties.Jdbc;
import eu.dnetlib.data.objectstore.rmi.ObjectStoreService;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
import eu.dnetlib.enabling.is.registry.rmi.ISRegistryService;
/**
 * Spring wiring for the dnet exporter back-ends: JAX-WS client stubs for the
 * IS services, the DBCP2 JDBC datasource and the MongoDB client.
 *
 * Created by claudio on 07/07/2017.
 */
@Configuration
public class DNetOpenaireExporterConfiguration {

	private static final Log log = LogFactory.getLog(DNetOpenaireExporterConfiguration.class);

	@Autowired
	private DnetOpenaireExporterProperties props;

	/** Client stub for the IS lookup service. */
	@Bean
	public ISLookUpService getLookUpService() {
		return getServiceStub(ISLookUpService.class, props.getIsLookupUrl());
	}

	/** Client stub for the object store service. */
	@Bean
	public ObjectStoreService getObjectStoreService() {
		return getServiceStub(ObjectStoreService.class, props.getObjectStoreServiceUrl());
	}

	/** Client stub for the IS registry service. */
	@Bean
	public ISRegistryService getRegistryService() {
		return getServiceStub(ISRegistryService.class, props.getIsRegistryServiceUrl());
	}

	/**
	 * Creates a CXF JAX-WS proxy for the given service interface and endpoint,
	 * applying the configured connect/receive timeouts to its HTTP conduit
	 * (only when a CXF client can be obtained for the proxy).
	 */
	@SuppressWarnings("unchecked")
	private <T> T getServiceStub(final Class<T> clazz, final String endpoint) {
		log.info(String.format("Initializing service stub %s, endpoint %s", clazz.toString(), endpoint));
		final JaxWsProxyFactoryBean factory = new JaxWsProxyFactoryBean();
		factory.setServiceClass(clazz);
		factory.setAddress(endpoint);
		final T stub = (T) factory.create();
		final Client cxfClient = ClientProxy.getClient(stub);
		if (cxfClient != null) {
			log.info(String.format("setting connectTimeout to %s, receiveTimeout to %s for service %s", props.getCxfClientConnectTimeout(), props
				.getCxfClientReceiveTimeout(), clazz.getCanonicalName()));
			final HTTPClientPolicy timeouts = new HTTPClientPolicy();
			timeouts.setConnectionTimeout(props.getCxfClientConnectTimeout());
			timeouts.setReceiveTimeout(props.getCxfClientReceiveTimeout());
			((HTTPConduit) cxfClient.getConduit()).setClient(timeouts);
		}
		return stub;
	}

	/**
	 * DBCP2 datasource built from the configured JDBC properties.
	 * NOTE(review): getMaxRows() feeds the max-idle setting — confirm whether
	 * a dedicated max-idle property was intended.
	 */
	@Bean
	public DataSource getSqlDataSource() {
		final Jdbc jdbc = props.getJdbc();
		return getDatasource(jdbc.getDriverClassName(), jdbc.getUrl(), jdbc.getUser(), jdbc.getPwd(), jdbc.getMinIdle(), jdbc.getMaxRows());
	}

	/** Assembles a BasicDataSource from the given connection settings. */
	private BasicDataSource getDatasource(final String driver,
			final String url,
			final String user,
			final String pwd,
			final int minIdle,
			final int maxIdle) {
		final BasicDataSource ds = new BasicDataSource();
		ds.setDriverClassName(driver);
		ds.setUrl(url);
		ds.setUsername(user);
		ds.setPassword(pwd);
		ds.setMinIdle(minIdle);
		ds.setMaxIdle(maxIdle);
		return ds;
	}

	/** Mongo client for the configured host/port and connections-per-host limit. */
	@Bean
	public MongoClient getMongoClient() {
		final ServerAddress address = new ServerAddress(props.getDatasource().getMongoHost(), props.getDatasource().getMongoPort());
		final MongoClientOptions options = MongoClientOptions.builder()
			.connectionsPerHost(props.getDatasource().getMongoConnectionsPerHost())
			.build();
		return new MongoClient(address, options);
	}
}

View File

@ -15,7 +15,7 @@ import org.springframework.core.io.Resource;
@Configuration
@PropertySource("classpath:global.properties")
@ConfigurationProperties(prefix = "openaire.exporter")
public class OpenaireExporterConfig {
public class DnetOpenaireExporterProperties {
// ISLOOKUP
private ClassPathResource findSolrIndexUrl;

View File

@ -1,115 +0,0 @@
package eu.dnetlib;
import eu.dnetlib.OpenaireExporterConfig.Swagger;
import eu.dnetlib.openaire.community.CommunityApiController;
import eu.dnetlib.openaire.context.ContextApiController;
import eu.dnetlib.openaire.dsm.DsmApiController;
import eu.dnetlib.openaire.funders.FundersApiController;
import eu.dnetlib.openaire.info.InfoController;
import eu.dnetlib.openaire.project.ProjectsController;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import springfox.documentation.builders.ApiInfoBuilder;
import springfox.documentation.service.ApiInfo;
import springfox.documentation.service.Contact;
import springfox.documentation.spi.DocumentationType;
import springfox.documentation.spring.web.plugins.Docket;
import springfox.documentation.swagger2.annotations.EnableSwagger2;
import static springfox.documentation.builders.RequestHandlerSelectors.basePackage;
/**
 * Springfox configuration: registers one Swagger {@link Docket} (API group)
 * per exporter controller package, each described by its own Swagger section
 * of the exporter configuration.
 */
@Configuration
@EnableSwagger2
public class SwaggerConfig {

	/** API version advertised for every Swagger group. */
	public static final String V1 = "1.0.0";

	@Autowired
	private OpenaireExporterConfig config;

	/** Docket for the Datasource Manager API. */
	@Bean
	public Docket dsm() {
		return _docket("Datasource Manager", DsmApiController.class.getPackage().getName(), config.getSwaggerDsm(), V1);
	}

	/** Docket for the OpenAIRE Projects API. */
	@Bean
	public Docket projects() {
		return _docket("OpenAIRE Projects", ProjectsController.class.getPackage().getName(), config.getSwaggerProjects(), V1);
	}

	/** Docket for the OpenAIRE Funders API. */
	@Bean
	public Docket funders() {
		return _docket("OpenAIRE Funders", FundersApiController.class.getPackage().getName(), config.getSwaggerFunders(), V1);
	}

	/** Docket for the OpenAIRE Communities API. */
	@Bean
	public Docket communities() {
		return _docket("OpenAIRE Communities", CommunityApiController.class.getPackage().getName(), config.getSwaggerCommunities(), V1);
	}

	/** Docket for the OpenAIRE Contexts API. */
	@Bean
	public Docket contexts() {
		// NOTE(review): reuses getSwaggerCommunities() for the Contexts group —
		// looks like a copy/paste; confirm whether a dedicated
		// getSwaggerContexts() property was intended.
		return _docket("OpenAIRE Contexts", ContextApiController.class.getPackage().getName(), config.getSwaggerCommunities(), V1);
	}

	/** Docket for the OpenAIRE Info API. */
	@Bean
	public Docket info() {
		return _docket("OpenAIRE Info", InfoController.class.getPackage().getName(), config.getSwaggerInfo(), V1);
	}

	/**
	 * Builds a Swagger 2 Docket restricted to the given controller package.
	 *
	 * @param groupName the group name shown in the Swagger UI
	 * @param controllerPackage only handlers under this package are documented
	 * @param swag the Swagger section of the exporter configuration
	 * @param version the advertised API version
	 * @return the configured Docket
	 */
	private Docket _docket(final String groupName, final String controllerPackage, final Swagger swag, final String version) {
		return new Docket(DocumentationType.SWAGGER_2)
			.groupName(groupName)
			.select()
			.apis(basePackage(controllerPackage))
			.build()
			// expose joda types as plain JDK dates in the generated model docs
			.directModelSubstitute(org.joda.time.LocalDate.class, java.sql.Date.class)
			.directModelSubstitute(org.joda.time.DateTime.class, java.util.Date.class)
			.apiInfo(apiInfo(swag, version));
	}

	/** Maps the configured Swagger properties to a springfox {@link ApiInfo}. */
	private ApiInfo apiInfo(final Swagger swag, final String version) {
		return new ApiInfoBuilder()
			.title(swag.getApiTitle())
			.description(swag.getApiDescription())
			.license(swag.getApiLicense())
			.licenseUrl(swag.getApiLicenseUrl())
			.termsOfServiceUrl("")
			.version(version)
			.contact(new Contact(
				swag.getApiContactName(),
				swag.getApiContactUrl(),
				swag.getApiContactEmail()))
			.build();
	}
}

View File

@@ -0,0 +1,19 @@
package eu.dnetlib;
import java.io.IOException;
import javax.servlet.http.HttpServletResponse;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
/**
 * Redirects the application root (and {@code /docs}) to the Swagger UI page.
 */
@Controller
public class SwaggerController {

	/**
	 * Issues an HTTP redirect to the bundled swagger-ui.html page.
	 *
	 * @param response the servlet response used to send the redirect
	 * @throws IOException if the redirect cannot be written
	 */
	@RequestMapping(value = { "/", "/docs" })
	public void index(final HttpServletResponse response) throws IOException {
		response.sendRedirect("swagger-ui.html");
	}
}

View File

@ -27,12 +27,11 @@ import com.google.common.collect.Lists;
import com.google.common.escape.Escaper;
import com.google.common.xml.XmlEscapers;
import eu.dnetlib.OpenaireExporterConfig;
import eu.dnetlib.DnetOpenaireExporterProperties;
import eu.dnetlib.enabling.datasources.common.DsmException;
import eu.dnetlib.enabling.datasources.common.DsmRuntimeException;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
import eu.dnetlib.enabling.is.registry.rmi.ISRegistryService;
import eu.dnetlib.openaire.context.Context;
import eu.dnetlib.openaire.context.ContextMappingUtils;
import eu.dnetlib.openaire.dsm.dao.utils.IndexDsInfo;
@ -46,17 +45,11 @@ public class ISClientImpl implements ISClient {
private static final Log log = LogFactory.getLog(ISClientImpl.class);
@Autowired
private OpenaireExporterConfig config;
private DnetOpenaireExporterProperties config;
@Autowired
private ISLookUpService isLookUpService;
@Autowired
private ISRegistryService isRegistryService;
@Autowired
private OperationManager operationManager;
@Override
@Cacheable("indexdsinfo-cache")
public IndexDsInfo calculateCurrentIndexDsInfo() throws DsmException {

View File

@ -1,18 +1,27 @@
package eu.dnetlib.openaire.common;
import java.text.FieldPosition;
import java.util.*;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.Locale;
import java.util.TimeZone;
import com.fasterxml.jackson.databind.util.StdDateFormat;
public class RFC3339DateFormat extends StdDateFormat {
/**
*
*/
private static final long serialVersionUID = 8174507696046505992L;
private static final TimeZone TIMEZONE_Z = TimeZone.getTimeZone("UTC");
// Same as ISO8601DateFormat but serializing milliseconds.
@Override
public StringBuffer format(Date date, StringBuffer toAppendTo, FieldPosition fieldPosition) {
String value = format(date, true, TIMEZONE_Z, Locale.US);
public StringBuffer format(final Date date, final StringBuffer toAppendTo, final FieldPosition fieldPosition) {
final String value = format(date, true, TIMEZONE_Z, Locale.US);
toAppendTo.append(value);
return toAppendTo;
}
@ -20,37 +29,31 @@ public class RFC3339DateFormat extends StdDateFormat {
/**
* Format date into yyyy-MM-ddThh:mm:ss[.sss][Z|[+-]hh:mm]
*
* @param date the date to format
* @param millis true to include millis precision otherwise false
* @param tz timezone to use for the formatting (UTC will produce 'Z')
* @param date
* the date to format
* @param millis
* true to include millis precision otherwise false
* @param tz
* timezone to use for the formatting (UTC will produce 'Z')
* @return the date formatted as yyyy-MM-ddThh:mm:ss[.sss][Z|[+-]hh:mm]
*/
private static String format(Date date, boolean millis, TimeZone tz, Locale loc) {
Calendar calendar = new GregorianCalendar(tz, loc);
private static String format(final Date date, final boolean millis, final TimeZone tz, final Locale loc) {
final Calendar calendar = new GregorianCalendar(tz, loc);
calendar.setTime(date);
// estimate capacity of buffer as close as we can (yeah, that's pedantic ;)
StringBuilder sb = new StringBuilder(30);
sb.append(String.format(
"%04d-%02d-%02dT%02d:%02d:%02d",
calendar.get(Calendar.YEAR),
calendar.get(Calendar.MONTH) + 1,
calendar.get(Calendar.DAY_OF_MONTH),
calendar.get(Calendar.HOUR_OF_DAY),
calendar.get(Calendar.MINUTE),
calendar.get(Calendar.SECOND)
));
final StringBuilder sb = new StringBuilder(30);
sb.append(String.format("%04d-%02d-%02dT%02d:%02d:%02d", calendar.get(Calendar.YEAR), calendar.get(Calendar.MONTH) + 1, calendar
.get(Calendar.DAY_OF_MONTH), calendar.get(Calendar.HOUR_OF_DAY), calendar.get(Calendar.MINUTE), calendar.get(Calendar.SECOND)));
if (millis) {
sb.append(String.format(".%03d", calendar.get(Calendar.MILLISECOND)));
}
int offset = tz.getOffset(calendar.getTimeInMillis());
final int offset = tz.getOffset(calendar.getTimeInMillis());
if (offset != 0) {
int hours = Math.abs((offset / (60 * 1000)) / 60);
int minutes = Math.abs((offset / (60 * 1000)) % 60);
sb.append(String.format("%c%02d:%02d",
(offset < 0 ? '-' : '+'),
hours, minutes));
final int hours = Math.abs(offset / (60 * 1000) / 60);
final int minutes = Math.abs(offset / (60 * 1000) % 60);
sb.append(String.format("%c%02d:%02d", offset < 0 ? '-' : '+', hours, minutes));
} else {
sb.append('Z');
}

View File

@ -1,11 +1,37 @@
package eu.dnetlib.openaire.community;
import com.google.common.base.Functions;
import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import eu.dnetlib.openaire.common.ISClient;
import eu.dnetlib.openaire.context.Context;
import static eu.dnetlib.openaire.community.CommunityConstants.CCONTENTPROVIDER_NAME;
import static eu.dnetlib.openaire.community.CommunityConstants.CCONTENTPROVIDER_OFFICIALNAME;
import static eu.dnetlib.openaire.community.CommunityConstants.CCONTENTPROVIDER_SELCRITERIA;
import static eu.dnetlib.openaire.community.CommunityConstants.CLABEL;
import static eu.dnetlib.openaire.community.CommunityConstants.CONTENTPROVIDERS_ID_SUFFIX;
import static eu.dnetlib.openaire.community.CommunityConstants.CORGANIZATION_LOGOURL;
import static eu.dnetlib.openaire.community.CommunityConstants.CORGANIZATION_NAME;
import static eu.dnetlib.openaire.community.CommunityConstants.CORGANIZATION_WEBSITEURL;
import static eu.dnetlib.openaire.community.CommunityConstants.CPROFILE_SUBJECT;
import static eu.dnetlib.openaire.community.CommunityConstants.CPROJECT_ACRONYM;
import static eu.dnetlib.openaire.community.CommunityConstants.CPROJECT_FULLNAME;
import static eu.dnetlib.openaire.community.CommunityConstants.CPROJECT_FUNDER;
import static eu.dnetlib.openaire.community.CommunityConstants.CPROJECT_NUMBER;
import static eu.dnetlib.openaire.community.CommunityConstants.CSUMMARY_DESCRIPTION;
import static eu.dnetlib.openaire.community.CommunityConstants.CSUMMARY_LOGOURL;
import static eu.dnetlib.openaire.community.CommunityConstants.CSUMMARY_NAME;
import static eu.dnetlib.openaire.community.CommunityConstants.CSUMMARY_STATUS;
import static eu.dnetlib.openaire.community.CommunityConstants.CSUMMARY_ZENODOC;
import static eu.dnetlib.openaire.community.CommunityConstants.CSV_DELIMITER;
import static eu.dnetlib.openaire.community.CommunityConstants.ID_SEPARATOR;
import static eu.dnetlib.openaire.community.CommunityConstants.OPENAIRE_ID;
import static eu.dnetlib.openaire.community.CommunityConstants.ORGANIZATION_ID_SUFFIX;
import static eu.dnetlib.openaire.community.CommunityConstants.PROJECTS_ID_SUFFIX;
import static eu.dnetlib.openaire.community.CommunityConstants.ZENODOCOMMUNITY_ID_SUFFIX;
import java.util.Base64;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@ -14,373 +40,329 @@ import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.stereotype.Component;
import java.io.IOException;
import java.util.*;
import java.util.stream.Collectors;
import com.google.common.base.Functions;
import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import static eu.dnetlib.openaire.community.CommunityConstants.*;
import eu.dnetlib.openaire.common.ISClient;
@Component
@ConditionalOnProperty(value = "openaire.exporter.enable.community", havingValue = "true")
public class CommunityApiCore {//implements CommunityClient{
public class CommunityApiCore {// implements CommunityClient{
private static final Log log = LogFactory.getLog(CommunityApiCore.class);
private static final Log log = LogFactory.getLog(CommunityApiCore.class);
@Autowired
private CommunityClient cci;
@Autowired
private CommunityClient cci;
@Autowired
private ISClient isClient;
@Autowired
private ISClient isClient;
@Autowired
private CommunityCommon cc;
@Autowired
private CommunityCommon cc;
public List<CommunitySummary> listCommunities() throws CommunityException {
return cc.listCommunities();
public List<CommunitySummary> listCommunities() throws CommunityException {
return cc.listCommunities();
}
}
public CommunityDetails getCommunity(final String id) throws CommunityException, CommunityNotFoundException {
return cc.getCommunity(id);
public CommunityDetails getCommunity(final String id) throws CommunityException, CommunityNotFoundException {
return cc.getCommunity(id);
}
}
public void setCommunity(final String id, final CommunityWritableProperties details) throws CommunityException, CommunityNotFoundException {
public void setCommunity(final String id, final CommunityWritableProperties details) throws CommunityException, CommunityNotFoundException {
cc.getCommunity(id); // ensure the community exists.
cc.getCommunity(id); // ensure the community exists.
if (details.getShortName() != null) {
isClient.updateContextAttribute(id, CLABEL, details.getShortName());
if(details.getShortName() != null) {
isClient.updateContextAttribute(id, CLABEL, details.getShortName());
}
if (details.getName() != null) {
isClient.updateContextParam(id, CSUMMARY_NAME, details.getName());
}
if (details.getName() != null){
isClient.updateContextParam(id, CSUMMARY_NAME, details.getName());
}
if (details.getDescription() != null) {
isClient.updateContextParam(id, CSUMMARY_DESCRIPTION, details.getDescription());
}
if(details.getDescription() != null) {
isClient.updateContextParam(id, CSUMMARY_DESCRIPTION, details.getDescription());
}
if (details.getLogoUrl() != null) {
isClient.updateContextParam(id, CSUMMARY_LOGOURL, details.getLogoUrl());
}
if (details.getStatus() != null) {
isClient.updateContextParam(id, CSUMMARY_STATUS, details.getStatus().name());
}
if (details.getSubjects() != null) {
isClient.updateContextParam(id, CPROFILE_SUBJECT, Joiner.on(CSV_DELIMITER).join(details.getSubjects()));
}
if (details.getMainZenodoCommunity() != null) {
isClient.updateContextParam(id, CSUMMARY_ZENODOC, details.getMainZenodoCommunity());
}
cc.updateCommunity(id, details);
}
public List<CommunityProject> getCommunityProjects(final String id) throws CommunityException, CommunityNotFoundException {
cc.getCommunity(id); // ensure the community exists.
return cc.getCommunityInfo(id, PROJECTS_ID_SUFFIX, c -> CommunityMappingUtils.asCommunityProject(id, c));
}
public CommunityProject addCommunityProject(final String id, final CommunityProject project) throws CommunityException, CommunityNotFoundException {
if (!StringUtils.equalsIgnoreCase(id, project.getCommunityId())) {
throw new CommunityException("parameters 'id' and project.communityId must be coherent");
}
final TreeMap<Integer, CommunityProject> projects = getCommunityProjectMap(id);
final String project_id = project.getId();
if (project_id != null && projects.keySet().contains(Integer.valueOf(project_id))) {
if (project.getName() != null) {
isClient.updateConceptParam(id + PROJECTS_ID_SUFFIX + ID_SEPARATOR + project_id, CPROJECT_FULLNAME, project.getName());
}
if (project.getAcronym() != null) {
isClient.updateConceptParam(id + PROJECTS_ID_SUFFIX + ID_SEPARATOR + project_id, CPROJECT_ACRONYM, project.getAcronym());
}
if (project.getOpenaireId() != null) {
isClient.updateConceptParam(id + PROJECTS_ID_SUFFIX + ID_SEPARATOR + project_id, OPENAIRE_ID, project.getOpenaireId());
}
if (project.getFunder() != null) {
isClient.updateConceptParam(id + PROJECTS_ID_SUFFIX + ID_SEPARATOR + project_id, CPROJECT_FUNDER, project.getFunder());
}
if(details.getLogoUrl()!=null){
isClient.updateContextParam(id, CSUMMARY_LOGOURL, details.getLogoUrl());
}
if (project.getGrantId() != null) {
isClient.updateConceptParam(id + PROJECTS_ID_SUFFIX + ID_SEPARATOR + project_id, CPROJECT_NUMBER, project.getGrantId());
}
if (details.getStatus() != null) {
isClient.updateContextParam(id, CSUMMARY_STATUS, details.getStatus().name());
}
if (details.getSubjects() != null) {
isClient.updateContextParam(id, CPROFILE_SUBJECT, Joiner.on(CSV_DELIMITER).join(details.getSubjects()));
}
} else {
project.setId(nextId(projects != null && !projects.isEmpty() ? projects.lastKey() : 0));
}
if (details.getMainZenodoCommunity() != null) {
isClient.updateContextParam(id, CSUMMARY_ZENODOC, details.getMainZenodoCommunity());
}
cc.updateCommunity(id, details);
}
isClient.addConcept(id, id + PROJECTS_ID_SUFFIX, CommunityMappingUtils.asProjectXML(id, project));
public List<CommunityProject> getCommunityProjects(final String id) throws CommunityException, CommunityNotFoundException {
cc.getCommunity(id); // ensure the community exists.
return cc.getCommunityInfo(id, PROJECTS_ID_SUFFIX, c -> CommunityMappingUtils.asCommunityProject(id, c));
}
public CommunityProject addCommunityProject(final String id, final CommunityProject project) throws CommunityException, CommunityNotFoundException {
if (!StringUtils.equalsIgnoreCase(id, project.getCommunityId())) {
throw new CommunityException("parameters 'id' and project.communityId must be coherent");
}
final TreeMap<Integer, CommunityProject> projects = getCommunityProjectMap(id);
String project_id = project.getId();
if (project_id != null && projects.keySet().contains(Integer.valueOf(project_id))){
if (project.getName() != null) {
isClient.updateConceptParam(id + PROJECTS_ID_SUFFIX + ID_SEPARATOR + project_id, CPROJECT_FULLNAME,project.getName());
}
if(project.getAcronym()!= null){
isClient.updateConceptParam(id + PROJECTS_ID_SUFFIX + ID_SEPARATOR + project_id, CPROJECT_ACRONYM,project.getAcronym());
}
if (project.getOpenaireId() != null){
isClient.updateConceptParam(id + PROJECTS_ID_SUFFIX + ID_SEPARATOR + project_id, OPENAIRE_ID, project.getOpenaireId());
}
if (project.getFunder() != null){
isClient.updateConceptParam(id + PROJECTS_ID_SUFFIX + ID_SEPARATOR + project_id, CPROJECT_FUNDER, project.getFunder());
}
if(project.getGrantId() != null){
isClient.updateConceptParam(id + PROJECTS_ID_SUFFIX + ID_SEPARATOR + project_id, CPROJECT_NUMBER, project.getGrantId());
}
}else {
project.setId(nextId(projects != null && !projects.isEmpty() ? projects.lastKey() : 0));
isClient.addConcept(id, id + PROJECTS_ID_SUFFIX, CommunityMappingUtils.asProjectXML(id, project));
}
cc.updateProject(id, project );
return project;
}
private String nextId(final Integer id) {
return String.valueOf(id + 1);
}
}
cc.updateProject(id, project);
return project;
}
public void removeCommunityProject(final String id, final Integer projectId) throws CommunityException, CommunityNotFoundException {
final Map<Integer, CommunityProject> projects = getCommunityProjectMap(id);
if (!projects.containsKey(projectId)) {
throw new CommunityNotFoundException(String.format("project '%s' doesn't exist within context '%s'", projectId, id));
}
isClient.removeConcept(
id,
id + PROJECTS_ID_SUFFIX,
id + PROJECTS_ID_SUFFIX + ID_SEPARATOR + projectId);
cc.removeFromCategory(id, PROJECTS_ID_SUFFIX, String.valueOf(projectId));
}
private String nextId(final Integer id) {
return String.valueOf(id + 1);
}
/** Lists the content providers associated with the community; throws if the community does not exist. */
public List<CommunityContentprovider> getCommunityContentproviders(final String id) throws CommunityException, CommunityNotFoundException {
cc.getCommunity(id); // ensure the community exists.
return cc.getCommunityInfo(id, CONTENTPROVIDERS_ID_SUFFIX, c -> CommunityMappingUtils.asCommunityDataprovider(id, c));
}
/**
 * Removes a project concept from the given community.
 * NOTE(review): duplicate of an earlier definition of the same method — likely a diff-merge artifact; confirm only one survives.
 */
public void removeCommunityProject(final String id, final Integer projectId) throws CommunityException, CommunityNotFoundException {
final Map<Integer, CommunityProject> projects = getCommunityProjectMap(id);
if (!projects.containsKey(projectId)) {
throw new CommunityNotFoundException(String.format("project '%s' doesn't exist within context '%s'", projectId, id));
}
isClient.removeConcept(id, id + PROJECTS_ID_SUFFIX, id + PROJECTS_ID_SUFFIX + ID_SEPARATOR + projectId);
cc.removeFromCategory(id, PROJECTS_ID_SUFFIX, String.valueOf(projectId));
}
/**
 * Adds a content provider to the community, or updates it in place when a concept
 * with the same numeric id already exists.
 *
 * @param id community identifier; must match cp.getCommunityId() (case-insensitive)
 * @param cp the content provider to add/update; its id is assigned when it is new
 * @return the (possibly id-assigned) content provider
 * @throws CommunityException if the two community ids disagree
 */
public CommunityContentprovider addCommunityContentprovider(final String id, final CommunityContentprovider cp) throws CommunityException, CommunityNotFoundException {
log.info("content provider to add " + cp.toString());
if (!StringUtils.equalsIgnoreCase(id, cp.getCommunityId())) {
throw new CommunityException("parameters 'id' and cp.communityId must be coherent");
}
final TreeMap<Integer, CommunityContentprovider> cps = getCommunityContentproviderMap(id);
final String concept_id = cp.getId();
// containsKey instead of keySet().contains: same semantics, direct map lookup.
// NOTE(review): Integer.valueOf throws NumberFormatException on a non-numeric id — confirm upstream validation.
if (concept_id != null && cps.containsKey(Integer.valueOf(concept_id))){
// Existing concept: update only the fields that were provided (null means "leave unchanged").
if (cp.getName() != null) {
isClient.updateConceptParam(id + CONTENTPROVIDERS_ID_SUFFIX + ID_SEPARATOR + concept_id, CCONTENTPROVIDER_NAME,cp.getName());
}
if(cp.getOfficialname()!= null){
isClient.updateConceptParam(id + CONTENTPROVIDERS_ID_SUFFIX + ID_SEPARATOR + concept_id, CCONTENTPROVIDER_OFFICIALNAME,cp.getOfficialname());
}
if (cp.getOpenaireId() != null){
isClient.updateConceptParam(id + CONTENTPROVIDERS_ID_SUFFIX + ID_SEPARATOR + concept_id, OPENAIRE_ID,cp.getOpenaireId());
}
if(cp.getSelectioncriteria() != null){
// Selection criteria are stored as raw XML/CDATA, hence the no-escape variant.
isClient.updateConceptParamNoEscape(id + CONTENTPROVIDERS_ID_SUFFIX + ID_SEPARATOR + concept_id, CCONTENTPROVIDER_SELCRITERIA, cp.toXML());
}
}else{
// New concept: allocate the next id after the current maximum and register it.
log.info("adding new concept for community " + id);
cp.setId(nextId(!cps.isEmpty() ? cps.lastKey() : 0));
isClient.addConcept(id, id + CONTENTPROVIDERS_ID_SUFFIX, CommunityMappingUtils.asContentProviderXML(id, cp));
}
cc.updateDatasource(id, cp);
return cp;
}
/**
 * Removes a content provider concept from the given community.
 *
 * @throws CommunityNotFoundException if the content provider is not registered in the community
 */
public void removeCommunityContentProvider(final String id, final Integer contentproviderId) throws CommunityException, CommunityNotFoundException {
final Map<Integer, CommunityContentprovider> providers = getCommunityContentproviderMap(id);
if (!providers.containsKey(contentproviderId)) {
throw new CommunityNotFoundException(String.format("content provider '%s' doesn't exist within context '%s'", contentproviderId, id));
}
// Remove from the IS registry, then from the local category cache.
isClient.removeConcept(
id,
id + CONTENTPROVIDERS_ID_SUFFIX,
id + CONTENTPROVIDERS_ID_SUFFIX + ID_SEPARATOR + contentproviderId);
cc.removeFromCategory(id, CONTENTPROVIDERS_ID_SUFFIX, String.valueOf(contentproviderId));
}
/** Lists the community's content providers. NOTE(review): duplicate of an earlier identical definition — likely a diff-merge artifact. */
public List<CommunityContentprovider> getCommunityContentproviders(final String id) throws CommunityException, CommunityNotFoundException {
cc.getCommunity(id); // ensure the community exists.
return cc.getCommunityInfo(id, CONTENTPROVIDERS_ID_SUFFIX, c -> CommunityMappingUtils.asCommunityDataprovider(id, c));
}
/**
 * Removes an organization concept from the given community.
 *
 * @throws CommunityNotFoundException if the organization is not registered in the community
 */
public void removeCommunityOrganization(String id, Integer organizationId) throws CommunityException, CommunityNotFoundException {
final Map<Integer, CommunityOrganization> organizations = getCommunityOrganizationMap(id);
if (!organizations.containsKey(organizationId)) {
throw new CommunityNotFoundException(String.format("organization '%s' doesn't exist within context '%s'", organizationId, id));
}
// Remove from the IS registry, then from the local category cache.
isClient.removeConcept(
id,
id + ORGANIZATION_ID_SUFFIX,
id + ORGANIZATION_ID_SUFFIX + ID_SEPARATOR + organizationId);
cc.removeFromCategory(id, ORGANIZATION_ID_SUFFIX, String.valueOf(organizationId));
}
/** Returns the Zenodo communities associated with the given community (delegates to the community cache). */
public List<CommunityZenodoCommunity> getCommunityZenodoCommunities(final String id) throws CommunityException, CommunityNotFoundException {
return cc.getCommunityZenodoCommunities(id);
}
/** Lists the organizations associated with the community; throws if the community does not exist. */
public List<CommunityOrganization> getCommunityOrganizations(final String id) throws CommunityException, CommunityNotFoundException {
cc.getCommunity(id); // existence check only; the return value is discarded.
return cc.getCommunityInfo(id,ORGANIZATION_ID_SUFFIX,c->CommunityMappingUtils.asCommunityOrganization(id,c));
}
/**
 * Adds the given subjects to the community and persists the merged, de-duplicated set.
 *
 * @return a CommunityDetails carrying the resulting subject list
 */
public CommunityDetails addCommunitySubjects(final String id, final List<String> subjects) throws CommunityException, CommunityNotFoundException {
// Merge existing and new subjects; the Set drops duplicates.
final Set<String> merged = Sets.newHashSet(cc.getCommunity(id).getSubjects());
merged.addAll(subjects);
final CommunityDetails details = new CommunityDetails();
details.setSubjects(Lists.newArrayList(merged));
// Persist the updated subject list on the community profile.
setCommunity(id, CommunityWritableProperties.fromDetails(details));
return details;
}
/**
 * Removes the given subjects from the community and persists the remaining set.
 *
 * @return a CommunityDetails carrying the resulting subject list
 */
public CommunityDetails removeCommunitySubjects(final String id, final List<String> subjects) throws CommunityException, CommunityNotFoundException {
// Start from the current subjects and subtract the requested ones.
final Set<String> remaining = Sets.newHashSet(cc.getCommunity(id).getSubjects());
remaining.removeAll(subjects);
final CommunityDetails details = new CommunityDetails();
details.setSubjects(Lists.newArrayList(remaining));
// Persist the updated subject list on the community profile.
setCommunity(id, CommunityWritableProperties.fromDetails(details));
return details;
}
/**
 * Removes a Zenodo community association; evicts the whole community cache so stale entries are not served.
 *
 * @throws CommunityNotFoundException if the Zenodo community is not associated with the community
 */
@CacheEvict(value="community-cache",allEntries = true)
public void removeCommunityZenodoCommunity(final String id, final Integer zenodoCommId) throws CommunityException, CommunityNotFoundException {
final Map<Integer, CommunityZenodoCommunity> zcomms = getZenodoCommunityMap(id);
if (!zcomms.containsKey(zenodoCommId)) {
throw new CommunityNotFoundException(String.format("Zenodo community '%s' doesn't exist within context '%s'", zenodoCommId, id));
}
isClient.removeConcept(
id,
id + ZENODOCOMMUNITY_ID_SUFFIX,
id + ZENODOCOMMUNITY_ID_SUFFIX + ID_SEPARATOR + zenodoCommId);
cc.removeFromCategory(id, ZENODOCOMMUNITY_ID_SUFFIX, String.valueOf(zenodoCommId));
}
/**
 * Associates a new Zenodo community with the given community; evicts the community cache.
 *
 * @throws CommunityException if the community ids disagree, the zenodoid is blank,
 *         or the Zenodo community is already associated
 */
@CacheEvict(value="community-cache",allEntries = true)
public CommunityZenodoCommunity addCommunityZenodoCommunity(final String id, final CommunityZenodoCommunity zc) throws CommunityException, CommunityNotFoundException {
if (!StringUtils.equalsIgnoreCase(id, zc.getCommunityId())) {
throw new CommunityException("parameters 'id' and zc.communityId must be coherent");
}
if(!StringUtils.isNotBlank(zc.getZenodoid())){
throw new CommunityException("parameter zenodoid cannot be null or empty");
}
final TreeMap<Integer, CommunityZenodoCommunity> zcs = getZenodoCommunityMap(id);
// Reject duplicates: the same Zenodo id may only be linked once per community.
for(CommunityZenodoCommunity czc : zcs.values()){
if (czc.getZenodoid().equals(zc.getZenodoid())){
throw new CommunityException("Zenodo community already associated to the RCD");
}
}
// Allocate the next numeric id after the current maximum and register the concept.
zc.setId(nextId(!zcs.isEmpty() ? zcs.lastKey() : 0));
isClient.addConcept(id, id + ZENODOCOMMUNITY_ID_SUFFIX, CommunityMappingUtils.asZenodoCommunityXML(id, zc));
cc.updateZenodoCommunity(id, zc);
return zc;
}
/**
 * Returns the OpenAIRE communities linked to the given Zenodo id; an empty result object when none.
 * NOTE(review): getInverseZenodoCommunityMap() is invoked twice — if it is expensive, hoist it to a local.
 */
public CommunityOpenAIRECommunities getOpenAIRECommunities(String zenodoId) throws CommunityException, CommunityNotFoundException {
if(cci.getInverseZenodoCommunityMap().containsKey(zenodoId))
return new CommunityOpenAIRECommunities().setZenodoid(zenodoId).setOpenAirecommunitylist(cci.getInverseZenodoCommunityMap().get(zenodoId).stream().collect(Collectors.toList()));
return new CommunityOpenAIRECommunities();
}
// HELPERS
private TreeMap<Integer, CommunityProject> getCommunityProjectMap(final String id) throws CommunityException, CommunityNotFoundException {
return getCommunityProjects(id).stream()
.collect(Collectors.toMap(
p -> Integer.valueOf(p.getId()),
Functions.identity(),
(p1, p2) -> {
log.warn(String.format("duplicate project found: '%s'", p1.getId()));
return p2;
},
TreeMap::new));
}
public CommunityContentprovider addCommunityContentprovider(final String id, final CommunityContentprovider cp)
throws CommunityException, CommunityNotFoundException {
log.info("content provider to add " + cp.toString());
if (!StringUtils.equalsIgnoreCase(id, cp.getCommunityId())) { throw new CommunityException("parameters 'id' and cp.communityId must be coherent"); }
/**
 * Builds a sorted map of the community's content providers keyed by numeric id.
 * On duplicate ids the later entry wins (a warning is logged).
 */
private TreeMap<Integer, CommunityContentprovider> getCommunityContentproviderMap(final String id) throws CommunityException, CommunityNotFoundException {
log.info("getting community content provider map");
return getCommunityContentproviders(id).stream()
.collect(Collectors.toMap(
cp -> Integer.valueOf(cp.getId()),
Functions.identity(),
(cp1, cp2) -> {
log.warn(String.format("duplicate content provider found: '%s'", cp1.getId()));
return cp2;
},
TreeMap::new));
}
/**
 * Builds a sorted map of the community's Zenodo communities keyed by numeric id.
 * On duplicate ids the later entry wins (a warning is logged).
 */
private TreeMap<Integer,CommunityZenodoCommunity> getZenodoCommunityMap(final String id) throws CommunityException, CommunityNotFoundException{
return getCommunityZenodoCommunities(id).stream()
.collect(Collectors.toMap(
cp -> Integer.valueOf(cp.getId()),
Functions.identity(),
(cp1, cp2) -> {
log.warn(String.format("duplicate Zenodo community found: '%s'", cp1.getId()));
return cp2;
},
TreeMap::new));
}
/**
 * Builds a sorted map of the community's organizations keyed by numeric id.
 * On duplicate ids the later entry wins (a warning is logged).
 */
private TreeMap<Integer, CommunityOrganization> getCommunityOrganizationMap(final String id) throws CommunityException, CommunityNotFoundException {
return getCommunityOrganizations(id).stream()
.collect(Collectors.toMap(
o -> Integer.valueOf(o.getId()),
Functions.identity(),
(o1, o2) -> {
// fixed copy-paste bug: message previously said "duplicate content provider"
log.warn(String.format("duplicate organization found: '%s'", o1.getId()));
return o2;
},
TreeMap::new));
}
/**
 * Fetches the community context map from the IS client, wrapping I/O failures in a CommunityException.
 */
private Map<String, Context> getContextMap() throws CommunityException {
try {
return isClient.getCommunityContextMap();
} catch (IOException e) {
// Preserve the original cause so callers can diagnose the IS failure.
throw new CommunityException(e);
}
}
/**
 * Adds an organization to the community, or updates it in place when a concept
 * with the same numeric id already exists.
 *
 * @param id community identifier; must match organization.getCommunityId() (case-insensitive)
 * @param organization the organization to add/update; its id is assigned when it is new
 * @return the (possibly id-assigned) organization
 * @throws CommunityException if the two community ids disagree
 */
public CommunityOrganization addCommunityOrganization(String id, CommunityOrganization organization) throws CommunityException, CommunityNotFoundException {
if (!StringUtils.equalsIgnoreCase(id, organization.getCommunityId())) {
throw new CommunityException("parameters 'id' and organization.communityId must be coherent");
}
final TreeMap<Integer, CommunityOrganization> cps = getCommunityOrganizationMap(id);
final String organization_id = organization.getId();
// containsKey instead of keySet().contains: same semantics, direct map lookup.
if (organization_id != null && cps.containsKey(Integer.valueOf(organization_id))){
// Existing concept: update only the fields that were provided (null means "leave unchanged").
if (organization.getName() != null) {
isClient.updateConceptParam(id + ORGANIZATION_ID_SUFFIX + ID_SEPARATOR + organization_id, CORGANIZATION_NAME,organization.getName());
}
// URLs are stored Base64-encoded.
// NOTE(review): getBytes() uses the platform default charset — consider StandardCharsets.UTF_8 explicitly.
if(organization.getLogo_url()!= null){
isClient.updateConceptParam(id + ORGANIZATION_ID_SUFFIX + ID_SEPARATOR + organization_id, CORGANIZATION_LOGOURL, Base64.getEncoder().encodeToString(organization.getLogo_url().getBytes()));
}
if (organization.getWebsite_url() != null){
isClient.updateConceptParam(id + ORGANIZATION_ID_SUFFIX + ID_SEPARATOR + organization_id, CORGANIZATION_WEBSITEURL,Base64.getEncoder().encodeToString(organization.getWebsite_url().getBytes()));
}
}else{
// New concept: allocate the next id after the current maximum and register it.
organization.setId(nextId(!cps.isEmpty() ? cps.lastKey() : 0));
isClient.addConcept(id, id + ORGANIZATION_ID_SUFFIX, CommunityMappingUtils.asOrganizationXML(id, organization));
}
cc.updateOrganization(id, organization);
return organization;
}
}
final TreeMap<Integer, CommunityContentprovider> cps = getCommunityContentproviderMap(id);
final String concept_id = cp.getId();
if (concept_id != null && cps.keySet().contains(Integer.valueOf(concept_id))) {
if (cp.getName() != null) {
isClient.updateConceptParam(id + CONTENTPROVIDERS_ID_SUFFIX + ID_SEPARATOR + concept_id, CCONTENTPROVIDER_NAME, cp.getName());
}
if (cp.getOfficialname() != null) {
isClient.updateConceptParam(id + CONTENTPROVIDERS_ID_SUFFIX + ID_SEPARATOR + concept_id, CCONTENTPROVIDER_OFFICIALNAME, cp.getOfficialname());
}
if (cp.getOpenaireId() != null) {
isClient.updateConceptParam(id + CONTENTPROVIDERS_ID_SUFFIX + ID_SEPARATOR + concept_id, OPENAIRE_ID, cp.getOpenaireId());
}
if (cp.getSelectioncriteria() != null) {
isClient.updateConceptParamNoEscape(id + CONTENTPROVIDERS_ID_SUFFIX + ID_SEPARATOR + concept_id, CCONTENTPROVIDER_SELCRITERIA, cp.toXML());
}
} else {
log.info("adding new concept for community " + id);
cp.setId(nextId(!cps.isEmpty() ? cps.lastKey() : 0));
isClient.addConcept(id, id + CONTENTPROVIDERS_ID_SUFFIX, CommunityMappingUtils.asContentProviderXML(id, cp));
}
cc.updateDatasource(id, cp);
return cp;
}
/**
 * Removes a content provider concept from the given community.
 * NOTE(review): this duplicates an earlier definition of the same method — likely a diff-merge artifact.
 */
public void removeCommunityContentProvider(final String id, final Integer contentproviderId) throws CommunityException, CommunityNotFoundException {
final Map<Integer, CommunityContentprovider> providers = getCommunityContentproviderMap(id);
if (!providers.containsKey(contentproviderId)) {
throw new CommunityNotFoundException(String.format("content provider '%s' doesn't exist within context '%s'", contentproviderId, id));
}
isClient.removeConcept(id, id + CONTENTPROVIDERS_ID_SUFFIX, id + CONTENTPROVIDERS_ID_SUFFIX + ID_SEPARATOR + contentproviderId);
cc.removeFromCategory(id, CONTENTPROVIDERS_ID_SUFFIX, String.valueOf(contentproviderId));
}
/**
 * Removes an organization concept from the given community.
 *
 * @throws CommunityNotFoundException if the organization is not registered in the community
 */
public void removeCommunityOrganization(final String id, final Integer organizationId) throws CommunityException, CommunityNotFoundException {
final Map<Integer, CommunityOrganization> organizations = getCommunityOrganizationMap(id);
if (!organizations.containsKey(organizationId)) {
throw new CommunityNotFoundException(String.format("organization '%s' doesn't exist within context '%s'", organizationId, id));
}
// Remove from the IS registry, then from the local category cache.
isClient.removeConcept(id, id + ORGANIZATION_ID_SUFFIX, id + ORGANIZATION_ID_SUFFIX + ID_SEPARATOR + organizationId);
cc.removeFromCategory(id, ORGANIZATION_ID_SUFFIX, String.valueOf(organizationId));
}
/** Returns the Zenodo communities associated with the given community (delegates to the community cache). */
public List<CommunityZenodoCommunity> getCommunityZenodoCommunities(final String id) throws CommunityException, CommunityNotFoundException {
return cc.getCommunityZenodoCommunities(id);
}
/** Lists the organizations associated with the community; throws if the community does not exist. */
public List<CommunityOrganization> getCommunityOrganizations(final String id) throws CommunityException, CommunityNotFoundException {
cc.getCommunity(id); // existence check only; the return value is discarded.
return cc.getCommunityInfo(id, ORGANIZATION_ID_SUFFIX, c -> CommunityMappingUtils.asCommunityOrganization(id, c));
}
/**
 * Adds the given subjects to the community and persists the merged, de-duplicated set.
 *
 * @return a CommunityDetails carrying the resulting subject list
 */
public CommunityDetails addCommunitySubjects(final String id, final List<String> subjects) throws CommunityException, CommunityNotFoundException {
final CommunityDetails cd = new CommunityDetails();
// The Set drops duplicate subjects while merging.
final Set<String> current = Sets.newHashSet(cc.getCommunity(id).getSubjects());
current.addAll(subjects);
cd.setSubjects(Lists.newArrayList(current));
setCommunity(id, CommunityWritableProperties.fromDetails(cd));
return cd;
}
/**
 * Removes the given subjects from the community and persists the remaining set.
 *
 * @return a CommunityDetails carrying the resulting subject list
 */
public CommunityDetails removeCommunitySubjects(final String id, final List<String> subjects) throws CommunityException, CommunityNotFoundException {
final CommunityDetails cd = new CommunityDetails();
final Set<String> current = Sets.newHashSet(cc.getCommunity(id).getSubjects());
current.removeAll(subjects);
cd.setSubjects(Lists.newArrayList(current));
setCommunity(id, CommunityWritableProperties.fromDetails(cd));
return cd;
}
/**
 * Removes a Zenodo community association; evicts the whole community cache so stale entries are not served.
 *
 * @throws CommunityNotFoundException if the Zenodo community is not associated with the community
 */
@CacheEvict(value = "community-cache", allEntries = true)
public void removeCommunityZenodoCommunity(final String id, final Integer zenodoCommId) throws CommunityException, CommunityNotFoundException {
final Map<Integer, CommunityZenodoCommunity> zcomms = getZenodoCommunityMap(id);
if (!zcomms.containsKey(zenodoCommId)) {
throw new CommunityNotFoundException(String.format("Zenodo community '%s' doesn't exist within context '%s'", zenodoCommId, id));
}
isClient.removeConcept(id, id + ZENODOCOMMUNITY_ID_SUFFIX, id + ZENODOCOMMUNITY_ID_SUFFIX + ID_SEPARATOR + zenodoCommId);
cc.removeFromCategory(id, ZENODOCOMMUNITY_ID_SUFFIX, String.valueOf(zenodoCommId));
}
/**
 * Associates a new Zenodo community with the given community; evicts the community cache.
 *
 * @throws CommunityException if the community ids disagree, the zenodoid is blank,
 *         or the Zenodo community is already associated
 */
@CacheEvict(value = "community-cache", allEntries = true)
public CommunityZenodoCommunity addCommunityZenodoCommunity(final String id, final CommunityZenodoCommunity zc)
throws CommunityException, CommunityNotFoundException {
if (!StringUtils.equalsIgnoreCase(id, zc.getCommunityId())) { throw new CommunityException("parameters 'id' and zc.communityId must be coherent"); }
if (!StringUtils.isNotBlank(zc.getZenodoid())) { throw new CommunityException("parameter zenodoid cannot be null or empty"); }
final TreeMap<Integer, CommunityZenodoCommunity> zcs = getZenodoCommunityMap(id);
// Reject duplicates: the same Zenodo id may only be linked once per community.
for (final CommunityZenodoCommunity czc : zcs.values()) {
if (czc.getZenodoid().equals(zc.getZenodoid())) { throw new CommunityException("Zenodo community already associated to the RCD"); }
}
// Allocate the next numeric id after the current maximum and register the concept.
zc.setId(nextId(!zcs.isEmpty() ? zcs.lastKey() : 0));
isClient.addConcept(id, id + ZENODOCOMMUNITY_ID_SUFFIX, CommunityMappingUtils.asZenodoCommunityXML(id, zc));
cc.updateZenodoCommunity(id, zc);
return zc;
}
/**
 * Returns the OpenAIRE communities linked to the given Zenodo id; an empty result object when none.
 * NOTE(review): getInverseZenodoCommunityMap() is invoked twice — if it is expensive, hoist it to a local.
 */
public CommunityOpenAIRECommunities getOpenAIRECommunities(final String zenodoId) throws CommunityException, CommunityNotFoundException {
if (cci.getInverseZenodoCommunityMap().containsKey(zenodoId)) {
return new CommunityOpenAIRECommunities().setZenodoid(zenodoId)
.setOpenAirecommunitylist(cci.getInverseZenodoCommunityMap().get(zenodoId).stream().collect(Collectors.toList()));
}
return new CommunityOpenAIRECommunities();
}
// HELPERS
private TreeMap<Integer, CommunityProject> getCommunityProjectMap(final String id) throws CommunityException, CommunityNotFoundException {
return getCommunityProjects(id).stream()
.collect(Collectors.toMap(p -> Integer.valueOf(p.getId()), Functions.identity(), (p1, p2) -> {
log.warn(String.format("duplicate project found: '%s'", p1.getId()));
return p2;
}, TreeMap::new));
}
/**
 * Builds a sorted map of the community's content providers keyed by numeric id.
 * On duplicate ids the later entry wins (a warning is logged).
 */
private TreeMap<Integer, CommunityContentprovider> getCommunityContentproviderMap(final String id) throws CommunityException, CommunityNotFoundException {
log.info("getting community content provider map");
return getCommunityContentproviders(id).stream()
.collect(Collectors.toMap(cp -> Integer.valueOf(cp.getId()), Functions.identity(), (cp1, cp2) -> {
log.warn(String.format("duplicate content provider found: '%s'", cp1.getId()));
return cp2;
}, TreeMap::new));
}
/**
 * Builds a sorted map of the community's Zenodo communities keyed by numeric id.
 * On duplicate ids the later entry wins (a warning is logged).
 */
private TreeMap<Integer, CommunityZenodoCommunity> getZenodoCommunityMap(final String id) throws CommunityException, CommunityNotFoundException {
return getCommunityZenodoCommunities(id).stream()
.collect(Collectors.toMap(cp -> Integer.valueOf(cp.getId()), Functions.identity(), (cp1, cp2) -> {
log.warn(String.format("duplicate Zenodo community found: '%s'", cp1.getId()));
return cp2;
}, TreeMap::new));
}
/**
 * Builds a sorted map of the community's organizations keyed by numeric id.
 * On duplicate ids the later entry wins (a warning is logged).
 */
private TreeMap<Integer, CommunityOrganization> getCommunityOrganizationMap(final String id) throws CommunityException, CommunityNotFoundException {
return getCommunityOrganizations(id).stream()
.collect(Collectors.toMap(o -> Integer.valueOf(o.getId()), Functions.identity(), (o1, o2) -> {
// fixed copy-paste bug: message previously said "duplicate content provider"
log.warn(String.format("duplicate organization found: '%s'", o1.getId()));
return o2;
}, TreeMap::new));
}
/**
 * Adds an organization to the community, or updates it in place when a concept
 * with the same numeric id already exists.
 *
 * @param id community identifier; must match organization.getCommunityId() (case-insensitive)
 * @param organization the organization to add/update; its id is assigned when it is new
 * @return the (possibly id-assigned) organization
 * @throws CommunityException if the two community ids disagree
 */
public CommunityOrganization addCommunityOrganization(final String id, final CommunityOrganization organization)
throws CommunityException, CommunityNotFoundException {
if (!StringUtils.equalsIgnoreCase(id, organization.getCommunityId())) {
throw new CommunityException("parameters 'id' and organization.communityId must be coherent");
}
final TreeMap<Integer, CommunityOrganization> cps = getCommunityOrganizationMap(id);
final String organization_id = organization.getId();
if (organization_id != null && cps.keySet().contains(Integer.valueOf(organization_id))) {
// Existing concept: update only the fields that were provided (null means "leave unchanged").
if (organization.getName() != null) {
isClient.updateConceptParam(id + ORGANIZATION_ID_SUFFIX + ID_SEPARATOR + organization_id, CORGANIZATION_NAME, organization.getName());
}
// URLs are stored Base64-encoded.
// NOTE(review): getBytes() uses the platform default charset — consider an explicit UTF-8 charset.
if (organization.getLogo_url() != null) {
isClient.updateConceptParam(id + ORGANIZATION_ID_SUFFIX + ID_SEPARATOR + organization_id, CORGANIZATION_LOGOURL, Base64.getEncoder()
.encodeToString(organization.getLogo_url().getBytes()));
}
if (organization.getWebsite_url() != null) {
isClient.updateConceptParam(id + ORGANIZATION_ID_SUFFIX + ID_SEPARATOR + organization_id, CORGANIZATION_WEBSITEURL, Base64.getEncoder()
.encodeToString(organization.getWebsite_url().getBytes()));
}
} else {
// New concept: allocate the next id after the current maximum and register it.
organization.setId(nextId(!cps.isEmpty() ? cps.lastKey() : 0));
isClient.addConcept(id, id + ORGANIZATION_ID_SUFFIX, CommunityMappingUtils.asOrganizationXML(id, organization));
}
cc.updateOrganization(id, organization);
return organization;
}
}

View File

@ -4,16 +4,13 @@ import javax.validation.constraints.NotNull;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.google.gson.Gson;
import eu.dnetlib.openaire.community.selectioncriteria.SelectionCriteria;
import io.swagger.annotations.ApiModelProperty;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@JsonAutoDetect
public class CommunityContentprovider {
private static final Log log = LogFactory.getLog(CommunityContentprovider.class);
@ApiModelProperty(value = "OpenAIRE identifier for this content provider, if available", required = false)
private String openaireId;
@ -32,7 +29,7 @@ public class CommunityContentprovider {
@ApiModelProperty(value = "content provider official name", required = true)
private String officialname;
//@NotNull
// @NotNull
@ApiModelProperty(value = "content provider selection criteria", required = false)
private SelectionCriteria selectioncriteria;
@ -78,28 +75,26 @@ public class CommunityContentprovider {
public SelectionCriteria getSelectioncriteria() {
return this.selectioncriteria;
return this.selectioncriteria;
}
public void setSelectioncriteria(SelectionCriteria selectioncriteria) {
public void setSelectioncriteria(final SelectionCriteria selectioncriteria) {
this.selectioncriteria = selectioncriteria;
}
public String toString(){
return String.format("id %s, name %s, selection criteria %s" , this.id, this.name, toJson());
@Override
public String toString() {
return String.format("id %s, name %s, selection criteria %s", this.id, this.name, toJson());
}
public String toJson() {
if (selectioncriteria == null)
return "";
if (selectioncriteria == null) { return ""; }
return new Gson().toJson(selectioncriteria);
}
public String toXML() {
if (selectioncriteria == null)
return "";
return "<![CDATA["+ toJson() + "]]>";
if (selectioncriteria == null) { return ""; }
return "<![CDATA[" + toJson() + "]]>";
}
}

View File

@ -10,6 +10,11 @@ import org.springframework.web.bind.annotation.ResponseStatus;
@ResponseStatus(value = HttpStatus.INTERNAL_SERVER_ERROR)
public class CommunityException extends Exception {
/**
*
*/
private static final long serialVersionUID = -4961233580574761346L;
public CommunityException(final String message) {
super(message);
}

View File

@ -8,6 +8,11 @@ import org.springframework.web.bind.annotation.ResponseStatus;
@ResponseStatus(value = HttpStatus.NOT_FOUND)
public class CommunityNotFoundException extends Exception {
/**
*
*/
private static final long serialVersionUID = -5605421323034135778L;
public CommunityNotFoundException(final String msg) {
super(msg);
}

View File

@ -1,9 +1,9 @@
package eu.dnetlib.openaire.community;
import java.util.Date;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import io.swagger.annotations.ApiModelProperty;
@JsonAutoDetect
@ -42,21 +42,20 @@ public class CommunitySummary {
@ApiModelProperty("Zenodo community associated to this community")
protected String zenodoCommunity;
public CommunitySummary() {
}
public CommunitySummary() {}
public CommunitySummary(
final String id,
final String queryId,
final String type,
final String name,
final String shortName,
final Date creationDate,
final Date lastUpdateDate,
final String description,
final String logoUrl,
final CommunityStatus status,
final String zenodoCommunity) {
final String id,
final String queryId,
final String type,
final String name,
final String shortName,
final Date creationDate,
final Date lastUpdateDate,
final String description,
final String logoUrl,
final CommunityStatus status,
final String zenodoCommunity) {
this.id = id;
this.queryId = queryId;
this.type = type;
@ -72,16 +71,16 @@ public class CommunitySummary {
public CommunitySummary(final CommunitySummary summary) {
this(summary.getId(),
summary.getQueryId(),
summary.getType(),
summary.getName(),
summary.getShortName(),
summary.getCreationDate(),
summary.getLastUpdateDate(),
summary.getDescription(),
summary.getLogoUrl(),
summary.getStatus(),
summary.getZenodoCommunity());
summary.getQueryId(),
summary.getType(),
summary.getName(),
summary.getShortName(),
summary.getCreationDate(),
summary.getLastUpdateDate(),
summary.getDescription(),
summary.getLogoUrl(),
summary.getStatus(),
summary.getZenodoCommunity());
}
public String getId() {
@ -124,13 +123,21 @@ public class CommunitySummary {
this.shortName = shortName;
}
public Date getCreationDate() { return creationDate; }
public Date getCreationDate() {
return creationDate;
}
public void setCreationDate(final Date creationDate) { this.creationDate = creationDate; }
public void setCreationDate(final Date creationDate) {
this.creationDate = creationDate;
}
public Date getLastUpdateDate() { return lastUpdateDate; }
public Date getLastUpdateDate() {
return lastUpdateDate;
}
public void setLastUpdateDate(final Date lastUpdateDate) { this.lastUpdateDate = lastUpdateDate; }
public void setLastUpdateDate(final Date lastUpdateDate) {
this.lastUpdateDate = lastUpdateDate;
}
public String getDescription() {
return description;
@ -160,7 +167,7 @@ public class CommunitySummary {
return zenodoCommunity;
}
public void setZenodoCommunity(String zenodoCommunity) {
public void setZenodoCommunity(final String zenodoCommunity) {
this.zenodoCommunity = zenodoCommunity;
}

View File

@ -1,44 +1,45 @@
package eu.dnetlib.openaire.community.selectioncriteria;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import java.io.Serializable;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
@JsonAutoDetect
public class Constraint implements Serializable {
private String verb;
private String field;
private String value;
/**
*
*/
private static final long serialVersionUID = -5996232267609464747L;
public Constraint() {
}
private String verb;
private String field;
private String value;
public String getVerb() {
return verb;
}
public Constraint() {}
public void setVerb(String verb) {
this.verb = verb;
}
public String getVerb() {
return verb;
}
public String getField() {
return field;
}
public void setVerb(final String verb) {
this.verb = verb;
}
public void setField(String field) {
this.field = field;
}
public String getField() {
return field;
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
public void setField(final String field) {
this.field = field;
}
public String getValue() {
return value;
}
public void setValue(final String value) {
this.value = value;
}
}

View File

@ -1,23 +1,27 @@
package eu.dnetlib.openaire.community.selectioncriteria;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import java.io.Serializable;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
@JsonAutoDetect
public class Constraints implements Serializable {
private List<Constraint> constraint;
/**
*
*/
private static final long serialVersionUID = 2694950017620361195L;
private List<Constraint> constraint;
public Constraints() {
}
public List<Constraint> getConstraint() {
return constraint;
}
public Constraints() {}
public void setConstraint(List<Constraint> constraint) {
this.constraint = constraint;
}
public List<Constraint> getConstraint() {
return constraint;
}
public void setConstraint(final List<Constraint> constraint) {
this.constraint = constraint;
}
}

View File

@ -1,30 +1,33 @@
package eu.dnetlib.openaire.community.selectioncriteria;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.google.gson.Gson;
import java.io.Serializable;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.google.gson.Gson;
@JsonAutoDetect
public class SelectionCriteria implements Serializable {
private List<Constraints> criteria;
public SelectionCriteria() {
}
/**
*
*/
private static final long serialVersionUID = 4303936216579280542L;
private List<Constraints> criteria;
public List<Constraints> getCriteria() {
return criteria;
}
public SelectionCriteria() {}
public void setCriteria(List<Constraints> criteria) {
this.criteria = criteria;
}
public List<Constraints> getCriteria() {
return criteria;
}
public void setCriteria(final List<Constraints> criteria) {
this.criteria = criteria;
}
public static SelectionCriteria fromJson(final String json) {
return new Gson().fromJson(json, SelectionCriteria.class);
public static SelectionCriteria fromJson(final String json) {
return new Gson().fromJson(json, SelectionCriteria.class);
}
}
}

View File

@ -6,102 +6,96 @@ import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
import eu.dnetlib.openaire.common.ISClient;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Component;
import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
import eu.dnetlib.openaire.common.ISClient;
@Component
@ConditionalOnProperty(value = "openaire.exporter.enable.context", havingValue = "true")
public class ContextApiCore {
private static final Log log = LogFactory.getLog(ContextApiCore.class);
private static final String SEPARATOR = "::";
@Autowired
private ISClient isClient;
public List<ContextSummary> listContexts(final List<String> type) throws ContextException {
return getContextMap(type).values().stream()
.map(c -> new ContextSummary()
.setId(c.getId())
.setType(c.getType())
.setLabel(c.getLabel())
.setStatus(c.getParams().containsKey("status") ? c.getParams().get("status").get(0).getValue() : ""))
.collect(Collectors.toList());
return getContextMap(type).values()
.stream()
.map(c -> new ContextSummary()
.setId(c.getId())
.setType(c.getType())
.setLabel(c.getLabel())
.setStatus(c.getParams().containsKey("status") ? c.getParams().get("status").get(0).getValue() : ""))
.collect(Collectors.toList());
}
public List<CategorySummary> listCategories(final String contextId, Boolean all) throws ContextException {
public List<CategorySummary> listCategories(final String contextId, final Boolean all) throws ContextException {
final Stream<Category> categories = getContextMap().get(contextId).getCategories().values().stream();
return all ? asCategorySummaries(categories) : asCategorySummaries(categories.filter(Category::isClaim));
}
private List<CategorySummary> asCategorySummaries(Stream<Category> categories) {
private List<CategorySummary> asCategorySummaries(final Stream<Category> categories) {
return categories
.map(c -> new CategorySummary()
.setId(c.getId())
.setLabel(c.getLabel())
.setHasConcept(c.hasConcepts()))
.collect(Collectors.toList());
.map(c -> new CategorySummary()
.setId(c.getId())
.setLabel(c.getLabel())
.setHasConcept(c.hasConcepts()))
.collect(Collectors.toList());
}
public List<ConceptSummary> listConcepts(final String categoryId, Boolean all) throws ContextException {
public List<ConceptSummary> listConcepts(final String categoryId, final Boolean all) throws ContextException {
final String contextId = StringUtils.substringBefore(categoryId, SEPARATOR);
final Stream<Concept> concepts = getContextMap().get(contextId)
.getCategories()
.get(categoryId)
.getConcepts()
.stream();
.getCategories()
.get(categoryId)
.getConcepts()
.stream();
return all ? asConceptSummaries(concepts) : asConceptSummaries(concepts.filter(Concept::isClaim));
}
private List<ConceptSummary> asConceptSummaries(Stream<Concept> concepts) {
private List<ConceptSummary> asConceptSummaries(final Stream<Concept> concepts) {
return concepts
.map(c -> new ConceptSummary()
.setId(c.getId())
.setLabel(c.getLabel())
.setHasSubConcept(c.hasSubConcepts()))
.collect(Collectors.toList());
.map(c -> new ConceptSummary()
.setId(c.getId())
.setLabel(c.getLabel())
.setHasSubConcept(c.hasSubConcepts()))
.collect(Collectors.toList());
}
public List<ConceptSummary> listSubConcepts(final String conceptId, Boolean all) throws ContextException {
public List<ConceptSummary> listSubConcepts(final String conceptId, final Boolean all) throws ContextException {
final List<String> ids = Splitter.on(SEPARATOR).splitToList(conceptId);
if (ids.size() < 3) {
throw new ContextException("");
}
if (ids.size() < 3) { throw new ContextException(""); }
final String contextId = ids.get(0);
final String categoryId = contextId + SEPARATOR + ids.get(1);
final Stream<Concept> concepts = getContextMap().get(contextId)
.getCategories()
.get(categoryId)
.getConcepts()
.stream()
.filter(c -> conceptId.equals(c.getId()));
.getCategories()
.get(categoryId)
.getConcepts()
.stream()
.filter(c -> conceptId.equals(c.getId()));
return all ?
mapConcepts(concepts.filter(Concept::isClaim).collect(Collectors.toList())) :
mapConcepts(concepts.collect(Collectors.toList()));
return all ? mapConcepts(concepts.filter(Concept::isClaim).collect(Collectors.toList())) : mapConcepts(concepts.collect(Collectors.toList()));
}
private List<ConceptSummary> mapConcepts(final List<Concept> concepts) {
if (concepts == null || concepts.isEmpty()) {
return null;
}
if (concepts == null || concepts.isEmpty()) { return null; }
return concepts.stream()
.map(c -> new ConceptSummary()
.setId(c.getId())
.setLabel(c.getLabel())
.setHasSubConcept(c.hasSubConcepts())
.setConcept(mapConcepts(c.getConcepts())))
.collect(Collectors.toList());
.map(c -> new ConceptSummary()
.setId(c.getId())
.setLabel(c.getLabel())
.setHasSubConcept(c.hasSubConcepts())
.setConcept(mapConcepts(c.getConcepts())))
.collect(Collectors.toList());
}
private Map<String, Context> getContextMap() throws ContextException {
@ -111,10 +105,9 @@ public class ContextApiCore {
private Map<String, Context> getContextMap(final List<String> type) throws ContextException {
try {
return isClient.getContextMap(type);
} catch (IOException e) {
} catch (final IOException e) {
throw new ContextException(e);
}
}
}

View File

@ -10,6 +10,11 @@ import org.springframework.web.bind.annotation.ResponseStatus;
@ResponseStatus(value = HttpStatus.INTERNAL_SERVER_ERROR)
public class ContextException extends Exception {
/**
*
*/
private static final long serialVersionUID = -5489369676370127052L;
public ContextException(final String message) {
super(message);
}

View File

@ -8,6 +8,11 @@ import org.springframework.web.bind.annotation.ResponseStatus;
@ResponseStatus(value = HttpStatus.NOT_FOUND)
public class ContextNotFoundException extends Exception {
/**
*
*/
private static final long serialVersionUID = -2026506752817353752L;
public ContextNotFoundException(final String msg) {
super(msg);
}

View File

@ -162,9 +162,9 @@ public class DsmApiController extends AbstractExporterController {
@ApiResponse(code = 200, message = "OK", response = DatasourceResponse.class),
@ApiResponse(code = 500, message = "unexpected error", response = ErrorMessage.class)
})
public SimpleResponse recentRegistered(@PathVariable final int size) throws Throwable {
public SimpleResponse<?> recentRegistered(@PathVariable final int size) throws Throwable {
final StopWatch stop = StopWatch.createStarted();
final SimpleResponse rsp = dsmCore.searchRecentRegistered(size);
final SimpleResponse<?> rsp = dsmCore.searchRecentRegistered(size);
return prepareResponse(1, size, stop, rsp);
}

View File

@ -9,18 +9,11 @@ import java.nio.charset.Charset;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.annotation.PostConstruct;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
@ -33,37 +26,23 @@ import org.springframework.jdbc.core.BeanPropertyRowMapper;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Component;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import eu.dnetlib.OpenaireExporterConfig;
import eu.dnetlib.enabling.datasources.common.AggregationInfo;
import eu.dnetlib.enabling.datasources.common.AggregationStage;
import eu.dnetlib.enabling.datasources.common.Datasource;
import eu.dnetlib.enabling.datasources.common.DsmException;
import eu.dnetlib.enabling.datasources.common.DsmForbiddenException;
import eu.dnetlib.enabling.datasources.common.DsmNotFoundException;
import eu.dnetlib.openaire.common.ISClient;
import eu.dnetlib.openaire.community.CommunityClient;
import eu.dnetlib.openaire.dsm.dao.DatasourceDao;
import eu.dnetlib.openaire.dsm.dao.DatasourceIndexClient;
import eu.dnetlib.openaire.dsm.dao.MongoLoggerClient;
import eu.dnetlib.openaire.dsm.dao.ObjectStoreClient;
import eu.dnetlib.openaire.dsm.dao.ResponseUtils;
import eu.dnetlib.openaire.dsm.dao.VocabularyClient;
import eu.dnetlib.openaire.dsm.dao.utils.DsmMappingUtils;
import eu.dnetlib.openaire.dsm.dao.utils.IndexDsInfo;
import eu.dnetlib.openaire.dsm.dao.utils.IndexRecordsInfo;
import eu.dnetlib.openaire.dsm.domain.AggregationHistoryResponse;
import eu.dnetlib.openaire.dsm.domain.ApiDetails;
import eu.dnetlib.openaire.dsm.domain.ApiDetailsResponse;
import eu.dnetlib.openaire.dsm.domain.DatasourceDetailResponse;
import eu.dnetlib.openaire.dsm.domain.DatasourceDetails;
import eu.dnetlib.openaire.dsm.domain.DatasourceDetailsUpdate;
import eu.dnetlib.openaire.dsm.domain.DatasourceInfo;
import eu.dnetlib.openaire.dsm.domain.DatasourceSnippetResponse;
import eu.dnetlib.openaire.dsm.domain.RegisteredDatasourceInfo;
import eu.dnetlib.openaire.dsm.domain.RequestFilter;
@ -87,20 +66,11 @@ public class DsmCore {
@Autowired
private ISClient isClient;
@Autowired
private ObjectStoreClient objectStoreClient;
@Autowired
private DatasourceIndexClient datasourceIndexClient;
@Autowired
private VocabularyClient vocabularyClient;
@Autowired
private DatasourceDao dsDao;
@Autowired
private OpenaireExporterConfig config;
private DatasourceDao<DatasourceDbEntry, ApiDbEntry> dsDao;
@Autowired
private JdbcTemplate jdbcTemplate;
@ -108,14 +78,6 @@ public class DsmCore {
@Autowired
private CommunityClient communityClient;
private ListeningExecutorService executor;
@PostConstruct
public void init() {
executor = MoreExecutors.listeningDecorator(new ScheduledThreadPoolExecutor(config.getRequestWorkers(),
new ThreadFactoryBuilder().setNameFormat("dsm-client-%d").build()));
}
public List<Country> listCountries() throws DsmException {
try {
return dsDao.listCountries();
@ -182,7 +144,7 @@ public class DsmCore {
public ApiDetailsResponse getApis(final String dsId) throws DsmException {
try {
final List<ApiDbEntry> apis = dsDao.getApis(dsId);
final List<? extends ApiDbEntry> apis = dsDao.getApis(dsId);
final List<ApiDetails> api = apis.stream()
.map(DsmMappingUtils::asDetails)
.collect(Collectors.toList());
@ -218,25 +180,24 @@ public class DsmCore {
public void updateDatasource(final DatasourceDetailsUpdate d) throws DsmException, DsmNotFoundException {
try {
// initialize with current values from DB
final Datasource ds = dsDao.getDs(d.getId());
final DatasourceDbEntry dbEntry = (DatasourceDbEntry) ds;
final DatasourceDbEntry ds = dsDao.getDs(d.getId());
if (dbEntry == null) { throw new DsmNotFoundException(String.format("ds '%s' does not exist", d.getId())); }
if (ds == null) { throw new DsmNotFoundException(String.format("ds '%s' does not exist", d.getId())); }
final DatasourceDbEntry update = asDbEntry(d);
if (d.getIdentities() != null) {
final Set<IdentityDbEntry> identities = new HashSet<>(
Stream.of(update.getIdentities(), dbEntry.getIdentities())
Stream.of(update.getIdentities(), ds.getIdentities())
.flatMap(Collection::stream)
.collect(Collectors.toMap(i -> i.getIssuertype() + i.getPid(), Function.identity(), (i1, i2) -> i1))
.values());
copyNonNullProperties(update, dbEntry);
dbEntry.setIdentities(identities);
copyNonNullProperties(update, ds);
ds.setIdentities(identities);
} else {
copyNonNullProperties(update, dbEntry);
copyNonNullProperties(update, ds);
}
dsDao.saveDs(dbEntry);
dsDao.saveDs(ds);
} catch (final Throwable e) {
log.error(ExceptionUtils.getStackTrace(e));
throw e;
@ -287,112 +248,7 @@ public class DsmCore {
// HELPERS //////////////
private DatasourceInfo enrichDatasourceInfo(final DatasourceDetails d, final CountDownLatch outerLatch, final Queue<Throwable> errors) {
final DatasourceInfo dsInfo = new DatasourceInfo().setDatasource(d);
getAggregationHistory(d.getId(), outerLatch, errors, dsInfo);
getIndexDsInfo(d.getId(), outerLatch, errors, dsInfo);
return dsInfo;
}
private void getAggregationHistory(final String dsId,
final CountDownLatch outerLatch,
final Queue<Throwable> errors,
final DatasourceInfo datasourceInfo) {
Futures.addCallback(executor.submit(() -> mongoLoggerClient.getAggregationHistory(dsId)), new FutureCallback<List<AggregationInfo>>() {
@Override
public void onSuccess(final List<AggregationInfo> info) {
setAggregationHistory(datasourceInfo, info);
outerLatch.countDown();
}
@Override
public void onFailure(final Throwable e) {
log.error(ExceptionUtils.getStackTrace(e));
errors.offer(e);
outerLatch.countDown();
}
}, executor);
}
private void setAggregationHistory(final DatasourceInfo datasourceInfo, final List<AggregationInfo> info) {
datasourceInfo.setAggregationHistory(info);
if (!info.isEmpty()) {
datasourceInfo
.setLastCollection(info.stream().filter(a -> AggregationStage.COLLECT.equals(a.getAggregationStage())).findFirst().get())
.setLastTransformation(info.stream().filter(a -> AggregationStage.TRANSFORM.equals(a.getAggregationStage())).findFirst().get());
}
}
private void getIndexDsInfo(final String dsId,
final CountDownLatch outerLatch,
final Queue<Throwable> errors,
final DatasourceInfo datasourceInfo) {
Futures.addCallback(executor.submit(() -> isClient.calculateCurrentIndexDsInfo()), new FutureCallback<IndexDsInfo>() {
@Override
public void onSuccess(final IndexDsInfo info) {
final CountDownLatch innerLatch = new CountDownLatch(2);
Futures.addCallback(executor.submit(() -> datasourceIndexClient.getIndexInfo(dsId, info, errors)), new FutureCallback<IndexRecordsInfo>() {
@Override
public void onSuccess(final IndexRecordsInfo info) {
datasourceInfo
.setIndexRecords(info.getTotal())
.setFundedContent(info.getFunded())
.setLastIndexingDate(info.getDate());
innerLatch.countDown();
}
@Override
public void onFailure(final Throwable e) {
errors.offer(e);
innerLatch.countDown();
}
}, executor);
Futures.addCallback(executor.submit(() -> objectStoreClient.getObjectStoreSize(isClient.getObjectStoreId(dsId))), new FutureCallback<Long>() {
@Override
public void onSuccess(final Long objectStoreSize) {
datasourceInfo.setFulltexts(objectStoreSize);
innerLatch.countDown();
}
@Override
public void onFailure(final Throwable e) {
errors.offer(e);
innerLatch.countDown();
}
}, executor);
waitLatch(innerLatch, errors, config.getRequestTimeout());
outerLatch.countDown();
}
@Override
public void onFailure(final Throwable e) {
// log.error(ExceptionUtils.getStackTrace(e));
errors.offer(e);
outerLatch.countDown();
}
}, executor);
}
private void waitLatch(final CountDownLatch latch, final Queue<Throwable> errors, final int waitSeconds) {
try {
if (!latch.await(waitSeconds, TimeUnit.SECONDS)) {
errors.offer(new TimeoutException("Waiting for requests to complete has timed out."));
}
} catch (final InterruptedException e) {
errors.offer(e);
}
}
public SimpleResponse searchRecentRegistered(final int size) throws Throwable {
public SimpleResponse<?> searchRecentRegistered(final int size) throws Throwable {
try {
final String sql =
IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/openaire/sql/recent_registered_datasources.sql.st"), Charset.defaultCharset());
@ -413,17 +269,13 @@ public class DsmCore {
IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/openaire/sql/recent_registered_datasources_fromDate_typology.st.sql"), Charset
.defaultCharset());
return jdbcTemplate.queryForObject(sql, new Object[] {
fromDate, typologyFilter + "%"
}, Long.class);
return jdbcTemplate.queryForObject(sql, Long.class, fromDate, typologyFilter + "%");
} else {
final String sql =
IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/openaire/sql/recent_registered_datasources_fromDate.st.sql"), Charset
.defaultCharset());
return jdbcTemplate.queryForObject(sql, new Object[] {
fromDate
}, Long.class);
return jdbcTemplate.queryForObject(sql, Long.class, fromDate);
}
} catch (final Throwable e) {

View File

@ -1,19 +1,20 @@
package eu.dnetlib.openaire.dsm.dao;
import com.google.common.collect.Lists;
import eu.dnetlib.OpenaireExporterConfig;
import eu.dnetlib.enabling.datasources.common.DsmException;
import eu.dnetlib.enabling.datasources.common.DsmForbiddenException;
import eu.dnetlib.enabling.datasources.common.DsmNotFoundException;
import eu.dnetlib.openaire.dsm.domain.RequestFilter;
import eu.dnetlib.openaire.dsm.domain.RequestSort;
import eu.dnetlib.openaire.dsm.domain.RequestSortOrder;
import eu.dnetlib.openaire.dsm.domain.db.ApiDbEntry;
import eu.dnetlib.openaire.dsm.domain.db.ApiParamDbEntry;
import eu.dnetlib.openaire.dsm.domain.db.DatasourceApiDbEntry;
import eu.dnetlib.openaire.dsm.domain.db.DatasourceDbEntry;
import eu.dnetlib.openaire.vocabularies.Country;
import eu.dnetlib.openaire.vocabularies.Vocabulary;
import static eu.dnetlib.openaire.common.ExporterConstants.OAI;
import static eu.dnetlib.openaire.common.ExporterConstants.SET;
import static eu.dnetlib.openaire.dsm.dao.DatasourceSpecs.apiSpec;
import static eu.dnetlib.openaire.dsm.dao.DatasourceSpecs.dsRegisteredbyNotNullSpec;
import static eu.dnetlib.openaire.dsm.dao.DatasourceSpecs.dsSpec;
import java.sql.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import javax.persistence.EntityNotFoundException;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@ -26,20 +27,21 @@ import org.springframework.data.jpa.domain.Specification;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import javax.persistence.EntityNotFoundException;
import java.sql.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import static eu.dnetlib.openaire.common.ExporterConstants.OAI;
import static eu.dnetlib.openaire.common.ExporterConstants.SET;
import static eu.dnetlib.openaire.dsm.dao.DatasourceSpecs.apiSpec;
import static eu.dnetlib.openaire.dsm.dao.DatasourceSpecs.dsSpec;
import static eu.dnetlib.openaire.dsm.dao.DatasourceSpecs.dsRegisteredbyNotNullSpec;
import com.google.common.collect.Lists;
import eu.dnetlib.DnetOpenaireExporterProperties;
import eu.dnetlib.enabling.datasources.common.DsmException;
import eu.dnetlib.enabling.datasources.common.DsmForbiddenException;
import eu.dnetlib.enabling.datasources.common.DsmNotFoundException;
import eu.dnetlib.openaire.dsm.domain.RequestFilter;
import eu.dnetlib.openaire.dsm.domain.RequestSort;
import eu.dnetlib.openaire.dsm.domain.RequestSortOrder;
import eu.dnetlib.openaire.dsm.domain.db.ApiDbEntry;
import eu.dnetlib.openaire.dsm.domain.db.ApiParamDbEntry;
import eu.dnetlib.openaire.dsm.domain.db.DatasourceApiDbEntry;
import eu.dnetlib.openaire.dsm.domain.db.DatasourceDbEntry;
import eu.dnetlib.openaire.vocabularies.Country;
import eu.dnetlib.openaire.vocabularies.Vocabulary;
/**
* Created by claudio on 20/10/2016.
@ -51,7 +53,7 @@ public class DatasourceDaoImpl implements DatasourceDao<DatasourceDbEntry, ApiDb
private static final Log log = LogFactory.getLog(DatasourceDao.class);
@Autowired
private OpenaireExporterConfig config;
private DnetOpenaireExporterProperties config;
@Autowired
private CountryTermRepository countryTermRepository;
@ -72,25 +74,33 @@ public class DatasourceDaoImpl implements DatasourceDao<DatasourceDbEntry, ApiDb
public List<Country> listCountries() throws DsmException {
final List<Country> countries = Lists.newArrayList();
final Vocabulary v = vocabularyClient.getCountries();
countries.addAll(countryTermRepository.findAll().stream()
.filter(Objects::nonNull)
.map(t -> new Country(t.getTerm(), v.getEnglishName(t.getTerm())))
.collect(Collectors.toList()));
countries.addAll(countryTermRepository.findAll()
.stream()
.filter(Objects::nonNull)
.map(t -> new Country(t.getTerm(), v.getEnglishName(t.getTerm())))
.collect(Collectors.toList()));
return countries;
}
@Override
public Page<DatasourceDbEntry> search(final RequestSort requestSortBy, RequestSortOrder order, RequestFilter requestFilter, final int page, final int size)
throws DsmException {
public Page<DatasourceDbEntry> search(final RequestSort requestSortBy,
final RequestSortOrder order,
final RequestFilter requestFilter,
final int page,
final int size)
throws DsmException {
final Specification<DatasourceDbEntry> spec = dsSpec(requestSortBy, order, requestFilter);
return dsRepository.findAll(spec, PageRequest.of(page, size));
}
@Override
public Page<DatasourceDbEntry> searchRegistered(final RequestSort requestSortBy, RequestSortOrder order, RequestFilter requestFilter, final int page, final int size)
throws DsmException {
public Page<DatasourceDbEntry> searchRegistered(final RequestSort requestSortBy,
final RequestSortOrder order,
final RequestFilter requestFilter,
final int page,
final int size)
throws DsmException {
final Specification<DatasourceDbEntry> spec = dsSpec(requestSortBy, order, requestFilter).and(dsRegisteredbyNotNullSpec());
return dsRepository.findAll(spec, PageRequest.of(page, size));
@ -98,7 +108,7 @@ public class DatasourceDaoImpl implements DatasourceDao<DatasourceDbEntry, ApiDb
@Override
public DatasourceDbEntry getDs(final String dsId) throws DsmException {
return dsRepository.getOne(dsId);
return dsRepository.findById(dsId).orElseThrow(() -> new DsmException("Datasource not found. ID: " + dsId));
}
@Override
@ -114,7 +124,7 @@ public class DatasourceDaoImpl implements DatasourceDao<DatasourceDbEntry, ApiDb
}
@Override
public void updateCompliance(String dsId, String apiId, String compliance, boolean override) {
public void updateCompliance(final String dsId, final String apiId, final String compliance, final boolean override) {
log.info(String.format("setting compatibility = '%s' for ds '%s'", compliance, apiId));
apiRepository.updateCompatibility(apiId, compliance);
}
@ -126,15 +136,13 @@ public class DatasourceDaoImpl implements DatasourceDao<DatasourceDbEntry, ApiDb
@Override
public void deleteApi(final String dsId, final String apiId) throws DsmForbiddenException, DsmNotFoundException {
final ApiDbEntry api = apiRepository.getOne(apiId);
final ApiDbEntry api = apiRepository.findById(apiId).orElseThrow(() -> new DsmNotFoundException("Api not found. ID: " + apiId));
try {
if (!api.getRemovable()) {
throw new DsmForbiddenException(HttpStatus.SC_UNAUTHORIZED, "api is not removable");
}
if (!api.getRemovable()) { throw new DsmForbiddenException(HttpStatus.SC_UNAUTHORIZED, "api is not removable"); }
apiRepository.deleteById(apiId);
log.info(String.format("deleted api '%s'", apiId));
} catch (EntityNotFoundException e) {
} catch (final EntityNotFoundException e) {
throw new DsmNotFoundException(HttpStatus.SC_NOT_FOUND, "api not found");
}
}
@ -167,7 +175,7 @@ public class DatasourceDaoImpl implements DatasourceDao<DatasourceDbEntry, ApiDb
@Override
public void updateName(final String dsId, final String officialname, final String englishname) {
//TODO what if one of the two names is null or empty?
// TODO what if one of the two names is null or empty?
dsRepository.setDatasourcename(dsId, officialname, englishname);
}
@ -189,7 +197,7 @@ public class DatasourceDaoImpl implements DatasourceDao<DatasourceDbEntry, ApiDb
@Override
@Transactional
public boolean upsertApiOaiSet(final String apiId, final String oaiSet) throws DsmException {
final ApiDbEntry api = apiRepository.getOne(apiId);
final ApiDbEntry api = apiRepository.findById(apiId).orElseThrow(() -> new DsmNotFoundException("Api not found. ID: " + apiId));
if (OAI.equalsIgnoreCase(api.getProtocol())) {
final Set<ApiParamDbEntry> apiParams = api.getApiParams();
@ -211,10 +219,12 @@ public class DatasourceDaoImpl implements DatasourceDao<DatasourceDbEntry, ApiDb
public List<String> findApiBaseURLs(final RequestFilter requestFilter, final int page, final int size) throws DsmException {
final PageRequest pageable = PageRequest.of(page, size);
final Specification<DatasourceApiDbEntry> spec = apiSpec(requestFilter);
final Set<String> set = dsApiRepository.findAll(spec, pageable).getContent().stream()
.map(DatasourceApiDbEntry::getBaseurl)
.filter(StringUtils::isNotBlank)
.collect(Collectors.toCollection(HashSet::new));
final Set<String> set = dsApiRepository.findAll(spec, pageable)
.getContent()
.stream()
.map(DatasourceApiDbEntry::getBaseurl)
.filter(StringUtils::isNotBlank)
.collect(Collectors.toCollection(HashSet::new));
return Lists.newArrayList(set);
}
@ -228,11 +238,9 @@ public class DatasourceDaoImpl implements DatasourceDao<DatasourceDbEntry, ApiDb
final Vocabulary typologies = vocabularyClient.getDatasourceTypologies();
if (!typologies.hasCode(typology)) {
throw new DsmException(
HttpStatus.SC_BAD_REQUEST,
String.format(
"invalid datasource typology '%s', provide one according to vocabulary %s",
typology,
config.getVocabularies().getDatasourceTypologiesEndpoint()));
HttpStatus.SC_BAD_REQUEST,
String.format("invalid datasource typology '%s', provide one according to vocabulary %s", typology, config.getVocabularies()
.getDatasourceTypologiesEndpoint()));
}
dsRepository.setTypology(dsId, typology);
}
@ -251,8 +259,8 @@ public class DatasourceDaoImpl implements DatasourceDao<DatasourceDbEntry, ApiDb
dsRepository.setPlatform(dsId, platform);
}
//HELPER
private void ensureRegistrationDate(String dsId) {
// HELPER
private void ensureRegistrationDate(final String dsId) {
if (!dsRepository.hasRegistrationdate(dsId)) {
log.info("setting registration date for datasource: " + dsId);
dsRepository.setRegistrationDate(dsId, new Date(System.currentTimeMillis()));

View File

@ -1,8 +1,9 @@
package eu.dnetlib.openaire.dsm.dao;
import java.sql.Date;
import javax.transaction.Transactional;
import eu.dnetlib.openaire.dsm.domain.db.DatasourceDbEntry;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.JpaSpecificationExecutor;
@ -10,7 +11,7 @@ import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.stereotype.Repository;
import java.sql.Date;
import eu.dnetlib.openaire.dsm.domain.db.DatasourceDbEntry;
/**
* Created by claudio on 12/04/2017.
@ -19,8 +20,6 @@ import java.sql.Date;
@ConditionalOnProperty(value = "openaire.exporter.enable.dsm", havingValue = "true")
public interface DatasourceDbEntryRepository extends JpaRepository<DatasourceDbEntry, String>, JpaSpecificationExecutor<DatasourceDbEntry> {
DatasourceDbEntry findOneById(String id);
@Query("select d.managed from #{#entityName} d where d.id = ?1")
boolean isManaged(String id);

View File

@ -12,7 +12,7 @@ import javax.annotation.PreDestroy;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.*;
import eu.dnetlib.OpenaireExporterConfig;
import eu.dnetlib.DnetOpenaireExporterProperties;
import eu.dnetlib.enabling.datasources.common.DsmException;
import eu.dnetlib.miscutils.functional.hash.Hashing;
import eu.dnetlib.openaire.dsm.dao.utils.DsmMappingUtils;
@ -46,7 +46,7 @@ public class DatasourceIndexClientImpl implements DatasourceIndexClient {
public static final String DSVERSION = "__dsversion";
@Autowired
private OpenaireExporterConfig config;
private DnetOpenaireExporterProperties config;
private ListeningExecutorService executor;

View File

@ -16,8 +16,8 @@ import com.mongodb.BasicDBObject;
import com.mongodb.MongoClient;
import com.mongodb.client.FindIterable;
import com.mongodb.client.MongoCollection;
import eu.dnetlib.OpenaireExporterConfig;
import eu.dnetlib.OpenaireExporterConfig.Datasource;
import eu.dnetlib.DnetOpenaireExporterProperties;
import eu.dnetlib.DnetOpenaireExporterProperties.Datasource;
import eu.dnetlib.enabling.datasources.common.AggregationInfo;
import eu.dnetlib.enabling.datasources.common.AggregationStage;
import eu.dnetlib.enabling.datasources.common.DsmException;
@ -57,7 +57,7 @@ public class MongoLoggerClientImpl implements MongoLoggerClient {
private MongoClient datasourcePublisherMongoClient;
@Autowired
private OpenaireExporterConfig config;
private DnetOpenaireExporterProperties config;
private final static String LOADTIME = "loadtime";
private final LoadingCache<String, Instant> loadingCache = CacheBuilder.newBuilder()

View File

@ -5,7 +5,16 @@ import java.util.Queue;
import com.google.common.collect.Lists;
import eu.dnetlib.openaire.dsm.domain.*;
import eu.dnetlib.openaire.dsm.domain.ApiDetails;
import eu.dnetlib.openaire.dsm.domain.ApiDetailsResponse;
import eu.dnetlib.openaire.dsm.domain.DatasourceDetailResponse;
import eu.dnetlib.openaire.dsm.domain.DatasourceDetails;
import eu.dnetlib.openaire.dsm.domain.DatasourceInfo;
import eu.dnetlib.openaire.dsm.domain.DatasourceSearchResponse;
import eu.dnetlib.openaire.dsm.domain.DatasourceSnippetExtended;
import eu.dnetlib.openaire.dsm.domain.DatasourceSnippetResponse;
import eu.dnetlib.openaire.dsm.domain.Header;
import eu.dnetlib.openaire.dsm.domain.SimpleResponse;
public class ResponseUtils {
@ -33,18 +42,17 @@ public class ResponseUtils {
return rsp;
}
public static Header header(final Queue<Throwable> errors, final long total) {
return Header.newInsance()
.setExceptions(errors)
.setTotal(total);
.setExceptions(errors)
.setTotal(total);
}
public static Header header(final long total) {
return header(Lists.newLinkedList(), total);
}
public static SimpleResponse simpleResponse(final List<?> list) {
public static SimpleResponse<?> simpleResponse(final List<?> list) {
final SimpleResponse rsp = new SimpleResponse().setResponse(list);;
rsp.setHeader(header(Lists.newLinkedList(), list.size()));
return rsp;

View File

@ -1,6 +1,6 @@
package eu.dnetlib.openaire.dsm.dao;
import eu.dnetlib.OpenaireExporterConfig;
import eu.dnetlib.DnetOpenaireExporterProperties;
import eu.dnetlib.enabling.datasources.common.DsmException;
import eu.dnetlib.openaire.vocabularies.Vocabulary;
import org.apache.commons.logging.Log;
@ -22,7 +22,7 @@ public class VocabularyClientImpl implements VocabularyClient {
private static final Log log = LogFactory.getLog(VocabularyClientImpl.class);
@Autowired
private OpenaireExporterConfig config;
private DnetOpenaireExporterProperties config;
@Override
@Cacheable("vocabularies-cache")

View File

@ -3,12 +3,18 @@ package eu.dnetlib.openaire.dsm.domain;
import java.util.HashMap;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import io.swagger.annotations.ApiModel;
@JsonAutoDetect
@ApiModel(value = "Request filter", description = "field name and value pairs")
public class RequestFilter extends HashMap<FilterName, Object> {
/**
*
*/
private static final long serialVersionUID = 5501969842482508379L;
public RequestFilter() {}
}

View File

@ -18,7 +18,7 @@ public class SimpleResponse<T> extends Response {
return response;
}
public SimpleResponse setResponse(final List<T> response) {
public SimpleResponse<T> setResponse(final List<T> response) {
this.response = response;
return this;
}

View File

@ -20,7 +20,7 @@ public class ApiDbEntry extends Api<ApiParamDbEntry> {
return compatibilityOverride;
}
public Api setCompatibilityOverride(final String compatibilityOverride) {
public Api<ApiParamDbEntry> setCompatibilityOverride(final String compatibilityOverride) {
this.compatibilityOverride = compatibilityOverride;
return this;
}

View File

@ -4,6 +4,7 @@ import javax.persistence.Embeddable;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import eu.dnetlib.enabling.datasources.common.ApiParamKey;
/**
@ -13,6 +14,11 @@ import eu.dnetlib.enabling.datasources.common.ApiParamKey;
@JsonIgnoreProperties(ignoreUnknown = true)
public class ApiParamKeyDbEntry extends ApiParamKey<ApiDbEntry> {
/**
*
*/
private static final long serialVersionUID = 1L;
@Override
@JsonIgnore
public ApiDbEntry getApi() {

View File

@ -172,4 +172,12 @@ public class DatasourceApiDbEntry {
public void setId(String id) {
this.id = id;
}
public String getCollectedfrom() {
return collectedfrom;
}
public void setCollectedfrom(String collectedfrom) {
this.collectedfrom = collectedfrom;
}
}

View File

@ -3,43 +3,41 @@ package eu.dnetlib.openaire.funders;
import java.util.List;
import java.util.stream.Collectors;
import eu.dnetlib.openaire.funders.domain.ConversionUtils;
import eu.dnetlib.openaire.funders.domain.ExtendedFunderDetails;
import eu.dnetlib.openaire.funders.domain.FunderDetails;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.data.domain.PageRequest;
import org.springframework.stereotype.Component;
import eu.dnetlib.openaire.funders.domain.ConversionUtils;
import eu.dnetlib.openaire.funders.domain.ExtendedFunderDetails;
import eu.dnetlib.openaire.funders.domain.FunderDetails;
@Component
@ConditionalOnProperty(value = "openaire.exporter.enable.funders", havingValue = "true")
public class FunderDao {
private static final Log log = LogFactory.getLog(FunderDao.class);
@Autowired
private FunderRepository funderRepository;
public ExtendedFunderDetails getExtendedFunderDetails(final String funderId) throws FundersApiException {
return ConversionUtils.asExtendedFunderDetails(funderRepository.getOne(funderId));
return ConversionUtils
.asExtendedFunderDetails(funderRepository.findById(funderId).orElseThrow(() -> new FundersApiException("Funder not found. ID: " + funderId)));
}
public List<FunderDetails> listFunderDetails(final int page, final int size) throws FundersApiException {
return funderRepository.findAll(PageRequest.of(page, size))
.getContent()
.stream()
.map(ConversionUtils::asFunderDetails)
.collect(Collectors.toList());
.getContent()
.stream()
.map(ConversionUtils::asFunderDetails)
.collect(Collectors.toList());
}
public List<String> listFunderIds(final int page, final int size) throws FundersApiException {
return funderRepository.findAll(PageRequest.of(page, size))
.getContent()
.stream()
.map(f -> f.getId())
.collect(Collectors.toList());
.getContent()
.stream()
.map(f -> f.getId())
.collect(Collectors.toList());
}
}

View File

@ -2,8 +2,12 @@ package eu.dnetlib.openaire.funders;
public class FundersApiException extends Exception {
public FundersApiException() {
}
/**
*
*/
private static final long serialVersionUID = 842353818131133522L;
public FundersApiException() {}
public FundersApiException(final String message) {
super(message);

View File

@ -1,24 +1,32 @@
package eu.dnetlib.openaire.info;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import eu.dnetlib.openaire.common.AbstractExporterController;
import eu.dnetlib.openaire.common.ExporterConstants;
import io.swagger.annotations.*;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.format.annotation.DateTimeFormat;
import org.springframework.web.bind.annotation.*;
import java.time.LocalDate;
import java.util.List;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.web.bind.annotation.CrossOrigin;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import eu.dnetlib.openaire.common.AbstractExporterController;
import eu.dnetlib.openaire.common.ExporterConstants;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiResponse;
import io.swagger.annotations.ApiResponses;
@RestController
@CrossOrigin(origins = { "*" })
@CrossOrigin(origins = {
"*"
})
@ConditionalOnProperty(value = "openaire.exporter.enable.info", havingValue = "true")
@io.swagger.annotations.Api(tags = "OpenAIRE Info API", description = "the OpenAIRE info API")
public class InfoController extends AbstractExporterController {
@ -30,51 +38,66 @@ public class InfoController extends AbstractExporterController {
@Autowired
private JdbcInfoDao jdbcInfoDao;
@RequestMapping(value = "/info/{infoKey}", produces = { "application/json" }, method = RequestMethod.GET)
@ApiOperation(value = "get info date", notes = "get info date", tags = { ExporterConstants.R }, response = LocalDate.class)
@RequestMapping(value = "/info/{infoKey}", produces = {
"application/json"
}, method = RequestMethod.GET)
@ApiOperation(value = "get info date", notes = "get info date", tags = {
ExporterConstants.R
}, response = LocalDate.class)
@ApiResponses(value = {
@ApiResponse(code = 200, message = "OK", response = LocalDate.class),
@ApiResponse(code = 500, message = "unexpected error", response = ErrorMessage.class) })
public LocalDate getDate(@PathVariable final String infoKey){
JdbcInfoDao.DATE_INFO info = JdbcInfoDao.DATE_INFO.valueOf(infoKey);
if(info == null) throw new RuntimeException(infoKey + " not recognized");
@ApiResponse(code = 200, message = "OK", response = LocalDate.class),
@ApiResponse(code = 500, message = "unexpected error", response = ErrorMessage.class)
})
public LocalDate getDate(@PathVariable final String infoKey) {
final JdbcInfoDao.DATE_INFO info = JdbcInfoDao.DATE_INFO.valueOf(infoKey);
if (info == null) { throw new RuntimeException(infoKey + " not recognized"); }
return jdbcInfoDao.getDate(info);
}
@RequestMapping(value = "/info", produces = { "application/json" }, method = RequestMethod.GET)
@ApiOperation(value = "get all the info date", notes = "get all the info date", tags = { ExporterConstants.R }, response = Map.class)
@RequestMapping(value = "/info", produces = {
"application/json"
}, method = RequestMethod.GET)
@ApiOperation(value = "get all the info date", notes = "get all the info date", tags = {
ExporterConstants.R
}, response = Map.class)
@ApiResponses(value = {
@ApiResponse(code = 200, message = "OK", response = LocalDate.class),
@ApiResponse(code = 500, message = "unexpected error", response = ErrorMessage.class) })
public Map<String, LocalDate> listInfo(){
Map<String, LocalDate> map = Maps.newHashMap();
for(JdbcInfoDao.DATE_INFO dateInfo : JdbcInfoDao.DATE_INFO.values()){
@ApiResponse(code = 200, message = "OK", response = LocalDate.class),
@ApiResponse(code = 500, message = "unexpected error", response = ErrorMessage.class)
})
public Map<String, LocalDate> listInfo() {
final Map<String, LocalDate> map = Maps.newHashMap();
for (final JdbcInfoDao.DATE_INFO dateInfo : JdbcInfoDao.DATE_INFO.values()) {
map.put(dateInfo.name(), jdbcInfoDao.getDate(dateInfo));
}
return map;
}
@RequestMapping(value = "/info/keys", produces = { "application/json" }, method = RequestMethod.GET)
@ApiOperation(value = "get the available keys", notes = "get the available keys", tags = { ExporterConstants.R },
response = String.class, responseContainer = "List")
@RequestMapping(value = "/info/keys", produces = {
"application/json"
}, method = RequestMethod.GET)
@ApiOperation(value = "get the available keys", notes = "get the available keys", tags = {
ExporterConstants.R
}, response = String.class, responseContainer = "List")
@ApiResponses(value = {
@ApiResponse(code = 200, message = "OK", response = LocalDate.class),
@ApiResponse(code = 500, message = "unexpected error", response = ErrorMessage.class) })
public List<String> listInfoKeys(){
List<String> keys = Lists.newArrayList();
for(JdbcInfoDao.DATE_INFO dateInfo : JdbcInfoDao.DATE_INFO.values()){
@ApiResponse(code = 200, message = "OK", response = LocalDate.class),
@ApiResponse(code = 500, message = "unexpected error", response = ErrorMessage.class)
})
public List<String> listInfoKeys() {
final List<String> keys = Lists.newArrayList();
for (final JdbcInfoDao.DATE_INFO dateInfo : JdbcInfoDao.DATE_INFO.values()) {
keys.add(dateInfo.name());
}
return keys;
}
@RequestMapping(value = "/info/dropCache", produces = { "application/json" }, method = RequestMethod.GET)
@ApiOperation(value = "Drops the info cache", notes = "Drops the info cache", tags = { ExporterConstants.R })
public void dropCache(){
@RequestMapping(value = "/info/dropCache", produces = {
"application/json"
}, method = RequestMethod.GET)
@ApiOperation(value = "Drops the info cache", notes = "Drops the info cache", tags = {
ExporterConstants.R
})
public void dropCache() {
jdbcInfoDao.dropCache();
}
}

View File

@ -1,15 +1,6 @@
package eu.dnetlib.openaire.info;
import eu.dnetlib.openaire.project.dao.ValueCleaner;
import org.antlr.stringtemplate.StringTemplate;
import java.io.IOException;
import java.io.OutputStream;
import java.sql.SQLException;
import java.time.LocalDate;
import java.util.Date;
import java.util.Map;
import java.util.zip.ZipOutputStream;
public interface JdbcInfoDao {

View File

@ -1,20 +1,13 @@
package eu.dnetlib.openaire.info;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import eu.dnetlib.OpenaireExporterConfig;
import eu.dnetlib.openaire.project.dao.JdbcApiDao;
import eu.dnetlib.openaire.project.dao.ProjectTsvRepository;
import eu.dnetlib.openaire.project.dao.ValueCleaner;
import eu.dnetlib.openaire.project.domain.Project;
import eu.dnetlib.openaire.project.domain.db.ProjectDetails;
import eu.dnetlib.openaire.project.domain.db.ProjectTsv;
import org.antlr.stringtemplate.StringTemplate;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.commons.lang3.time.DateUtils;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.time.LocalDate;
import javax.sql.DataSource;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.annotation.Autowired;
@ -24,31 +17,12 @@ import org.springframework.cache.annotation.Cacheable;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import javax.sql.DataSource;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.Charset;
import java.sql.*;
import java.time.Duration;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.time.format.FormatStyle;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.zip.GZIPOutputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import eu.dnetlib.DnetOpenaireExporterProperties;
/**
* Created by alessia on 29/04/2020
*
* Get and set info dates via JDBC. Dates are expected to be in a table named 'info' with two columns:
* key - see JdbcInfoDao.DATE_INFO enum
* Get and set info dates via JDBC. Dates are expected to be in a table named 'info' with two columns: key - see JdbcInfoDao.DATE_INFO enum
* value - the date (LocalDate, no time)
*
*/
@ -59,7 +33,7 @@ public class JdbcInfoDaoImpl implements JdbcInfoDao {
private static final Log log = LogFactory.getLog(JdbcInfoDaoImpl.class);
@Autowired
private OpenaireExporterConfig config;
private DnetOpenaireExporterProperties config;
@Autowired
private DataSource dataSource;
@ -71,11 +45,11 @@ public class JdbcInfoDaoImpl implements JdbcInfoDao {
LocalDate date = null;
try (final Connection con = getConn(); final PreparedStatement stm = getStm(sql, con, dateInfo.name()); final ResultSet rs = getRs(stm)) {
log.info("loading info "+dateInfo+" Query: "+stm.toString());
if(rs.next()) {
log.info("loading info " + dateInfo + " Query: " + stm.toString());
if (rs.next()) {
date = rs.getObject("value", LocalDate.class);
}
} catch (SQLException e) {
} catch (final SQLException e) {
throw new RuntimeException(e);
}
@ -83,7 +57,9 @@ public class JdbcInfoDaoImpl implements JdbcInfoDao {
}
@Override
@CacheEvict(cacheNames = { "info" }, allEntries = true)
@CacheEvict(cacheNames = {
"info"
}, allEntries = true)
@Scheduled(fixedDelayString = "${openaire.exporter.cache.ttl}")
public void dropCache() {
log.debug("dropped info cache");

View File

@ -2,9 +2,6 @@ package eu.dnetlib.openaire.project;
import java.util.regex.Pattern;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
public class ProjectQueryParams {
private final Pattern patternFundingStream = Pattern.compile("(\\w*(::|%| )*)*");
@ -20,8 +17,6 @@ public class ProjectQueryParams {
private String endFrom = null;
private String endUntil = null;
private static final Log log = LogFactory.getLog(ProjectQueryParams.class); // NOPMD by marko on 11/24/08 5:02 PM
public String getFundingProgramme() {
return fundingProgramme;
}
@ -71,15 +66,16 @@ public class ProjectQueryParams {
}
protected String verifyParam(final String p) {
if ((p != null) && !patternFundingStream.matcher(p).matches()) {
if (p != null && !patternFundingStream.matcher(p).matches()) {
throw new IllegalArgumentException(String.format("Parameter '%s' contains an invalid character", p));
}
return p;
}
protected String verifyDateParam(final String date) {
if ((date != null) && !patternDate.matcher(date).matches()) {
throw new IllegalArgumentException(String.format("Parameter date '%s' contains an invalid character. Accepted pattern is %s", date, patternDate.toString()));
if (date != null && !patternDate.matcher(date).matches()) {
throw new IllegalArgumentException(
String.format("Parameter date '%s' contains an invalid character. Accepted pattern is %s", date, patternDate.toString()));
}
return date;
}

View File

@ -13,8 +13,8 @@ import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.google.common.xml.XmlEscapers;
import eu.dnetlib.OpenaireExporterConfig;
import eu.dnetlib.OpenaireExporterConfig.Project;
import eu.dnetlib.DnetOpenaireExporterProperties;
import eu.dnetlib.DnetOpenaireExporterProperties.Project;
import eu.dnetlib.openaire.common.AbstractExporterController;
import eu.dnetlib.openaire.common.ExporterConstants;
import eu.dnetlib.openaire.project.domain.db.ProjectTsv;
@ -49,7 +49,7 @@ public class ProjectsController extends AbstractExporterController {
public final static String UTF8 = "UTF-8";
@Autowired
private OpenaireExporterConfig config;
private DnetOpenaireExporterProperties config;
@Autowired
private JdbcApiDao dao;

View File

@ -21,7 +21,7 @@ import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import eu.dnetlib.OpenaireExporterConfig;
import eu.dnetlib.DnetOpenaireExporterProperties;
import eu.dnetlib.openaire.project.domain.Project;
import eu.dnetlib.openaire.project.domain.db.ProjectDetails;
import eu.dnetlib.openaire.project.domain.db.ProjectTsv;
@ -48,7 +48,7 @@ public class JdbcApiDaoImpl implements JdbcApiDao {
private static final Log log = LogFactory.getLog(JdbcApiDaoImpl.class);
@Autowired
private OpenaireExporterConfig config;
private DnetOpenaireExporterProperties config;
@Autowired
private DataSource dataSource;

View File

@ -0,0 +1,176 @@
INSERT INTO dsm_identities(pid, issuertype) VALUES ('77', 'roar');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('680', 'roar');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('260', 'roar');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('637', 'roar');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('528', 'roar');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('577', 'roar');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('566', 'roar');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('241', 'roar');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('491', 'roar');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('324', 'roar');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('375', 'roar');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('152', 'roar');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('1025', 'roar');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('10.13039/100004440', 'doi');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('10.13039/100000001', 'doi');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('10.13039/100000002', 'doi');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('10.13039/501100007601', 'doi');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('10.13039/501100001871', 'doi');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('10.13039/501100008982', 'doi');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('10.13039/501100000925', 'doi');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('10.13039/501100000923', 'doi');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('10.13039/501100001602', 'doi');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('10.13039/501100002428', 'doi');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('10.13039/501100000690', 'doi');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('10.13039/501100002341', 'doi');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('0000 0001 2242 8989', 'isni');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('0000 0004 5900 900X', 'isni');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('0000 0001 2169 9189', 'isni');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('0000 0001 2181 2823', 'isni');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('0000 0004 0427 7672', 'isni');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('0000 0001 1958 7073', 'isni');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('0000 0001 2169 1945', 'isni');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('0000 0001 2154 0709', 'isni');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('0000 0001 2176 1982', 'isni');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('0000 0001 0789 9694', 'isni');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('0000 0001 2228 3249', 'isni');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('0000 0001 2176 7727', 'isni');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('0000 0004 0611 9213', 'isni');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('0000 0004 0609 4140', 'isni');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('0000 0004 4663 8325', 'isni');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('0000 0001 1092 7772', 'isni');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('0000 0004 0452 5752', 'isni');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('0000 0001 2297 5165', 'isni');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('0000 0001 0672 3101', 'isni');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('0000 0001 1957 0992', 'isni');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('0000 0001 1091 8438', 'isni');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('0000 0001 0507 0997', 'isni');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('0000 0001 0665 7300', 'isni');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('0000 0001 0685 2712', 'isni');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('0000 0004 0647 6886', 'isni');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('0000 0001 0943 9683', 'isni');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('0000 0001 2096 9829', 'isni');
INSERT INTO dsm_identities(pid, issuertype) VALUES ('0000 0001 1957 9997', 'isni');
INSERT INTO funder_identity(funder, pid) VALUES ('H2020', '680');
INSERT INTO funder_identity(funder, pid) VALUES ('H2020', '10.13039/501100007601');
INSERT INTO funder_identity(funder, pid) VALUES ('H2020', '0000 0001 2242 8989');
INSERT INTO funder_identity(funder, pid) VALUES ('FP7', '0000 0004 5900 900X');
INSERT INTO funder_identity(funder, pid) VALUES ('FCT', '260');
INSERT INTO funder_identity(funder, pid) VALUES ('FCT', '10.13039/501100001871');
INSERT INTO funder_identity(funder, pid) VALUES ('FCT', '0000 0001 2169 9189');
INSERT INTO funder_identity(funder, pid) VALUES ('FCT', '0000 0001 2181 2823');
INSERT INTO funder_identity(funder, pid) VALUES ('WT', '637');
INSERT INTO funder_identity(funder, pid) VALUES ('WT', '10.13039/100004440');
INSERT INTO funder_identity(funder, pid) VALUES ('WT', '0000 0004 0427 7672');
INSERT INTO funder_identity(funder, pid) VALUES ('NSF', '528');
INSERT INTO funder_identity(funder, pid) VALUES ('NSF', '10.13039/501100008982');
INSERT INTO funder_identity(funder, pid) VALUES ('NSF', '10.13039/100000001');
INSERT INTO funder_identity(funder, pid) VALUES ('NSF', '0000 0001 1958 7073');
INSERT INTO funder_identity(funder, pid) VALUES ('NSF', '0000 0001 2169 1945');
INSERT INTO funder_identity(funder, pid) VALUES ('NSF', '0000 0001 2154 0709');
INSERT INTO funder_identity(funder, pid) VALUES ('NHMRC', '577');
INSERT INTO funder_identity(funder, pid) VALUES ('NHMRC', '10.13039/501100000925');
INSERT INTO funder_identity(funder, pid) VALUES ('NHMRC', '0000 0001 2176 1982');
INSERT INTO funder_identity(funder, pid) VALUES ('NHMRC', '0000 0001 0789 9694');
INSERT INTO funder_identity(funder, pid) VALUES ('NHMRC', '0000 0001 2228 3249');
INSERT INTO funder_identity(funder, pid) VALUES ('NHMRC', '0000 0001 2176 7727');
INSERT INTO funder_identity(funder, pid) VALUES ('NHMRC', '0000 0001 0789 9694');
INSERT INTO funder_identity(funder, pid) VALUES ('NHMRC', '0000 0001 2228 3249');
INSERT INTO funder_identity(funder, pid) VALUES ('ARC', '566');
INSERT INTO funder_identity(funder, pid) VALUES ('ARC', '10.13039/501100000923');
INSERT INTO funder_identity(funder, pid) VALUES ('ARC', '0000 0004 0611 9213');
INSERT INTO funder_identity(funder, pid) VALUES ('MSES', '0000 0004 0609 4140');
INSERT INTO funder_identity(funder, pid) VALUES ('CSF', '0000 0004 4663 8325');
INSERT INTO funder_identity(funder, pid) VALUES ('NWO', '241');
INSERT INTO funder_identity(funder, pid) VALUES ('NWO', '0000 0001 1092 7772');
INSERT INTO funder_identity(funder, pid) VALUES ('SFI', '10.13039/501100001602');
INSERT INTO funder_identity(funder, pid) VALUES ('SFI', '0000 0004 0452 5752');
INSERT INTO funder_identity(funder, pid) VALUES ('NIH', '10.13039/100000002');
INSERT INTO funder_identity(funder, pid) VALUES ('NIH', '0000 0001 2297 5165');
INSERT INTO funder_identity(funder, pid) VALUES ('NIH', '491');
INSERT INTO funder_identity(funder, pid) VALUES ('SNSF', '324');
INSERT INTO funder_identity(funder, pid) VALUES ('SNSF', '0000 0001 0672 3101');
INSERT INTO funder_identity(funder, pid) VALUES ('SNSF', '0000 0001 1957 0992');
INSERT INTO funder_identity(funder, pid) VALUES ('FWF', '77');
INSERT INTO funder_identity(funder, pid) VALUES ('FWF', '10.13039/501100002428');
INSERT INTO funder_identity(funder, pid) VALUES ('FWF', '0000 0001 1091 8438');
INSERT INTO funder_identity(funder, pid) VALUES ('RCUK', '375');
INSERT INTO funder_identity(funder, pid) VALUES ('RCUK', '10.13039/501100000690');
INSERT INTO funder_identity(funder, pid) VALUES ('RCUK', '0000 0001 0507 0997');
INSERT INTO funder_identity(funder, pid) VALUES ('TBT', '0000 0001 0665 7300');
INSERT INTO funder_identity(funder, pid) VALUES ('TBT', '0000 0001 0685 2712');
INSERT INTO funder_identity(funder, pid) VALUES ('AFF', '1025');
INSERT INTO funder_identity(funder, pid) VALUES ('AFF', '10.13039/501100002341');
INSERT INTO funder_identity(funder, pid) VALUES ('AFF', '0000 0004 0647 6886');
INSERT INTO funder_identity(funder, pid) VALUES ('CONICYT', '0000 0001 0943 9683');
INSERT INTO funder_identity(funder, pid) VALUES ('DFG', '152');
INSERT INTO funder_identity(funder, pid) VALUES ('DFG', '0000 0001 2096 9829');
INSERT INTO funder_identity(funder, pid) VALUES ('DFG', '0000 0001 1957 9997');
INSERT INTO funders(id, name, shortname, jurisdiction, websiteurl, policy, registrationdate)
VALUES ('H2020', 'European Commission - Horizon 2020', 'H2020', 'EU', 'http://ec.europa.eu/research/participants/data/ref/h2020/grants_manual/hi/oa_pilot/h2020-hi-oa-pilot-guide_en.pdf', 'OA mandate for publications; OA to research data by default', '2015-08-24');
INSERT INTO funders(id, name, shortname, jurisdiction, websiteurl, policy, registrationdate)
VALUES ('FP7', 'European Commission - 7th Framework program', 'FP7', 'EU', 'http://ec.europa.eu/research/fp7', 'OA advised for publications; 2012 2013 (FP7)Special Clause 39 ERC on Open Access applies', '2013-05-07');
INSERT INTO funders(id, name, shortname, jurisdiction, websiteurl, policy, registrationdate)
VALUES ('FCT', 'Portuguese Foundation for Science and Technology', 'FCT', 'PT', 'https://www.fct.pt/acessoaberto/index.phtml.en', 'OA mandate for publications; OA to research data advised', '2015-02-27');
INSERT INTO funders(id, name, shortname, jurisdiction, websiteurl, policy, registrationdate)
VALUES ('WT', 'Wellcome Trust', 'WT', '', 'https://wellcome.ac.uk/funding/managing-grant/open-access-policy', 'OA mandate for articles, monographs and book chapters; CC-BY licence required', '2013-05-07');
INSERT INTO funders(id, name, shortname, jurisdiction, websiteurl, policy, registrationdate)
VALUES ('NSF', 'National Science Foundation', 'NSF', 'US', 'https://www.nsf.gov/news/special_reports/public_access', 'Version of record or AAM of articles, conferences and data deposited into the repository; persistent identifier to full text on publishers website', '2016-03-04');
INSERT INTO funders(id, name, shortname, jurisdiction, websiteurl, policy, registrationdate)
VALUES ('NHMRC', 'National Health and Medical Research Council', 'NHMRC', 'AU', 'https://www.nhmrc.gov.au/grants-funding/policy/nhmrc-open-access-policy', 'OA mandate for publications', '2015-08-24');
INSERT INTO funders(id, name, shortname, jurisdiction, websiteurl, policy, registrationdate)
VALUES ('ARC', 'Australian Research Council', 'ARC', 'AU', 'http://www.arc.gov.au/arc-open-access-policy', 'OA mandate for research outputs, except for research data', '2015-08-24');
INSERT INTO funders(id, name, shortname, jurisdiction, websiteurl, policy, registrationdate)
VALUES ('MESTD', 'Ministry of Education, Science and Technological Development', 'MESTD', 'RS', 'NOAD', 'NOAD', '2017-01-23');
INSERT INTO funders(id, name, shortname, jurisdiction, websiteurl, policy, registrationdate)
VALUES ('MSES', 'Ministry of Science Education and Sport', 'MSES/MZOS', 'HR', 'NOAD', 'NOAD', '2015-09-14');
INSERT INTO funders(id, name, shortname, jurisdiction, websiteurl, policy, registrationdate)
VALUES ('CSF', 'Croatian Science Foundation', 'CSF/HRZZ', 'HR', 'NOAD', 'NOAD', '2015-09-14');
INSERT INTO funders(id, name, shortname, jurisdiction, websiteurl, policy, registrationdate)
VALUES ('NWO', 'Netherlands Organisation for Scientific Research', 'NWO', 'NL', 'NOAD', 'NOAD', '2016-06-23');
INSERT INTO funders(id, name, shortname, jurisdiction, websiteurl, policy, registrationdate)
VALUES ('SFI', 'Science Foundation Ireland', 'SFI', 'IE', 'http://www.sfi.ie/resources/open-access-dec-10.pdf', 'OA mandate to publications, conference proceedings and technical reports', '2015-07-21');
INSERT INTO funders(id, name, shortname, jurisdiction, websiteurl, policy, registrationdate)
VALUES ('NIH', 'National Institute of Health', 'NIH', 'US', 'https://publicaccess.nih.gov/policy.htm', 'OA to articles', '2017-09-21');
INSERT INTO funders(id, name, shortname, jurisdiction, websiteurl, policy, registrationdate)
VALUES ('SNSF', 'Swiss National Science Foundation', 'SNSF', 'CH', 'NOAD', 'NOAD', '2016-11-16');
INSERT INTO funders(id, name, shortname, jurisdiction, websiteurl, policy, registrationdate)
VALUES ('FWF', 'Austrian Science Fund', 'FWF', 'AT', 'NOAD', 'NOAD', '2017-09-21');
INSERT INTO funders(id, name, shortname, jurisdiction, websiteurl, policy, registrationdate)
VALUES ('RCUK', 'Research Council UK', 'RCUK', 'UK', 'NOAD', 'NOAD', '2017-11-02');
INSERT INTO funders(id, name, shortname, jurisdiction, websiteurl, policy, registrationdate)
VALUES ('TBT', 'Scientific and Technological Research Council of Turkey', 'Tubitak', 'TR', 'NOAD', 'NOAD', '2017-11-02');
INSERT INTO funders(id, name, shortname, jurisdiction, websiteurl, policy, registrationdate)
VALUES ('DFG', 'German Research Foundation', 'DFG', 'DE', 'NOAD', 'NOAD', '2017-09-29');
INSERT INTO funders(id, name, shortname, jurisdiction, websiteurl, policy, registrationdate)
VALUES ('TARA', 'Tara Expedition Foundation', 'TARA', 'FR', 'NOAD', 'NOAD', '2016-12-02');
INSERT INTO funders(id, name, shortname, jurisdiction, websiteurl, policy, registrationdate)
VALUES ('AFF', 'Academy of Finland', 'AFF', 'FI', 'NOAD', 'NOAD', '2017-09-21');
INSERT INTO funders(id, name, shortname, jurisdiction, websiteurl, policy, registrationdate)
VALUES ('CONICYT', 'Comisión Nacional de Investigación Científica y Tecnológica', 'CONICYT', 'CL', 'http://www.lareferencia.info', '', '2017-09-08');
INSERT INTO funders(id, name, shortname, jurisdiction, websiteurl, policy, registrationdate)
VALUES ('SGOV', 'Ministry of Economy, Industry and Competitiveness', 'SGOV', 'ES', 'NOAD', 'NOAD', '2017-09-14');

-- ====================================================================
-- (next file section: aggregation/datasource schema DDL)
-- ====================================================================
--
-- Name: dsm_api; Type: TABLE; Schema: public; Owner: dnetapi; Tablespace:
--
-- One row per access point ("API") of a datasource, with the bookkeeping
-- of the last collection / aggregation / download / validation runs.
CREATE TABLE dsm_api (
    id character varying(255) NOT NULL,
    protocol character varying(255),
    datasource character varying(255), -- references dsm_datasources(id), FK added later in this file
    contentdescription character varying(255) DEFAULT 'metadata'::character varying,
    active boolean DEFAULT false,
    removable boolean DEFAULT false,
    typology character varying(255) DEFAULT 'UNKNOWN'::character varying,
    compatibility character varying(255) DEFAULT 'UNKNOWN'::character varying,
    metadata_identifier_path character varying(512) DEFAULT NULL::character varying,
    -- stats of the most recent collection run
    last_collection_total integer,
    last_collection_date timestamp without time zone,
    last_collection_mdid character varying(255) DEFAULT NULL::character varying,
    -- stats of the most recent aggregation run
    last_aggregation_total integer,
    last_aggregation_date timestamp without time zone,
    last_aggregation_mdid character varying(255) DEFAULT NULL::character varying,
    -- stats of the most recent download run
    last_download_total integer,
    last_download_date timestamp without time zone,
    last_download_objid character varying(255) DEFAULT NULL::character varying,
    last_validation_job character varying(255) DEFAULT NULL::character varying,
    baseurl text,
    -- default is a unique placeholder id: 'temp_' + md5(clock) + '_' + md5(random)
    _dnet_resource_identifier_ character varying(2048) DEFAULT ((('temp_'::text || md5((clock_timestamp())::text)) || '_'::text) || md5((random())::text)),
    compatibility_override character varying(255) DEFAULT NULL::character varying
);
ALTER TABLE public.dsm_api OWNER TO dnetapi;
--
-- Name: dsm_datasource_organization; Type: TABLE; Schema: public; Owner: dnetapi; Tablespace:
--
-- Many-to-many join table between datasources and organizations.
CREATE TABLE dsm_datasource_organization (
    datasource character varying(255) NOT NULL,   -- references dsm_datasources(id)
    organization character varying(255) NOT NULL, -- references dsm_organizations(id)
    _dnet_resource_identifier_ character varying(2048) DEFAULT ((('temp_'::text || md5((clock_timestamp())::text)) || '_'::text) || md5((random())::text))
);
ALTER TABLE public.dsm_datasource_organization OWNER TO dnetapi;
--
-- Name: dsm_datasources; Type: TABLE; Schema: public; Owner: dnetapi; Tablespace:
--
-- Registry of datasources (repositories, journals, aggregators, ...).
CREATE TABLE dsm_datasources (
    id character varying(255) NOT NULL,
    officialname character varying(512) NOT NULL,
    englishname character varying(512),
    websiteurl character varying(255),
    logourl character varying(255),
    contactemail character varying(255),
    latitude double precision DEFAULT 0.0,
    longitude double precision DEFAULT 0.0,
    timezone character varying(10) DEFAULT '0.0'::character varying,
    namespaceprefix character(12) NOT NULL, -- fixed-width 12 chars; UNIQUE constraint added later in this file
    languages text,
    od_contenttypes text,
    collectedfrom character varying(255), -- self-reference to dsm_datasources(id), FK added later
    dateofvalidation date,
    optional1 character varying(255),
    optional2 character varying(255),
    typology character varying(255) NOT NULL,
    provenanceaction character varying(255) DEFAULT 'UNKNOWN'::character varying,
    dateofcollection date DEFAULT ('now'::text)::date NOT NULL, -- defaults to the insertion date
    platform character varying(255),
    activationid character varying(255),
    description text,
    releasestartdate date,
    releaseenddate date,
    missionstatementurl character varying(512),
    dataprovider boolean,
    serviceprovider boolean,
    databaseaccesstype character varying(64),
    datauploadtype character varying(64),
    databaseaccessrestriction character varying(64),
    datauploadrestriction character varying(64),
    versioning boolean,
    citationguidelineurl character varying(512),
    qualitymanagementkind character varying(64),
    pidsystems text,
    certificates text,
    aggregator character varying(64) DEFAULT 'OPENAIRE'::character varying NOT NULL,
    issn character varying(20),
    eissn character varying(20),
    lissn character varying(20),
    registeredby character varying(255),
    subjects text,
    managed boolean DEFAULT false,
    registrationdate date,
    -- tri-state flags: null = not declared, true/false = explicit choice
    consentTermsOfUse boolean default null,
    fullTextDownload boolean default null,
    _dnet_resource_identifier_ character varying(2048) DEFAULT ((('temp_'::text || md5((clock_timestamp())::text)) || '_'::text) || md5((random())::text))
);
ALTER TABLE public.dsm_datasources OWNER TO dnetapi;
--
-- Name: dsm_organizations; Type: TABLE; Schema: public; Owner: dnetapi; Tablespace:
--
-- Registry of organizations; the ec_* booleans are EC (European Commission)
-- classification flags.
CREATE TABLE dsm_organizations (
    id character varying(255) NOT NULL,
    legalshortname character varying(255),
    legalname character varying(255),
    websiteurl text,
    logourl character varying(255),
    ec_legalbody boolean DEFAULT false,
    ec_legalperson boolean DEFAULT false,
    ec_nonprofit boolean DEFAULT false,
    ec_researchorganization boolean DEFAULT false,
    ec_highereducation boolean DEFAULT false,
    ec_internationalorganizationeurinterests boolean DEFAULT false,
    ec_internationalorganization boolean DEFAULT false,
    ec_enterprise boolean DEFAULT false,
    ec_smevalidated boolean DEFAULT false,
    ec_nutscode boolean DEFAULT false,
    country character varying(255),
    collectedfrom character varying(255), -- references dsm_datasources(id), FK added later in this file
    optional1 character varying(255),
    optional2 character varying(255),
    dateofcollection date DEFAULT ('now'::text)::date NOT NULL,
    provenanceaction character varying(255) DEFAULT 'UNKNOWN'::character varying,
    _dnet_resource_identifier_ character varying(2048) DEFAULT ((('temp_'::text || md5((clock_timestamp())::text)) || '_'::text) || md5((random())::text)),
    lastupdate date DEFAULT ('now'::text)::date NOT NULL,
    trust double precision DEFAULT 0.9
);
ALTER TABLE public.dsm_organizations OWNER TO dnetapi;
--
-- Name: browse_countries; Type: VIEW; Schema: public; Owner: dnet
--
-- Counts dsm_api rows grouped by the country of the organization linked to
-- the api's datasource. LEFT JOINs keep apis with no datasource/organization,
-- which fall into the NULL-country bucket.
CREATE VIEW browse_countries AS
 SELECT o.country AS term,
    count(*) AS total
   FROM (((dsm_api a
     LEFT JOIN dsm_datasources d ON (((a.datasource)::text = (d.id)::text)))
     LEFT JOIN dsm_datasource_organization dao ON (((d.id)::text = (dao.datasource)::text)))
     LEFT JOIN dsm_organizations o ON (((dao.organization)::text = (o.id)::text)))
  GROUP BY o.country
  ORDER BY count(*) DESC;
ALTER TABLE public.browse_countries OWNER TO dnet;
--
-- Name: dsm_apiparams; Type: TABLE; Schema: public; Owner: dnetapi; Tablespace:
--
-- Key/value configuration parameters attached to a dsm_api row.
CREATE TABLE dsm_apiparams (
    param character varying(255) NOT NULL,
    value text DEFAULT ''::character varying NOT NULL,
    api character varying(255) NOT NULL, -- references dsm_api(id), FK added later in this file
    _dnet_resource_identifier_ character varying(2048) DEFAULT ((('temp_'::text || md5((clock_timestamp())::text)) || '_'::text) || md5((random())::text))
);
ALTER TABLE public.dsm_apiparams OWNER TO dnetapi;
--
-- Name: dsm_datasource_api; Type: VIEW; Schema: public; Owner: dnet
--
-- Flattened datasource + api view; one row per (datasource, api) pair, with
-- datasources kept even when they have no api (LEFT JOIN). rowid is a
-- synthetic row number ordered by api id.
CREATE OR REPLACE VIEW dsm_datasource_api AS
SELECT
    row_number() OVER (ORDER BY a.id) AS rowid,
    d.id,
    d.officialname,
    d.englishname,
    d.websiteurl,
    d.contactemail,
    d.collectedfrom,
    d.typology,
    d.platform,
    d.registeredby,
    d.managed,
    a.protocol,
    a.contentdescription,
    a.active,
    a.removable,
    a.typology AS apitypology, -- renamed to avoid clashing with d.typology
    a.compatibility,
    a.baseurl
FROM (dsm_datasources d
    LEFT JOIN dsm_api a ON (((d.id)::text = (a.datasource)::text)));
--
-- Name: dsm_datasourcepids; Type: TABLE; Schema: public; Owner: dnetapi; Tablespace:
--
-- Join table: persistent identifiers (pids) assigned to a datasource.
CREATE TABLE dsm_datasourcepids (
    datasource character varying(255) NOT NULL, -- references dsm_datasources(id)
    pid character varying(255) NOT NULL,        -- references dsm_identities(pid)
    _dnet_resource_identifier_ character varying(2048) DEFAULT ((('temp_'::text || md5((clock_timestamp())::text)) || '_'::text) || md5((random())::text))
);
ALTER TABLE public.dsm_datasourcepids OWNER TO dnetapi;
--
-- Name: dsm_identities; Type: TABLE; Schema: public; Owner: dnetapi; Tablespace:
--
-- Persistent identifiers and the type of authority that issued them;
-- referenced by dsm_datasourcepids and dsm_organizationpids.
CREATE TABLE dsm_identities (
    pid character varying(255) NOT NULL,
    issuertype character varying(255),
    _dnet_resource_identifier_ character varying(2048) DEFAULT ((('temp_'::text || md5((clock_timestamp())::text)) || '_'::text) || md5((random())::text))
);
ALTER TABLE public.dsm_identities OWNER TO dnetapi;
--
-- Name: dsm_organizationpids; Type: TABLE; Schema: public; Owner: dnetapi; Tablespace:
--
-- Join table: persistent identifiers (pids) assigned to an organization.
CREATE TABLE dsm_organizationpids (
    organization character varying(255) NOT NULL, -- references dsm_organizations(id)
    pid character varying(255) NOT NULL,          -- references dsm_identities(pid)
    _dnet_resource_identifier_ character varying(2048) DEFAULT ((('temp_'::text || md5((clock_timestamp())::text)) || '_'::text) || md5((random())::text))
);
ALTER TABLE public.dsm_organizationpids OWNER TO dnetapi;
--
-- Name: funder_identity; Type: TABLE; Schema: public; Owner: dnet; Tablespace:
--
-- Pids associated with a funder. NOTE(review): no PK/NOT NULL/FK constraints
-- are declared for this table — confirm that is intentional.
CREATE TABLE funder_identity (
    funder character varying(255),
    pid character varying(255)
);
ALTER TABLE public.funder_identity OWNER TO dnet;
--
-- Name: funders; Type: TABLE; Schema: public; Owner: dnet; Tablespace:
--
-- Research funders exposed by the funders API; seeded by the INSERTs earlier
-- in this file.
CREATE TABLE funders (
    id character varying(255) NOT NULL,
    name character varying(255),
    shortname character varying(255),
    jurisdiction character varying(255),
    websiteurl text,
    policy character varying(255),
    registrationdate date DEFAULT ('now'::text)::date NOT NULL, -- defaults to the insertion date
    lastupdatedate date
);
ALTER TABLE public.funders OWNER TO dnet;
--
-- Name: fundingpaths; Type: TABLE; Schema: public; Owner: dnet; Tablespace:
--
--
-- Name: project_organization; Type: TABLE; Schema: public; Owner: dnet; Tablespace:
--
--CREATE TABLE project_organization (
-- participantnumber integer,
-- project character varying(255) NOT NULL,
-- resporganization character varying(255) NOT NULL,
-- semanticclass character varying(255) DEFAULT 'UNKNOWN'::character varying,
-- trust double precision DEFAULT 0.9,
-- _dnet_resource_identifier_ character varying(2048) DEFAULT ((('temp_'::text || md5((clock_timestamp())::text)) || '_'::text) || md5((random())::text))
--);
--ALTER TABLE public.project_organization OWNER TO dnet;
-- =====================================================================
-- Primary key / unique constraints
-- =====================================================================
--
-- Name: dsm_api_pkey; Type: CONSTRAINT; Schema: public; Owner: dnetapi; Tablespace:
--
ALTER TABLE ONLY dsm_api
    ADD CONSTRAINT dsm_api_pkey PRIMARY KEY (id);
--
-- Name: dsm_apicollection_pkey; Type: CONSTRAINT; Schema: public; Owner: dnetapi; Tablespace:
--
ALTER TABLE ONLY dsm_apiparams
    ADD CONSTRAINT dsm_apicollection_pkey PRIMARY KEY (api, param); -- one value per (api, param) pair
--
-- Name: dsm_datasourcepids_pkey; Type: CONSTRAINT; Schema: public; Owner: dnetapi; Tablespace:
--
ALTER TABLE ONLY dsm_datasourcepids
    ADD CONSTRAINT dsm_datasourcepids_pkey PRIMARY KEY (datasource, pid);
--
-- Name: dsm_datasources_namespaceprefix_key; Type: CONSTRAINT; Schema: public; Owner: dnetapi; Tablespace:
--
ALTER TABLE ONLY dsm_datasources
    ADD CONSTRAINT dsm_datasources_namespaceprefix_key UNIQUE (namespaceprefix);
--
-- Name: dsm_datasources_pkey; Type: CONSTRAINT; Schema: public; Owner: dnetapi; Tablespace:
--
ALTER TABLE ONLY dsm_datasources
    ADD CONSTRAINT dsm_datasources_pkey PRIMARY KEY (id);
--
-- Name: dsm_identities_pkey; Type: CONSTRAINT; Schema: public; Owner: dnetapi; Tablespace:
--
ALTER TABLE ONLY dsm_identities
    ADD CONSTRAINT dsm_identities_pkey PRIMARY KEY (pid);
--
-- Name: dsm_organization_datasource_pkey; Type: CONSTRAINT; Schema: public; Owner: dnetapi; Tablespace:
--
ALTER TABLE ONLY dsm_datasource_organization
    ADD CONSTRAINT dsm_organization_datasource_pkey PRIMARY KEY (datasource, organization);
--
-- Name: dsm_organizationpids_pkey; Type: CONSTRAINT; Schema: public; Owner: dnetapi; Tablespace:
--
ALTER TABLE ONLY dsm_organizationpids
    ADD CONSTRAINT dsm_organizationpids_pkey PRIMARY KEY (organization, pid);
--
-- Name: dsm_organizations_pkey; Type: CONSTRAINT; Schema: public; Owner: dnetapi; Tablespace:
--
ALTER TABLE ONLY dsm_organizations
    ADD CONSTRAINT dsm_organizations_pkey PRIMARY KEY (id);
-- =====================================================================
-- Indexes supporting datasource/organization lookups and filters
-- =====================================================================
--
-- Name: dsm_datasources_contactemail_idx; Type: INDEX; Schema: public; Owner: dnetapi; Tablespace:
--
CREATE INDEX dsm_datasources_contactemail_idx ON dsm_datasources USING btree (contactemail);
--
-- Name: dsm_datasources_englishname_idx; Type: INDEX; Schema: public; Owner: dnetapi; Tablespace:
--
CREATE INDEX dsm_datasources_englishname_idx ON dsm_datasources USING btree (englishname);
--
-- Name: dsm_datasources_managed_idx; Type: INDEX; Schema: public; Owner: dnetapi; Tablespace:
--
CREATE INDEX dsm_datasources_managed_idx ON dsm_datasources USING btree (managed);
--
-- Name: dsm_datasources_officialname_idx; Type: INDEX; Schema: public; Owner: dnetapi; Tablespace:
--
CREATE INDEX dsm_datasources_officialname_idx ON dsm_datasources USING btree (officialname);
--
-- Name: dsm_datasources_registeredby_idx; Type: INDEX; Schema: public; Owner: dnetapi; Tablespace:
--
CREATE INDEX dsm_datasources_registeredby_idx ON dsm_datasources USING btree (registeredby);
--
-- Name: dsm_organizations_country_idx; Type: INDEX; Schema: public; Owner: dnetapi; Tablespace:
--
CREATE INDEX dsm_organizations_country_idx ON dsm_organizations USING btree (country);
-- =====================================================================
-- Foreign key constraints. Note: only the api->datasource and
-- datasource_organization->organization links cascade on delete; the
-- remaining FKs restrict deletion.
-- =====================================================================
--
-- Name: dsm_api_datasource_fkey; Type: FK CONSTRAINT; Schema: public; Owner: dnetapi
--
ALTER TABLE ONLY dsm_api
    ADD CONSTRAINT dsm_api_datasource_fkey FOREIGN KEY (datasource) REFERENCES dsm_datasources(id) ON DELETE CASCADE;
--
-- Name: dsm_apicollections_api_fkey; Type: FK CONSTRAINT; Schema: public; Owner: dnetapi
--
ALTER TABLE ONLY dsm_apiparams
    ADD CONSTRAINT dsm_apicollections_api_fkey FOREIGN KEY (api) REFERENCES dsm_api(id);
--
-- Name: dsm_datasource_organization_datasource_fkey; Type: FK CONSTRAINT; Schema: public; Owner: dnetapi
--
ALTER TABLE ONLY dsm_datasource_organization
    ADD CONSTRAINT dsm_datasource_organization_datasource_fkey FOREIGN KEY (datasource) REFERENCES dsm_datasources(id);
--
-- Name: dsm_datasource_organization_organization_fkey; Type: FK CONSTRAINT; Schema: public; Owner: dnetapi
--
ALTER TABLE ONLY dsm_datasource_organization
    ADD CONSTRAINT dsm_datasource_organization_organization_fkey FOREIGN KEY (organization) REFERENCES dsm_organizations(id) ON DELETE CASCADE;
--
-- Name: dsm_datasourcepids_datasource_fkey; Type: FK CONSTRAINT; Schema: public; Owner: dnetapi
--
ALTER TABLE ONLY dsm_datasourcepids
    ADD CONSTRAINT dsm_datasourcepids_datasource_fkey FOREIGN KEY (datasource) REFERENCES dsm_datasources(id);
--
-- Name: dsm_datasourcepids_pid_fkey; Type: FK CONSTRAINT; Schema: public; Owner: dnetapi
--
ALTER TABLE ONLY dsm_datasourcepids
    ADD CONSTRAINT dsm_datasourcepids_pid_fkey FOREIGN KEY (pid) REFERENCES dsm_identities(pid);
--
-- Name: dsm_datasources_collectedfrom_fkey; Type: FK CONSTRAINT; Schema: public; Owner: dnetapi
--
ALTER TABLE ONLY dsm_datasources
    ADD CONSTRAINT dsm_datasources_collectedfrom_fkey FOREIGN KEY (collectedfrom) REFERENCES dsm_datasources(id); -- self-referencing FK
--
-- Name: dsm_organizationpids_organization_fkey; Type: FK CONSTRAINT; Schema: public; Owner: dnetapi
--
ALTER TABLE ONLY dsm_organizationpids
    ADD CONSTRAINT dsm_organizationpids_organization_fkey FOREIGN KEY (organization) REFERENCES dsm_organizations(id);
--
-- Name: dsm_organizationpids_pid_fkey; Type: FK CONSTRAINT; Schema: public; Owner: dnetapi
--
ALTER TABLE ONLY dsm_organizationpids
    ADD CONSTRAINT dsm_organizationpids_pid_fkey FOREIGN KEY (pid) REFERENCES dsm_identities(pid);
--
-- Name: dsm_organizations_collectedfrom_fkey; Type: FK CONSTRAINT; Schema: public; Owner: dnetapi
--
ALTER TABLE ONLY dsm_organizations
    ADD CONSTRAINT dsm_organizations_collectedfrom_fkey FOREIGN KEY (collectedfrom) REFERENCES dsm_datasources(id);
-- =====================================================================
-- Privileges. Schema 'public' is reset and re-granted; the views are
-- owned by 'dnet', with 'dnetapi' given full access on browse_countries
-- but read-only access on dsm_datasource_api.
-- =====================================================================
REVOKE ALL ON SCHEMA public FROM PUBLIC;
REVOKE ALL ON SCHEMA public FROM postgres;
GRANT ALL ON SCHEMA public TO postgres;
GRANT ALL ON SCHEMA public TO PUBLIC;
REVOKE ALL ON TABLE browse_countries FROM PUBLIC;
REVOKE ALL ON TABLE browse_countries FROM dnet;
GRANT ALL ON TABLE browse_countries TO dnet;
GRANT ALL ON TABLE browse_countries TO dnetapi;
REVOKE ALL ON TABLE dsm_datasource_api FROM PUBLIC;
REVOKE ALL ON TABLE dsm_datasource_api FROM dnet;
GRANT ALL ON TABLE dsm_datasource_api TO dnet;
GRANT SELECT ON TABLE dsm_datasource_api TO dnetapi; -- read-only for the API role

-- ====================================================================
-- (next file section: 'info' key/value table for load/update timestamps)
-- ====================================================================
-- Key/value store of pipeline milestone dates; one row per well-known key,
-- seeded below with NULL values to be filled in by the loading processes.
CREATE TABLE info (
    key character varying(255) NOT NULL PRIMARY KEY,
    value DATE
);
GRANT ALL ON TABLE info TO dnet;
GRANT ALL ON TABLE info TO dnetapi;
-- Seed the known keys; 'value' is left NULL until the first run.
INSERT INTO info(key) VALUES ('oaf_load_date');
INSERT INTO info(key) VALUES ('odf_load_date');
INSERT INTO info(key) VALUES ('inference_date');
INSERT INTO info(key) VALUES ('claim_load_date');
INSERT INTO info(key) VALUES ('stats_update_date');
INSERT INTO info(key) VALUES ('crossref_update_date');
INSERT INTO info(key) VALUES ('unpaywall_update_date');
INSERT INTO info(key) VALUES ('orcid_update_date');
INSERT INTO info(key) VALUES ('mag_update_date');