Merge branch 'master' of https://code-repo.d4science.org/D-Net/dnet-hadoop
Commit f680eb3e12
ISLookupClientFactory.java

@@ -1,15 +1,22 @@
 package eu.dnetlib.dhp.utils;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import java.util.Map;
+
+import javax.xml.ws.BindingProvider;
+
 import org.apache.cxf.jaxws.JaxWsProxyFactoryBean;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
 
 public class ISLookupClientFactory {
 
-	private static final Log log = LogFactory.getLog(ISLookupClientFactory.class);
+	private static final Logger log = LoggerFactory.getLogger(ISLookupClientFactory.class);
+
+	private static int requestTimeout = 60000 * 10;
+	private static int connectTimeout = 60000 * 10;
 
 	public static ISLookUpService getLookUpService(final String isLookupUrl) {
 		return getServiceStub(ISLookUpService.class, isLookupUrl);

@@ -21,6 +28,25 @@ public class ISLookupClientFactory {
 		final JaxWsProxyFactoryBean jaxWsProxyFactory = new JaxWsProxyFactoryBean();
 		jaxWsProxyFactory.setServiceClass(clazz);
 		jaxWsProxyFactory.setAddress(endpoint);
-		return (T) jaxWsProxyFactory.create();
+
+		final T service = (T) jaxWsProxyFactory.create();
+
+		if (service instanceof BindingProvider) {
+			log
+				.info(
+					"setting timeouts for {} to requestTimeout: {}, connectTimeout: {}",
+					BindingProvider.class.getName(), requestTimeout, connectTimeout);
+
+			Map<String, Object> requestContext = ((BindingProvider) service).getRequestContext();
+
+			requestContext.put("com.sun.xml.internal.ws.request.timeout", requestTimeout);
+			requestContext.put("com.sun.xml.internal.ws.connect.timeout", connectTimeout);
+			requestContext.put("com.sun.xml.ws.request.timeout", requestTimeout);
+			requestContext.put("com.sun.xml.ws.connect.timeout", connectTimeout);
+			requestContext.put("javax.xml.ws.client.receiveTimeout", requestTimeout);
+			requestContext.put("javax.xml.ws.client.connectionTimeout", connectTimeout);
+		}
+
+		return service;
 	}
 }
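For context, the timeout handling introduced above follows the standard JAX-WS pattern: a proxy created by CXF implements javax.xml.ws.BindingProvider, and both the vendor-specific keys and the javax.xml.ws.client.* keys are set on its request context so the limits apply regardless of which runtime ends up serving the call. A minimal, self-contained sketch of the same pattern (class name, method name, and parameter values are placeholders, not part of the commit):

import java.util.Map;

import javax.xml.ws.BindingProvider;

public final class JaxWsTimeouts {

	private JaxWsTimeouts() {
	}

	// Applies the same receive/connect timeout keys used by ISLookupClientFactory
	// to an arbitrary JAX-WS proxy; values are in milliseconds.
	public static <T> T withTimeouts(final T port, final int requestTimeout, final int connectTimeout) {
		if (port instanceof BindingProvider) {
			final Map<String, Object> ctx = ((BindingProvider) port).getRequestContext();
			ctx.put("com.sun.xml.internal.ws.request.timeout", requestTimeout); // JDK-bundled JAX-WS RI keys
			ctx.put("com.sun.xml.internal.ws.connect.timeout", connectTimeout);
			ctx.put("com.sun.xml.ws.request.timeout", requestTimeout); // standalone Metro RI keys
			ctx.put("com.sun.xml.ws.connect.timeout", connectTimeout);
			ctx.put("javax.xml.ws.client.receiveTimeout", requestTimeout); // generic JAX-WS style keys
			ctx.put("javax.xml.ws.client.connectionTimeout", connectTimeout);
		}
		return port;
	}
}

A caller still obtains the stub as before, e.g. ISLookupClientFactory.getLookUpService(isLookupUrl); every remote call made through it is then bounded by the ten-minute request and connect limits configured in the factory.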
dhp-broker-events Oozie workflow definition

@@ -64,38 +64,12 @@
 		</configuration>
 	</global>
 
-	<start to="generate_events"/>
+	<start to="index_es"/>
 
 	<kill name="Kill">
 		<message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
 	</kill>
 
-	<action name="generate_events">
-		<spark xmlns="uri:oozie:spark-action:0.2">
-			<master>yarn</master>
-			<mode>cluster</mode>
-			<name>GenerateEventsJob</name>
-			<class>eu.dnetlib.dhp.broker.oa.GenerateEventsJob</class>
-			<jar>dhp-broker-events-${projectVersion}.jar</jar>
-			<spark-opts>
-				--executor-cores=${sparkExecutorCores}
-				--executor-memory=${sparkExecutorMemory}
-				--driver-memory=${sparkDriverMemory}
-				--conf spark.extraListeners=${spark2ExtraListeners}
-				--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
-				--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
-				--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
-				--conf spark.sql.shuffle.partitions=3840
-			</spark-opts>
-			<arg>--workingPath</arg><arg>${workingPath}</arg>
-			<arg>--datasourceIdWhitelist</arg><arg>${datasourceIdWhitelist}</arg>
-			<arg>--datasourceTypeWhitelist</arg><arg>${datasourceTypeWhitelist}</arg>
-			<arg>--datasourceIdBlacklist</arg><arg>${datasourceIdBlacklist}</arg>
-		</spark>
-		<ok to="index_es"/>
-		<error to="Kill"/>
-	</action>
-
 	<action name="index_es">
 		<spark xmlns="uri:oozie:spark-action:0.2">
 			<master>yarn</master>

@@ -117,30 +91,6 @@
 			<arg>--index</arg><arg>${esIndexName}</arg>
 			<arg>--esHost</arg><arg>${esIndexHost}</arg>
 		</spark>
-		<ok to="stats"/>
-		<error to="Kill"/>
-	</action>
-
-	<action name="stats">
-		<spark xmlns="uri:oozie:spark-action:0.2">
-			<master>yarn</master>
-			<mode>cluster</mode>
-			<name>GenerateStatsJob</name>
-			<class>eu.dnetlib.dhp.broker.oa.GenerateStatsJob</class>
-			<jar>dhp-broker-events-${projectVersion}.jar</jar>
-			<spark-opts>
-				--executor-cores=${sparkExecutorCores}
-				--executor-memory=${sparkExecutorMemory}
-				--driver-memory=${sparkDriverMemory}
-				--conf spark.extraListeners=${spark2ExtraListeners}
-				--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
-				--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
-				--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
-				--conf spark.sql.shuffle.partitions=3840
-			</spark-opts>
-			<arg>--graphPath</arg><arg>${graphInputPath}</arg>
-			<arg>--workingPath</arg><arg>${workingPath}</arg>
-		</spark>
 		<ok to="End"/>
 		<error to="Kill"/>
 	</action>
MappersTest.java

@@ -5,9 +5,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertNotNull;
 import static org.junit.jupiter.api.Assertions.assertTrue;
-import static org.mockito.ArgumentMatchers.anyString;
 import static org.mockito.Mockito.lenient;
-import static org.mockito.Mockito.when;
 
 import java.io.IOException;
 import java.util.List;

@@ -21,8 +19,9 @@ import org.junit.jupiter.api.extension.ExtendWith;
 import org.mockito.Mock;
 import org.mockito.junit.jupiter.MockitoExtension;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
+
 import eu.dnetlib.dhp.oa.graph.clean.CleaningFunctionTest;
-import eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils;
 import eu.dnetlib.dhp.oa.graph.raw.common.VocabularyGroup;
 import eu.dnetlib.dhp.schema.common.ModelConstants;
 import eu.dnetlib.dhp.schema.oaf.Author;

@@ -266,6 +265,17 @@ public class MappersTest {
 		assertTrue(s.getInstance().size() > 0);
 	}
 
+	// @Test
+	void testDataset_2() throws IOException {
+		final String xml = IOUtils.toString(getClass().getResourceAsStream("odf_dataset_2.xml"));
+
+		final List<Oaf> list = new OdfToOafMapper(vocs, false).processMdRecord(xml);
+
+		System.out.println("***************");
+		System.out.println(new ObjectMapper().writeValueAsString(list));
+		System.out.println("***************");
+	}
+
 	private void assertValidId(final String id) {
 		assertEquals(49, id.length());
 		assertEquals('|', id.charAt(2));
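Note that the new testDataset_2 is committed with its @Test annotation commented out and only prints the mapper output. If it were enabled, a minimal assertion-based variant could look like the sketch below; it is meant to live inside MappersTest, reuses only members already visible in this diff (the vocs vocabulary group, IOUtils, OdfToOafMapper, and the JUnit assertions imported above), and the specific assertions are illustrative rather than part of the commit:

	@Test
	void testDataset_2() throws IOException {
		// fixture added by this commit, loaded from the test classpath
		final String xml = IOUtils.toString(getClass().getResourceAsStream("odf_dataset_2.xml"));

		final List<Oaf> list = new OdfToOafMapper(vocs, false).processMdRecord(xml);

		// the OpenTrials record should be mapped to at least one Oaf entity
		assertNotNull(list);
		assertFalse(list.isEmpty());
	}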
odf_dataset_2.xml (new test resource)

@@ -0,0 +1,75 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<oai:record xmlns:dr="http://www.driver-repository.eu/namespace/dr"
+	xmlns:dri="http://www.driver-repository.eu/namespace/dri"
+	xmlns:oaf="http://namespace.openaire.eu/oaf" xmlns:oai="http://www.openarchives.org/OAI/2.0/">
+	<oai:header>
+		<dri:objIdentifier>opentrials__::0000bf8e63d3d7e6b88421eabafae3f6</dri:objIdentifier>
+		<dri:recordIdentifier>feabb67c-1fd1-423b-aec6-606d04ce53c6</dri:recordIdentifier>
+		<dri:dateOfCollection>2019-03-27T15:15:22.22Z</dri:dateOfCollection>
+		<oaf:datasourceprefix>opentrials__</oaf:datasourceprefix>
+		<dr:dateOfTransformation>2019-04-17T16:04:20.586Z</dr:dateOfTransformation>
+	</oai:header>
+	<oai:metadata>
+		<resource xmlns="http://datacite.org/schema/kernel-3"
+			xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://datacite.org/schema/kernel-3 http://schema.datacite.org/meta/kernel-3/metadata.xsd">
+			<identifier identifierType="URL">https://clinicaltrials.gov/ct2/show/NCT02321059</identifier>
+			<alternateIdentifiers>
+				<alternateIdentifier alternateIdentifierType="URL">http://apps.who.int/trialsearch/Trial3.aspx?trialid=NCT02321059</alternateIdentifier>
+				<alternateIdentifier alternateIdentifierType="nct">NCT02321059</alternateIdentifier>
+			</alternateIdentifiers>
+			<creators>
+				<creator>
+					<creatorName>Jensen, Kristian K</creatorName>
+				</creator>
+			</creators>
+			<titles>
+				<title>Validation of the Goodstrength System for Assessment of Abdominal Wall Strength in Patients With Incisional Hernia</title>
+			</titles>
+			<publisher>nct</publisher>
+			<geoLocations>
+				<geoLocationPlace>Denmark</geoLocationPlace>
+			</geoLocations>
+			<resourceType resourceTypeGeneral="clinicalTrial">0037</resourceType>
+			<descriptions>
+				<description descriptionType="Abstract">Patients with an incisional hernia in the midline and controls with an intact abdominal wall are examined twice with one week apart, in order to establish the test-retest reliability and internal and external validity of the Goodstrength trunk dynamometer.</description>
+			</descriptions>
+		</resource>
+		<oaf:accessrights>OPEN</oaf:accessrights>
+		<dr:CobjCategory type="dataset">0037</dr:CobjCategory>
+		<oaf:dateAccepted>2014-11-11</oaf:dateAccepted>
+		<oaf:hostedBy id="openaire____::opentrials" name="OpenTrials"/>
+		<oaf:collectedFrom id="openaire____::opentrials" name="OpenTrials"/>
+		<oaf:about>
+			<oaf:datainfo>
+				<oaf:inferred>false</oaf:inferred>
+				<oaf:deletedbyinference>false</oaf:deletedbyinference>
+				<oaf:trust>0.9</oaf:trust>
+				<oaf:inferenceprovenance/>
+				<oaf:provenanceaction
+					classid="sysimport:crosswalk:datasetarchive"
+					classname="sysimport:crosswalk:datasetarchive"
+					schemeid="dnet:provenanceActions" schemename="dnet:provenanceActions"/>
+			</oaf:datainfo>
+		</oaf:about>
+	</oai:metadata>
+	<about xmlns:dc="http://purl.org/dc/elements/1.1/"
+		xmlns:prov="http://www.openarchives.org/OAI/2.0/provenance" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+		<provenance xmlns="http://www.openarchives.org/OAI/2.0/provenance" xsi:schemaLocation="http://www.openarchives.org/OAI/2.0/provenance http://www.openarchives.org/OAI/2.0/provenance.xsd">
+			<originDescription altered="true" harvestDate="2019-03-27T15:15:22.22Z">
+				<baseURL>file:///var/lib/dnet/data/opentrials/opentrials.csv</baseURL>
+				<identifier/>
+				<datestamp/>
+				<metadataNamespace/>
+			</originDescription>
+		</provenance>
+		<oaf:datainfo>
+			<oaf:inferred>false</oaf:inferred>
+			<oaf:deletedbyinference>false</oaf:deletedbyinference>
+			<oaf:trust>0.9</oaf:trust>
+			<oaf:inferenceprovenance/>
+			<oaf:provenanceaction classid="sysimport:crosswalk:datasetarchive"
+				classname="sysimport:crosswalk:datasetarchive"
+				schemeid="dnet:provenanceActions" schemename="dnet:provenanceActions"/>
+		</oaf:datainfo>
+	</about>
+</oai:record>