For each published API, the datasource name is stored into the graph datasourceApis

This commit is contained in:
Enrico Ottonello 2020-01-24 10:52:04 +01:00
parent d3b4e6c864
commit 17552bafa0
6 changed files with 33 additions and 16 deletions

View File

@@ -196,7 +196,7 @@ public class GraphDBClient {
}
public long feedProvenance(final String datasourceApi) throws AriadnePlusPublisherException {
public long feedProvenance(final String datasource, final String datasourceApi) throws AriadnePlusPublisherException {
try {
log.debug("init connection to graphDBServerUrl " + this.graphDBServerUrl);
@@ -209,13 +209,14 @@
IRI IS_API_OF = factory.createIRI(PROVENANCE_NS, "isApiOf");
IRI INSERTED_IN_DATE = factory.createIRI(PROVENANCE_NS, "insertedInDate");
IRI rApi = factory.createIRI(getGraphDBBaseURI(), datasourceApi);
Statement stmApi = factory.createStatement(rApi, IS_API_OF, factory.createLiteral(datasourceApi));
Statement stmApi = factory.createStatement(rApi, IS_API_OF, factory.createLiteral(datasource));
LocalDateTime now = LocalDateTime.now();
Statement stmInsertedDate = factory.createStatement(rApi, INSERTED_IN_DATE, factory.createLiteral(now.toString()));
IRI datasourceApisGraph = factory.createIRI(getGraphDBBaseURI(), "datasourceApis");
try (RepositoryConnection con = repository.getConnection()) {
con.begin();
log.debug("Adding stmt " + stmApi.toString() + " into graph " + datasourceApisGraph.toString());
con.remove(rApi, INSERTED_IN_DATE, null, datasourceApisGraph);
con.add(stmApi, datasourceApisGraph);
log.debug("Adding stmt " + stmInsertedDate.toString() + " into graph " + datasourceApisGraph.toString());
con.add(stmInsertedDate, datasourceApisGraph);

View File

@@ -39,11 +39,11 @@ public class AriadnePlusPublisherController {
}
@RequestMapping(value = "/feedProvenance", method = RequestMethod.POST)
public void feedProvenance(@RequestParam final String datasourceApi, @RequestParam(required = false) String ariadneplusTarget) throws AriadnePlusPublisherException {
public void feedProvenance(@RequestParam final String datasource, @RequestParam final String datasourceApi, @RequestParam(required = false) String ariadneplusTarget) throws AriadnePlusPublisherException {
if (ariadneplusTarget==null) {
ariadneplusTarget = DEFAULT_TARGET_ENDPOINT;
}
getAriadnePlusPublisherHelper().feedProvenance(datasourceApi, getTarget(ariadneplusTarget));
getAriadnePlusPublisherHelper().feedProvenance(datasource, datasourceApi, getTarget(ariadneplusTarget));
}
@RequestMapping(value = "/dropDatasourceApisPartitionInfo", method = RequestMethod.POST)

View File

@@ -35,10 +35,10 @@ public class AriadnePlusPublisherHelper {
}
public void feedProvenance(final String datasourceApi, final AriadnePlusTargets target) throws AriadnePlusPublisherException {
public void feedProvenance(final String datasource, final String datasourceApi, final AriadnePlusTargets target) throws AriadnePlusPublisherException {
switch(target){
case GRAPHDB:
feedProvenance(datasourceApi);
feedProvenance(datasource, datasourceApi);
break;
default: throw new AriadnePlusPublisherException("Target "+target+" not supported yet");
}
@@ -72,10 +72,10 @@ public class AriadnePlusPublisherHelper {
graphDBClient.feed(record);
}
private void feedProvenance(final String datasourceApi) throws AriadnePlusPublisherException {
log.debug("Feed Provenance " + datasourceApi);
private void feedProvenance(final String datasource, final String datasourceApi) throws AriadnePlusPublisherException {
log.debug("Feed Provenance " + datasource + " - " + datasourceApi);
GraphDBClient graphDBClient = this.graphdbClientFactory.getGraphDBClient();
graphDBClient.feedProvenance(datasourceApi);
graphDBClient.feedProvenance(datasource, datasourceApi);
}
private void dropDatasourceApisPartitionInfo(final String datasourceApi) throws AriadnePlusPublisherException {
@@ -87,8 +87,7 @@ public class AriadnePlusPublisherHelper {
private long unpublishGraphDB(final String datasourceInterface) {
log.info("Unpublishing from graphdb "+datasourceInterface);
GraphDBClient graphDBClient = this.graphdbClientFactory.getGraphDBClient();
long deletedTriples = graphDBClient.drop(datasourceInterface);
log.info("# triples deleted for "+datasourceInterface+": "+deletedTriples);
return deletedTriples;
return 0;
}
}

View File

@@ -46,6 +46,7 @@ public class PublishGraphDBJobNode extends AsyncJobNode {
private String publisherEndpoint;
private String datasourceInterface;
private String datasource;
//for parallel requests to the publisher endpoint
private int nThreads = 5;
@@ -178,6 +179,9 @@ public class PublishGraphDBJobNode extends AsyncJobNode {
String datasourceInterfaceValue = getDatasourceInterface();
log.info("feeding provenance for datasourceInterface " + datasourceInterfaceValue);
params.add(new BasicNameValuePair("datasourceApi", datasourceInterfaceValue));
String datasourceValue = getDatasource();
log.info("feeding provenance for datasource " + datasourceValue);
params.add(new BasicNameValuePair("datasource", datasourceValue));
UrlEncodedFormEntity ent = new UrlEncodedFormEntity(params, "UTF-8");
post.setEntity(ent);
responsePFPOST = client.execute(post);
@@ -256,4 +260,14 @@ public class PublishGraphDBJobNode extends AsyncJobNode {
token.setProgressProvider(new ResultsetProgressProvider(token.getEnv().getAttribute(getEprParam(), ResultSet.class), this.resultSetClient));
}
public String getDatasource() {
return datasource;
}
public void setDatasource(String datasource) {
this.datasource = datasource;
}
}

View File

@@ -105,9 +105,10 @@
<PARAM name="wfTemplateId" value="7426eaaf-93c9-4914-b69a-c9d5c478405a_V29ya2Zsb3dUZW1wbGF0ZURTUmVzb3VyY2VzL1dvcmtmbG93VGVtcGxhdGVEU1Jlc291cmNlVHlwZQ=="/>
<PARAM name="wfTemplateParams">
<MAP>
<ENTRY key="dsId" value="d52b32f0-2b19-4bef-930d-470ef044def5_UmVwb3NpdG9yeVNlcnZpY2VSZXNvdXJjZXMvUmVwb3NpdG9yeVNlcnZpY2VSZXNvdXJjZVR5cGU="/>
<ENTRY key="interface" value="api_________::ariadne_plus::ads::1"/>
<ENTRY key="cleanMdstoreId" ref="cleanMdstoreId"/>
<ENTRY key="dsId" value="$dsId$" />
<ENTRY key="dsName" value="$dsName$" />
<ENTRY key="interface" value="$interface$" />
<ENTRY key="cleanMdstoreId" ref="cleanMdstoreId"/>
<ENTRY key="publisherEndpoint" ref="publisherEndpoint" />
</MAP>
</PARAM>

View File

@@ -9,10 +9,11 @@
<BODY>
<CONFIGURATION>
<PARAMETERS>
<PARAM description="Datasource Name" name="dsName" required="true" type="string"/>
<PARAM description="Datasource Id" name="dsId" required="true" type="string"/>
<PARAM description="Datasource Interface" name="interface" required="true" type="string"/>
<PARAM description="Store for transformed records" name="cleanMdstoreId" required="true" type="string"/>
<PARAM name="publisherEndpoint" description="AriadnePlus Publisher Endpoint" required="true" managedBy="user" type="string">http://localhost:8080/ariadneplus/publish</PARAM>
<PARAM name="publisherEndpoint" description="AriadnePlus Publisher Endpoint" required="true" type="string">http://localhost:8080/ariadneplus/publish</PARAM>
</PARAMETERS>
<WORKFLOW>
<NODE isStart="true" name="fetchMdStore" type="FetchMDStoreRecords">
@@ -31,6 +32,7 @@
<PARAM name="eprParam" value="clean_epr"/>
<PARAM name="publisherEndpoint" ref="publisherEndpoint"/>
<PARAM name="datasourceInterface" ref="interface"/>
<PARAM name="datasource" ref="dsName"/>
</PARAMETERS>
<ARCS>
<ARC to="success"/>