removed a few logs
parent 11e00c806d
commit f61f794c25
Changed paths, all under dnet-ariadneplus-graphdb-publisher/src/main/java/eu/dnetlib/ariadneplus:
    elasticsearch
    graphdb
    reader
@@ -125,11 +125,11 @@ public class BulkUpload {
 			request.add(new IndexRequest(elasticSearchIndexName).id(idES)
 					.source(ace.toJson(),XContentType.JSON));
 
-			log.info("Indexing: "+idES+" :: "+ace.toJson());
+//			log.info("Indexing: "+idES+" :: "+ace.toJson());
 			BulkResponse bulkResponse = client.bulk(request, RequestOptions.DEFAULT);
 			if (bulkResponse!=null) {
 				esResponseCode = bulkResponse.status().getStatus();
-				log.info("Indexing to ES completed with status: " + bulkResponse.status());
+//				log.info("Indexing to ES completed with status: " + bulkResponse.status());
 				if (bulkResponse.hasFailures()) {
 					log.error("FailureMessage: " + bulkResponse.buildFailureMessage());
 				}
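
Aside: the hunk above follows the standard Elasticsearch high-level REST client bulk pattern. A minimal, self-contained sketch of that pattern, assuming a placeholder host, index name, id, and payload (none of these values come from this repository):

    import org.apache.http.HttpHost;
    import org.elasticsearch.action.bulk.BulkRequest;
    import org.elasticsearch.action.bulk.BulkResponse;
    import org.elasticsearch.action.index.IndexRequest;
    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.RestClient;
    import org.elasticsearch.client.RestHighLevelClient;
    import org.elasticsearch.common.xcontent.XContentType;

    public class BulkIndexSketch {
        public static void main(String[] args) throws Exception {
            // Placeholder endpoint: the publisher configures its real ES host elsewhere.
            try (RestHighLevelClient client = new RestHighLevelClient(
                    RestClient.builder(new HttpHost("localhost", 9200, "http")))) {
                BulkRequest request = new BulkRequest();
                // One IndexRequest per record, as in BulkUpload above.
                request.add(new IndexRequest("catalog").id("record-1")
                        .source("{\"title\":\"example\"}", XContentType.JSON));
                BulkResponse response = client.bulk(request, RequestOptions.DEFAULT);
                // A bulk call can partially fail, so check hasFailures() as well as the status.
                if (response.hasFailures()) {
                    System.err.println("FailureMessage: " + response.buildFailureMessage());
                } else {
                    System.out.println("Status: " + response.status().getStatus());
                }
            }
        }
    }
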
@@ -91,10 +91,10 @@ public class GraphDBClient {
 		try (RepositoryConnection con = repository.getConnection()) {
 			con.begin();
 			String recordURI = getRecordURI(objIdentifier, datasourceApi);
-			log.debug("Trying to adding record with recordURI " + recordURI + " into graph " + graph);
+//			log.debug("Trying to adding record with recordURI " + recordURI + " into graph " + graph);
 			con.add(IOUtils.toInputStream(getRDFBlock(record), "UTF-8"), recordURI, RDFFormat.RDFXML, graph);
 			con.commit();
-			log.debug("statement added");
+//			log.debug("statement added");
 			con.close();
 		}
 		catch (RDF4JException e) {
@@ -126,13 +126,13 @@ public class GraphDBClient {
 		IRI datasourceApisGraph = factory.createIRI(getGraphDBBaseURI(), "datasourceApis");
 		try (RepositoryConnection con = repository.getConnection()) {
 			con.begin();
-			log.debug("Adding stmt " + stmApi.toString() + " into graph " + datasourceApisGraph.toString());
+//			log.debug("Adding stmt " + stmApi.toString() + " into graph " + datasourceApisGraph.toString());
 			con.remove(rApi, INSERTED_IN_DATE, null, datasourceApisGraph);
 			con.add(stmApi, datasourceApisGraph);
-			log.debug("Adding stmt " + stmInsertedDate.toString() + " into graph " + datasourceApisGraph.toString());
+//			log.debug("Adding stmt " + stmInsertedDate.toString() + " into graph " + datasourceApisGraph.toString());
 			con.add(stmInsertedDate, datasourceApisGraph);
 			con.commit();
-			log.debug("statements added");
+//			log.debug("statements added");
 			con.close();
 		}
 		catch (RDF4JException e) {
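
Aside: both GraphDBClient hunks wrap their updates in an explicit RDF4J transaction (begin/add/commit on a RepositoryConnection). A minimal sketch of that idiom, assuming a placeholder GraphDB endpoint and example IRIs rather than the repo's real graph names:

    import org.eclipse.rdf4j.model.IRI;
    import org.eclipse.rdf4j.model.ValueFactory;
    import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
    import org.eclipse.rdf4j.repository.Repository;
    import org.eclipse.rdf4j.repository.RepositoryConnection;
    import org.eclipse.rdf4j.repository.http.HTTPRepository;

    public class GraphWriteSketch {
        public static void main(String[] args) {
            // Placeholder endpoint: GraphDBClient derives its real URL from configuration.
            Repository repository = new HTTPRepository("http://localhost:7200/repositories/demo");
            repository.init();
            ValueFactory factory = SimpleValueFactory.getInstance();
            IRI graph = factory.createIRI("http://example.org/graphs/datasourceApis");
            IRI subject = factory.createIRI("http://example.org/api/1");
            IRI insertedInDate = factory.createIRI("http://example.org/insertedInDate");
            try (RepositoryConnection con = repository.getConnection()) {
                con.begin();                                      // open an explicit transaction
                con.remove(subject, insertedInDate, null, graph); // drop any previous value
                con.add(subject, insertedInDate, factory.createLiteral("2020-01-01"), graph);
                con.commit();                                     // publish both changes atomically
                // closing the connection without commit() rolls the transaction back
            } finally {
                repository.shutDown();
            }
        }
    }
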
@@ -335,12 +335,11 @@ public class GraphDBClient {
 
 	public String indexOnES(String datasource, String collectionId) throws AriadnePlusPublisherException {
 		try {
 			log.info("Start indexing from "+ datasource + " " + collectionId);
 			runSPQRLQuery.setupConnection( getWriterUser(), getWriterPwd(), this.graphDBServerUrl, getRepository());
 			runSPQRLQuery.setParser(parseRDFJSON);
 			runSPQRLQuery.setResourceManager(resourceManager);
 			runSPQRLQuery.setBulkUpload(bulkUpload);
-//		String recordId = "https://ariadne-infrastructure.eu/aocat/Resource/02E4F4B5-24B7-3AD7-B460-CFA8B1F0BD1F";
-//		List<String> recordIds = Arrays.asList(recordId);
+			List<String> recordIds = runSPQRLQuery.selectRecordIds(datasource, collectionId);
 			final ClassPathResource queryTemplateResource = new ClassPathResource("eu/dnetlib/ariadneplus/sparql/read_record_data_template.sparql");
 			String queryTemplate = IOUtils.toString(queryTemplateResource.getInputStream(), StandardCharsets.UTF_8.name());
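
Aside: indexOnES loads its SPARQL query from a classpath template and fills the %datasource / %collectionId placeholders via replaceAll. A minimal sketch of that load-and-substitute step; the template path and placeholder names are copied from the diff, the substituted values are made-up examples:

    import java.nio.charset.StandardCharsets;
    import org.apache.commons.io.IOUtils;
    import org.springframework.core.io.ClassPathResource;

    public class QueryTemplateSketch {
        public static void main(String[] args) throws Exception {
            ClassPathResource resource = new ClassPathResource(
                    "eu/dnetlib/ariadneplus/sparql/read_record_data_template.sparql");
            String template = IOUtils.toString(resource.getInputStream(), StandardCharsets.UTF_8);
            // %datasource and %collectionId are literal tokens inside the template file.
            String query = template
                    .replaceAll("%datasource", "exampleDatasource")
                    .replaceAll("%collectionId", "exampleCollection");
            System.out.println(query);
        }
    }
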
@@ -16,6 +16,7 @@ import org.springframework.stereotype.Service;
 
 import java.io.StringWriter;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Calendar;
 import java.util.List;
 
@@ -64,11 +65,26 @@ public class RunSPARQLQueryService {
 			return null;
 		final String selectQueryTemplate = queryTemplate.replaceAll("%datasource", datasource).replaceAll("%collectionId", collectionId);
 		log.info("Start indexing "+ recordIds.size()+ " records ...");
+		final List<Integer> errorCodesCount = Arrays.asList(new Integer(0));
+		final List<Integer> successCodesCount = Arrays.asList(new Integer(0));
 		recordIds.forEach(recordId -> {
 			int operationResult = executeQueryGraph(selectQueryTemplate, recordId, isCollection);
-			log.info("Indexing final code: "+Integer.toString(operationResult));
+			if (operationResult!=200) {
+				log.error(recordId + " error_code: "+ Integer.toString(operationResult));
+				int currentErrorsCount = errorCodesCount.get(0).intValue();
+				currentErrorsCount+=1;
+				errorCodesCount.set(0, new Integer(currentErrorsCount));
+			}
+			else {
+				int currentSuccessCount = successCodesCount.get(0).intValue();
+				currentSuccessCount+=1;
+				successCodesCount.set(0, new Integer(currentSuccessCount));
+			}
 		});
-		return "ok";
+		String report = "Total indexed records: "+ successCodesCount.get(0).intValue() +
+				" , " + "Total errors: "+ errorCodesCount.get(0).intValue();
+		log.info(report);
+		return report;
 	}
 
 	private int executeQueryGraph(String selectQueryTemplate, String recordId, boolean isCollection){
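
Aside: in the hunk above, the single-element Arrays.asList(new Integer(0)) lists exist only so the forEach lambda can mutate the two counters despite Java's effectively-final capture rule, and new Integer(...) is deprecated boxing. A sketch of the same tally using AtomicInteger, the more idiomatic choice; the class, variable names, and stand-in indexing method are illustrative, not from this repository:

    import java.util.Arrays;
    import java.util.List;
    import java.util.concurrent.atomic.AtomicInteger;

    public class IndexTallySketch {
        // Stand-in for executeQueryGraph(...): returns an HTTP-style status code.
        static int indexOne(String recordId) {
            return recordId.isEmpty() ? 500 : 200;
        }

        public static void main(String[] args) {
            List<String> recordIds = Arrays.asList("a", "", "b");
            AtomicInteger errors = new AtomicInteger();
            AtomicInteger successes = new AtomicInteger();
            recordIds.forEach(recordId -> {
                // The AtomicInteger reference stays effectively final; only its value mutates.
                if (indexOne(recordId) != 200) {
                    errors.incrementAndGet();
                } else {
                    successes.incrementAndGet();
                }
            });
            System.out.println("Total indexed records: " + successes.get()
                    + " , Total errors: " + errors.get());
        }
    }
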
@@ -78,7 +94,7 @@ public class RunSPARQLQueryService {
 		} catch (InterruptedException e) {
 			e.printStackTrace();
 		}
-		log.info("Retrieving "+recordId+" - isCollection:"+isCollection );
+//		log.info("Retrieving "+recordId+" - isCollection:"+isCollection );
 		String query = selectQueryTemplate.replaceAll("%record", "<"+recordId+">");
 		openConnection();
 		StringWriter recordWriter = null;
@@ -90,7 +106,7 @@ public class RunSPARQLQueryService {
 			resultsModel = QueryResults.asModel(graphQueryResult);
 			graphQueryResult.close();
 //			log.debug("Finished at: "+Calendar.getInstance().getTime().toString());
-			log.info("Statements retrieved: " + resultsModel.size());
+//			log.info("Statements retrieved: " + resultsModel.size());
 			if (resultsModel.size()==0) {
 				return -2;
 			}
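
Aside: the resultsModel in this hunk comes from evaluating a SPARQL graph (CONSTRUCT) query and materializing the streamed result, the standard RDF4J pattern. A sketch under assumed placeholders (endpoint and query are examples; the real query comes from the SPARQL template above):

    import org.eclipse.rdf4j.model.Model;
    import org.eclipse.rdf4j.query.GraphQuery;
    import org.eclipse.rdf4j.query.GraphQueryResult;
    import org.eclipse.rdf4j.query.QueryLanguage;
    import org.eclipse.rdf4j.query.QueryResults;
    import org.eclipse.rdf4j.repository.Repository;
    import org.eclipse.rdf4j.repository.RepositoryConnection;
    import org.eclipse.rdf4j.repository.http.HTTPRepository;

    public class ConstructQuerySketch {
        public static void main(String[] args) {
            Repository repository = new HTTPRepository("http://localhost:7200/repositories/demo");
            repository.init();
            try (RepositoryConnection con = repository.getConnection()) {
                GraphQuery gq = con.prepareGraphQuery(QueryLanguage.SPARQL,
                        "CONSTRUCT { ?s ?p ?o } WHERE { ?s ?p ?o } LIMIT 10");
                GraphQueryResult graphQueryResult = gq.evaluate();
                // Materialize the streamed statements into an in-memory Model.
                Model resultsModel = QueryResults.asModel(graphQueryResult);
                graphQueryResult.close();
                System.out.println("Statements retrieved: " + resultsModel.size());
            } finally {
                repository.shutDown();
            }
        }
    }
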
@@ -105,7 +121,7 @@ public class RunSPARQLQueryService {
 			}
 			String bufferedRecord = recordWriter.toString();
 			int size = parser.parse(bufferedRecord);
-			log.debug("json elements: "+size);
+//			log.debug("json elements: "+size);
 			if (size==-1) {
 				return -4;
 			}
@@ -55,8 +55,8 @@ public class ParseRDFJSON {
 		setJson(json);
 		fillMap();
 		DocumentContext jsonContext = JsonPath.parse(json);
-		log.debug("jsonPath: "+getCatalogEntryJsonPath());
-		log.debug("json from jsonContext: "+json);
+//		log.debug("jsonPath: "+getCatalogEntryJsonPath());
+//		log.debug("json from jsonContext: "+json);
 		JSONArray entries = jsonContext.read(getCatalogEntryJsonPath());
 		int size = entries.size();
 		if (size==0) {
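
Aside: every hunk in this commit silences logging by commenting statements out. An alternative worth noting is to keep the statements and control them through log levels, so they can be re-enabled from configuration rather than a new commit. A sketch assuming a commons-logging-style Log, which matches the log.info/log.debug/log.error calls in the diff:

    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;

    public class LogLevelSketch {
        private static final Log log = LogFactory.getLog(LogLevelSketch.class);

        static void index(String idES, String json) {
            // The guard keeps message construction cheap when DEBUG is off; the
            // statement can be switched back on from logging configuration alone.
            if (log.isDebugEnabled()) {
                log.debug("Indexing: " + idES + " :: " + json);
            }
        }

        public static void main(String[] args) {
            index("record-1", "{\"title\":\"example\"}");
        }
    }
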