modified log level for a few messages

Enrico Ottonello 2021-10-22 12:32:04 +02:00
parent 4a4da01d6b
commit 0010458a23
2 changed files with 15 additions and 10 deletions

GraphDBClient.java

@@ -348,7 +348,7 @@ public class GraphDBClient {
         String recordsIndexReport = "";
         String collectionIndexReport = "";
         try {
-            log.info("Start indexing from "+ datasource + " " + collectionId);
+            log.debug("Start indexing from "+ datasource + " " + collectionId);
             runSPQRLQuery.setupConnection( getWriterUser(), getWriterPwd(), this.graphDBServerUrl, getRepository());
             runSPQRLQuery.setParser(parseRDFJSON);
             runSPQRLQuery.setResourceManager(resourceManager);
@@ -409,7 +409,7 @@ public class GraphDBClient {
             return "empty identifier";
         }
         List <String> identifiers = Arrays.asList(identifier);
-        log.info("Start indexing from "+ datasource + " " + collectionId);
+        log.info("Indexing "+ datasource + " " + collectionId + " " + identifier);
         runSPQRLQuery.setupConnection( getWriterUser(), getWriterPwd(), this.graphDBServerUrl, getRepository());
         runSPQRLQuery.setParser(parseRDFJSON);
         runSPQRLQuery.setResourceManager(resourceManager);

RunSPARQLQueryService.java

@@ -77,7 +77,7 @@ public class RunSPARQLQueryService {
         recordIds.forEach(recordId -> {
             int waitAmount=1;
             int retryResult = 0;
-            int operationResult = executeQueryGraph(selectQueryTemplate, recordId, isCollection);
+            int operationResult = executeQueryGraph(selectQueryTemplate, datasource, collectionId, recordId, isCollection);
             if (operationResult!=200) {
                 log.error(recordId + " error_code: "+ operationResult);
                 if (operationResult==-5) {
@@ -90,7 +90,7 @@ public class RunSPARQLQueryService {
                     } catch (InterruptedException ie) {
                         log.error(ie);
                     }
-                    retryResult = executeQueryGraph(selectQueryTemplate, recordId, isCollection);
+                    retryResult = executeQueryGraph(selectQueryTemplate, datasource, collectionId, recordId, isCollection);
                     log.debug("retryResult: " + retryResult);
                 } while (retryResult!=200);
                 operationResult = retryResult;
@@ -125,11 +125,11 @@ public class RunSPARQLQueryService {
         String report = "Total indexed records: "+ successCodesCount.get(0).intValue() +
                 " , " + "Total errors: "+ errorCodesCount.get(0).intValue();
-        log.info(report);
+        log.debug(report);
         return report;
     }
 
-    private int executeQueryGraph(String selectQueryTemplate, String recordId, boolean isCollection){
+    private int executeQueryGraph(String selectQueryTemplate, String datasource, String collectionId, String recordId, boolean isCollection){
         // decrease queries execution rate to avoid heap overload on graphdb
         try {
             Thread.sleep(50);
@@ -149,6 +149,7 @@ public class RunSPARQLQueryService {
             long end = System.currentTimeMillis();
             int triples = resultsModel.size();
             if (resultsModel.size()==0) {
+                log.error("No data found on graphdb for "+datasource+" "+collectionId+" "+recordId);
                 return -2;
             }
             recordWriter = new StringWriter();
@@ -156,23 +157,27 @@ public class RunSPARQLQueryService {
             Rio.write(resultsModel, rdfRecordWriter);
             parser.setCollection(isCollection);
             String bufferedRecord = recordWriter.toString();
-            // log.debug(bufferedRecord);
             int size = parser.parse(bufferedRecord);
             log.debug(recordId+" is_coll: "+isCollection+" query_time(sec): "+(end-start)/1000 +" triples: "+triples +" json: "+size);
             if (size==-1) {
+                log.error("RDF parsing failed for "+datasource+" "+collectionId+" "+recordId);
                 return -4;
             }
             resourceManager.manage(parser);
-            return bulkUpload.index(resourceManager, isCollection);
+            int ret = bulkUpload.index(resourceManager, isCollection);
+            if (ret!=200) {
+                log.error("Indexing error for "+datasource+" "+collectionId+" "+recordId);
+            }
+            return ret;
         } catch (HTTPQueryEvaluationException qe) {
-            log.error(qe);
+            log.error("HTTPQueryEvaluationException for "+datasource+" "+collectionId+" "+recordId, qe);
             if (qe.getMessage()!=null &&
                     qe.getMessage().contains("Insufficient free Heap Memory")) {
                 return -5;
             }
             return -6;
         } catch(Exception e){
-            log.error(e);
+            log.error("Generic error for "+datasource+" "+collectionId+" "+recordId, e);
             return -1;
         } finally{
             closeConnection();
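
Taken together, the changes standardize a small status-code contract around executeQueryGraph: 200 means the record was indexed, -2 no data found in GraphDB, -4 RDF parsing failed, -5 heap exhaustion on the GraphDB side (the only case the caller retries), -6 other query-evaluation failures, and -1 anything else. Below is a minimal, self-contained sketch of that contract and the retry loop; the class and method names (RetrySketch, queryOnce) are hypothetical stand-ins, and the doubling backoff with a cap is an assumption, since the diff only shows waitAmount=1 and a sleep inside the loop.

import java.util.concurrent.ThreadLocalRandom;

public class RetrySketch {

    // Status codes as used in the diff: 200 = indexed, negatives = failures.
    static final int OK = 200;
    static final int NO_DATA = -2;        // empty result from GraphDB
    static final int PARSE_FAILED = -4;   // parser.parse(...) returned -1
    static final int HEAP_EXHAUSTED = -5; // "Insufficient free Heap Memory"
    static final int QUERY_FAILED = -6;   // other HTTPQueryEvaluationException
    static final int GENERIC_ERROR = -1;

    // Hypothetical stand-in for executeQueryGraph(...): returns a status code.
    static int queryOnce(String recordId) {
        return ThreadLocalRandom.current().nextBoolean() ? OK : HEAP_EXHAUSTED;
    }

    // Mirrors the do/while in RunSPARQLQueryService: only a -5 triggers the
    // retry loop, which then runs until a 200 comes back. The doubling wait
    // and its cap are assumptions; the diff shows only waitAmount=1.
    static int indexWithRetry(String recordId) throws InterruptedException {
        int result = queryOnce(recordId);
        if (result == HEAP_EXHAUSTED) {
            int waitAmount = 1;
            int retryResult;
            do {
                Thread.sleep(waitAmount * 1000L);
                waitAmount = Math.min(waitAmount * 2, 64); // assumed cap
                retryResult = queryOnce(recordId);
            } while (retryResult != OK);
            result = retryResult;
        }
        return result;
    }

    public static void main(String[] args) throws InterruptedException {
        System.out.println("rec-1 final status: " + indexWithRetry("rec-1"));
    }
}

Keeping the negative codes distinct lets the caller treat only heap exhaustion as transient while surfacing data, parsing, and indexing errors immediately, which is why each error branch now logs datasource, collectionId, and recordId together.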