Added another check on parsing JSON from RDF statements

This commit is contained in:
Enrico Ottonello 2020-07-24 00:32:49 +02:00
parent 5eaa655f01
commit eb61aebcd5
3 changed files with 15 additions and 5 deletions

View File

@@ -100,7 +100,13 @@ public class RunSPARQLQueryService {
if (isCollection) { if (isCollection) {
parser.setCollection(true); parser.setCollection(true);
} }
parser.parse(recordWriter.toString()); String bufferedRecord = recordWriter.toString();
log.debug("before json parser :: "+bufferedRecord);
int size = parser.parse(bufferedRecord);
log.debug("json elements: "+size);
if (size==-1) {
return -4;
}
resourceManager.manage(parser); resourceManager.manage(parser);
return bulkUpload.index(resourceManager, isCollection); return bulkUpload.index(resourceManager, isCollection);
} catch(Exception e){ } catch(Exception e){
@@ -199,7 +205,7 @@ public class RunSPARQLQueryService {
String jsonRecord = null; String jsonRecord = null;
List<String> results = new ArrayList<>(); List<String> results = new ArrayList<>();
try { try {
log.debug("Started at: "+Calendar.getInstance().getTime().toString()); // log.debug("Started at: "+Calendar.getInstance().getTime().toString());
TupleQuery selectQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query); TupleQuery selectQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
TupleQueryResult selectQueryResult = selectQuery.evaluate(); TupleQueryResult selectQueryResult = selectQuery.evaluate();
int counter = 0; int counter = 0;
@@ -210,7 +216,7 @@ public class RunSPARQLQueryService {
counter++; counter++;
} }
log.debug("Total records retrieved: "+counter); log.debug("Total records retrieved: "+counter);
log.debug("Finished at: "+Calendar.getInstance().getTime().toString()); // log.debug("Finished at: "+Calendar.getInstance().getTime().toString());
} catch(Exception e){ } catch(Exception e){
e.printStackTrace(); e.printStackTrace();
} finally{ } finally{

View File

@@ -51,13 +51,17 @@ public class ParseRDFJSON {
} }
public void parse(String json) throws ParseException { public int parse(String json) throws ParseException {
setJson(json); setJson(json);
fillMap(); fillMap();
DocumentContext jsonContext = JsonPath.parse(json); DocumentContext jsonContext = JsonPath.parse(json);
JSONArray entries = jsonContext.read(getCatalogEntryJsonPath()); JSONArray entries = jsonContext.read(getCatalogEntryJsonPath());
int size = entries.size(); int size = entries.size();
if (size==0) {
return -1;
}
it = entries.iterator(); it = entries.iterator();
return size;
} }
public boolean hasNextElement(){ public boolean hasNextElement(){

View File

@@ -57,7 +57,7 @@ public class GraphDbReaderAndESIndexTest {
final ClassPathResource queryTemplateResource; final ClassPathResource queryTemplateResource;
boolean testRecord = true; boolean testRecord = true;
if (testRecord) { if (testRecord) {
recordId = "https://ariadne-infrastructure.eu/aocat/Resource/10DB6647-5D40-397A-8434-8CED55B2FC29"; recordId = "https://ariadne-infrastructure.eu/aocat/Resource/FE3155A7-AF9F-3C5F-A92E-93041EF495E0";
queryTemplateResource = new ClassPathResource("eu/dnetlib/ariadneplus/sparql/read_record_data_template.sparql"); queryTemplateResource = new ClassPathResource("eu/dnetlib/ariadneplus/sparql/read_record_data_template.sparql");
} }
else { else {