added REST method indexOnES(datasource, collectionId) to retrieve record data from GraphDB and index it into Elasticsearch

Enrico Ottonello 2020-06-12 18:14:41 +02:00
parent 064f298329
commit 06995feefa
12 changed files with 163 additions and 167 deletions

View File

@@ -1,13 +1,10 @@
package eu.dnetlib.ariadneplus.elasticsearch;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import javax.annotation.PostConstruct;
import eu.dnetlib.ariadneplus.elasticsearch.model.AgentInfo;
import eu.dnetlib.ariadneplus.elasticsearch.model.AriadneCatalogEntry;
import eu.dnetlib.ariadneplus.reader.ResourceManager;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.http.HttpHost;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
@@ -19,11 +16,20 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import javax.annotation.PostConstruct;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
@Service
public class BulkUpload {
@Value("${elasticsearch.url:localhost:9200}")
private String elasticsearchUrl;
private static final Log log = LogFactory.getLog(BulkUpload.class);
@Value("${elasticsearch.hostname}")
private String elasticSearchHostName;
@Value("${elasticsearch.indexname}")
private String elasticSearchIndexName;
private RestHighLevelClient client;
@@ -31,7 +37,7 @@ public class BulkUpload {
public void init() throws IOException {
client = new RestHighLevelClient(
RestClient.builder(
new HttpHost("elastic-test.ariadne.d4science.org",9200,"http")));
new HttpHost(elasticSearchHostName,9200,"http")));
}
@@ -46,13 +52,13 @@ public class BulkUpload {
testPublisher.setName("TEST");
ace.getPublisher().add(testPublisher);
String[] splits = ace.getIdentifier().split("/");
request.add(new IndexRequest("catalog_test").id(splits[splits.length-1])
request.add(new IndexRequest(elasticSearchIndexName).id(splits[splits.length-1])
.source(ace.toJson(),XContentType.JSON));
System.out.println("indexing to ES record "+ace.toJson());
log.debug("Indexing to ES: "+ace.toJson());
BulkResponse bulkResponse = client.bulk(request, RequestOptions.DEFAULT);
System.out.println("indexing to ES record completed "+bulkResponse.status());
log.info("Indexing to ES completed with status: "+bulkResponse.status());
if (bulkResponse.hasFailures()) {
System.out.println("FailureMessage: "+bulkResponse.buildFailureMessage());
log.error("FailureMessage: "+bulkResponse.buildFailureMessage());
}
} catch (ClassNotFoundException e) {
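
For orientation, here is a minimal, self-contained sketch of the bulk-index flow this class now implements, assuming the Elasticsearch 7.x high-level REST client imported above; the host and index values mirror the defaults this commit externalizes into application.properties, and the sample identifier is the record IRI that appears later in GraphDBClient.

import org.apache.http.HttpHost;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.xcontent.XContentType;

public class BulkUploadSketch {
    public static void main(String[] args) throws Exception {
        // Host and index mirror the new elasticsearch.hostname / elasticsearch.indexname properties.
        try (RestHighLevelClient client = new RestHighLevelClient(
                RestClient.builder(new HttpHost("elastic-test.ariadne.d4science.org", 9200, "http")))) {
            BulkRequest request = new BulkRequest();
            // The document id is the last path segment of the record identifier, as in index() above.
            String identifier = "https://ariadne-infrastructure.eu/aocat/Resource/02E4F4B5-24B7-3AD7-B460-CFA8B1F0BD1F";
            String[] splits = identifier.split("/");
            request.add(new IndexRequest("catalog_test").id(splits[splits.length - 1])
                    .source("{\"identifier\":\"" + identifier + "\"}", XContentType.JSON));
            BulkResponse response = client.bulk(request, RequestOptions.DEFAULT);
            if (response.hasFailures()) {
                System.err.println(response.buildFailureMessage());
            }
        }
    }
}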

View File

@@ -358,103 +358,4 @@ public class AriadneCatalogEntry {
public String toJson(){
return new Gson().toJson(this);
}
// public static AriadneCatalogEntry fromRDFJson(JsonElement json, String identifier, Map<String, JsonElement> map){
// AriadneCatalogEntry acim = new AriadneCatalogEntry();
// acim.setIdentifier(identifier.substring(identifier.lastIndexOf("/") + 1));
// JsonObject content = json.getAsJsonObject();
// for (Map.Entry<String, JsonElement> stringJsonElementEntry : content.entrySet()) {
// switch (stringJsonElementEntry.getKey()){
// case "https://www.ariadne-infrastructure.eu/property/accessPolicy":
// acim.setAccessPolicy(stringJsonElementEntry.getValue().getAsJsonArray().get(0).getAsJsonObject().get("value").getAsString());
// break;
// case "https://www.ariadne-infrastructure.eu/property/accessRights":
// acim.setAccessRights(stringJsonElementEntry.getValue().getAsJsonArray().get(0).getAsJsonObject().get("value").getAsString());
// break;
// case "https://www.ariadne-infrastructure.eu/property/contributor":
// JsonArray contributor_array = stringJsonElementEntry.getValue().getAsJsonArray();
// List<AgentInfo> contributor_list = new ArrayList();
// for (int i = 0; i < contributor_array.size() ; i++ ){
// String map_key = contributor_array.get(i).getAsJsonObject().get("value").getAsString();
// contributor_list.add(AgentInfo.fromRDFJson(map.get(map_key)));
//
// }
// acim.setContributor(contributor_list);
// break;
// case "https://www.ariadne-infrastructure.eu/property/description":
// acim.setDescription(stringJsonElementEntry.getValue().getAsJsonArray().get(0).getAsJsonObject().get("value").getAsString());
// break;
// case "https://www.ariadne-infrastructure.eu/property/isPartOf":
// acim.setPartOf(stringJsonElementEntry.getValue().getAsJsonArray().get(0).getAsJsonObject().get("value").getAsString());
// break;
// case "https://www.ariadne-infrastructure.eu/property/issued":
// acim.setIssued(stringJsonElementEntry.getValue().getAsJsonArray().get(0).getAsJsonObject().get("value").getAsString());
// break;
// case "https://www.ariadne-infrastructure.eu/property/landingPage":
// acim.setLandingPage(stringJsonElementEntry.getValue().getAsJsonArray().get(0).getAsJsonObject().get("value").getAsString());
// break;
// case "https://www.ariadne-infrastructure.eu/property/language":
// acim.setLanguage(stringJsonElementEntry.getValue().getAsJsonArray().get(0).getAsJsonObject().get("value").getAsString());
// break;
// case "https://www.ariadne-infrastructure.eu/property/modified":
// acim.setModified(stringJsonElementEntry.getValue().getAsJsonArray().get(0).getAsJsonObject().get("value").getAsString());
// break;
// case "https://www.ariadne-infrastructure.eu/property/nativeSubject":
// JsonArray nativeSubject_array = stringJsonElementEntry.getValue().getAsJsonArray();
// List<NativeSubject> nativeSubject_list = new ArrayList();
// for (int i = 0; i < nativeSubject_array.size() ; i++ ){
// String map_key = nativeSubject_array.get(i).getAsJsonObject().get("value").getAsString();
// nativeSubject_list.add(NativeSubject.fromRDFJson(map.get(map_key)));
//
// }
// acim.setNativeSubject(nativeSubject_list);
// break;
// case "https://www.ariadne-infrastructure.eu/property/originalId":
// acim.setOriginalId(stringJsonElementEntry.getValue().getAsJsonArray().get(0).getAsJsonObject().get("value").getAsString());
// break;
// case "https://www.ariadne-infrastructure.eu/property/resourceType":
// acim.setResourceType(stringJsonElementEntry.getValue().getAsJsonArray().get(0).getAsJsonObject().get("value").getAsString());
// break;
// case "https://www.ariadne-infrastructure.eu/property/spatial":
// JsonArray spatial_array = stringJsonElementEntry.getValue().getAsJsonArray();
// List<Spatial> spatial_list = new ArrayList();
// for (int i = 0; i < spatial_array.size() ; i++ ){
// String map_key = spatial_array.get(i).getAsJsonObject().get("value").getAsString();
// spatial_list.add(Spatial.fromRDFJson(map.get(map_key), map));
//
// }
// acim.setSpatial(spatial_list);
// break;
// case "https://www.ariadne-infrastructure.eu/property/temporal":
// JsonArray temporal_array = stringJsonElementEntry.getValue().getAsJsonArray();
// List<AriadneTemporal> temporal_list = new ArrayList<>();
// for(int i=0; i < temporal_array.size(); i++){
// String map_key = temporal_array.get(i).getAsJsonObject().get("value").getAsString();
// temporal_list.add(AriadneTemporal.fromRDFJson(map.get(map_key)));
// }
// acim.setTemporal(temporal_list);
// break;
// case "https://www.ariadne-infrastructure.eu/property/title":
// acim.setTitle(stringJsonElementEntry.getValue().getAsJsonArray().get(0).getAsJsonObject().get("value").getAsString());
// break;
// case "https://www.ariadne-infrastructure.eu/property/publisher":
// JsonArray publisher_array = stringJsonElementEntry.getValue().getAsJsonArray();
// List<AgentInfo> publisher_list = new ArrayList();
// for (int i = 0; i < publisher_array.size() ; i++ ){
// String map_key = publisher_array.get(i).getAsJsonObject().get("value").getAsString();
// publisher_list.add(AgentInfo.fromRDFJson(map.get(map_key)));
//
// }
// acim.setPublisher(publisher_list);
// break;
// case "https://www.ariadne-infrastructure.eu/property/archeologicalResourceType":
// acim.setArcheologicalResourceType(ArcheologicalResourceType.fromRDFJson(map.get(stringJsonElementEntry.getValue().getAsJsonArray().get(0).getAsJsonObject().get("value").getAsString())));
// }
//
//
// }
//
// System.out.println(acim.toJson());
// return acim;
// }
}
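
Since toJson() simply delegates to Gson, here is a tiny illustration with a stand-in POJO (Entry is hypothetical, not part of the model package):

import com.google.gson.Gson;

public class ToJsonSketch {
    // Hypothetical stand-in for a model bean such as AriadneCatalogEntry.
    static class Entry {
        String identifier = "02E4F4B5-24B7-3AD7-B460-CFA8B1F0BD1F";
        String title = "Example record";
    }

    public static void main(String[] args) {
        // Gson serializes the declared fields directly, no getters required.
        System.out.println(new Gson().toJson(new Entry()));
        // -> {"identifier":"02E4F4B5-24B7-3AD7-B460-CFA8B1F0BD1F","title":"Example record"}
    }
}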

View File

@@ -2,8 +2,16 @@ package eu.dnetlib.ariadneplus.graphdb;
import java.io.File;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.time.LocalDateTime;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import eu.dnetlib.ariadneplus.elasticsearch.BulkUpload;
import eu.dnetlib.ariadneplus.reader.ResourceManager;
import eu.dnetlib.ariadneplus.reader.RunSPARQLQueryService;
import eu.dnetlib.ariadneplus.reader.json.ParseRDFJSON;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
@@ -23,19 +31,32 @@ import org.eclipse.rdf4j.rio.RDFFormat;
import eu.dnetlib.ariadneplus.publisher.AriadnePlusPublisherException;
import eu.dnetlib.ariadneplus.rdf.RecordParserHelper;
import net.sf.saxon.s9api.SaxonApiException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.ClassPathResource;
import org.springframework.stereotype.Component;
/**
* @author enrico.ottonello
*
*/
@Component
public class GraphDBClient {
private static final Log log = LogFactory.getLog(GraphDBClient.class);
public static final String PROVENANCE_NS = "http://www.d-net.research-infrastructures.eu/provenance/";
public static final int NUM_RECORDS_THRESHOLD = 10;
public static final int NUM_RECORDS_THRESHOLD = 10;
@Autowired
private RunSPARQLQueryService runSPQRLQuery;
@Autowired
private ParseRDFJSON parseRDFJSON;
@Autowired
private ResourceManager resourceManager;
@Autowired
private BulkUpload bulkUpload;
private RecordParserHelper recordParserHelper;
private String graphDBServerUrl;
private String graphDBBaseURI;
@@ -43,7 +64,7 @@ public class GraphDBClient {
private String writerPwd;
private String repository;
protected GraphDBClient(final RecordParserHelper recordParserHelper,
protected void setup(final RecordParserHelper recordParserHelper,
final String graphDBServerUrl, final String graphDBBaseURI, final String writerUser, final String writerPwd, final String repository) {
this.recordParserHelper = recordParserHelper;
this.graphDBServerUrl = graphDBServerUrl;
@@ -318,4 +339,31 @@ public class GraphDBClient {
throw new AriadnePlusPublisherException(e);
}
}
public RunSPARQLQueryService getRunSPQRLQuery() {
return runSPQRLQuery;
}
public void setRunSPQRLQuery(RunSPARQLQueryService runSPQRLQuery) {
this.runSPQRLQuery = runSPQRLQuery;
}
public String indexOnES(String datasource, String collectionId) throws AriadnePlusPublisherException {
try {
runSPQRLQuery.setupConnection( getWriterUser(), getWriterPwd(), this.graphDBServerUrl, getRepository());
runSPQRLQuery.setParser(parseRDFJSON);
runSPQRLQuery.setResourceManager(resourceManager);
runSPQRLQuery.setBulkUpload(bulkUpload);
String recordId = "<https://ariadne-infrastructure.eu/aocat/Resource/02E4F4B5-24B7-3AD7-B460-CFA8B1F0BD1F>";
List<String> recordIds = Arrays.asList(recordId);
// List<String> recordIds = runSPQRLQuery.selectRecordIds(datasource, collectionId);
final ClassPathResource queryTemplateResource = new ClassPathResource("eu/dnetlib/ariadneplus/sparql/read_record_data_template.sparql");
String queryTemplate = IOUtils.toString(queryTemplateResource.getInputStream(), StandardCharsets.UTF_8.name());
runSPQRLQuery.executeMultipleQueryGraph(queryTemplate, recordIds, datasource, collectionId);
}catch(Throwable e){
log.error(e);
throw new AriadnePlusPublisherException(e);
}
return "ok";
}
}
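
A minimal sketch of the template mechanics indexOnES relies on, assuming the SPARQL file is on the classpath: the template is read once and the %record placeholder is substituted per record, mirroring executeQueryGraph in RunSPARQLQueryService below (the printed query depends on the template content, which this diff does not show).

import java.nio.charset.StandardCharsets;
import org.apache.commons.io.IOUtils;
import org.springframework.core.io.ClassPathResource;

public class TemplateSketch {
    public static void main(String[] args) throws Exception {
        // Same resource path as in GraphDBClient.indexOnES().
        ClassPathResource res = new ClassPathResource(
                "eu/dnetlib/ariadneplus/sparql/read_record_data_template.sparql");
        String template = IOUtils.toString(res.getInputStream(), StandardCharsets.UTF_8.name());
        // Inject one record IRI in place of the %record placeholder.
        String recordId = "<https://ariadne-infrastructure.eu/aocat/Resource/02E4F4B5-24B7-3AD7-B460-CFA8B1F0BD1F>";
        System.out.println(template.replaceAll("%record", recordId));
    }
}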

View File

@@ -31,9 +31,12 @@ public class GraphDBClientFactory {
@Autowired
private RecordParserHelper recordParserHelper;
@Autowired
private GraphDBClient graphDBClient;
public GraphDBClient getGraphDBClient() {
log.debug("Creating GraphDBClient for "+graphDBServerUrl);
return new GraphDBClient(recordParserHelper, graphDBServerUrl, graphDBBaseURI, writerUser, writerPwd, repository);
graphDBClient.setup(recordParserHelper, graphDBServerUrl, graphDBBaseURI, writerUser, writerPwd, repository);
return graphDBClient;
}
public RecordParserHelper getRecordParserHelper() {

View File

@@ -69,4 +69,8 @@ public class AriadnePlusPublisherController {
return getAriadnePlusPublisherHelper().feedFromURL(dataUrl, context, getTarget(DEFAULT_TARGET_ENDPOINT));
}
@RequestMapping(value = "/indexOnES", method = RequestMethod.POST)
public String indexOnES(@RequestParam final String datasource, @RequestParam final String collectionId) throws AriadnePlusPublisherException {
return getAriadnePlusPublisherHelper().indexOnES(datasource, collectionId, getTarget(DEFAULT_TARGET_ENDPOINT));
}
}
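
As a usage sketch, the new endpoint can be called with any HTTP client; host, port, and context path below are assumptions taken from the application.properties in this commit, and the datasource/collectionId values come from the SPARQL FROM clause further down.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class IndexOnESCall {
    public static void main(String[] args) throws Exception {
        // POST /indexOnES; Spring binds @RequestParam values from the query string.
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8281/ariadneplus-graphdb/indexOnES"
                        + "?datasource=ads&collectionId=271"))
                .POST(HttpRequest.BodyPublishers.noBody())
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode() + " " + response.body()); // expects 200 "ok"
    }
}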

View File

@@ -88,6 +88,17 @@ public class AriadnePlusPublisherHelper {
return res;
}
public String indexOnES(final String datasource, final String collectionId, final AriadnePlusTargets target) throws AriadnePlusPublisherException {
String res;
switch(target){
case GRAPHDB:
res = indexOnES(datasource, collectionId);
break;
default: throw new AriadnePlusPublisherException("Target "+target+" not supported yet");
}
return res;
}
private void publishGraphDB(final String record) throws AriadnePlusPublisherException {
log.debug("Publishing on graphdb");
GraphDBClient graphDBClient = this.graphdbClientFactory.getGraphDBClient();
@@ -124,4 +135,10 @@ public class AriadnePlusPublisherHelper {
GraphDBClient graphDBClient = this.graphdbClientFactory.getGraphDBClient();
return graphDBClient.feedFromURL(dataUrl, context);
}
private String indexOnES(final String datasource, final String collectionId) throws AriadnePlusPublisherException {
log.info("indexOnES "+datasource + " " + collectionId);
GraphDBClient graphDBClient = this.graphdbClientFactory.getGraphDBClient();
return graphDBClient.indexOnES(datasource, collectionId);
}
}

View File

@@ -14,6 +14,8 @@ import eu.dnetlib.ariadneplus.reader.json.ParseRDFJSON;
import eu.dnetlib.ariadneplus.reader.utils.ClassSpec;
import eu.dnetlib.ariadneplus.reader.utils.Mappings;
import eu.dnetlib.ariadneplus.reader.utils.PropertiesMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
@@ -26,6 +28,8 @@ import net.minidev.json.JSONObject;
@Service
public class ResourceManager {
private static final Log log = LogFactory.getLog(ResourceManager.class);
@Value("${type.path:undefined}")
private String type_path;
@Value("${general.classpath:undefined}")
@@ -53,6 +57,8 @@
public void init(){
Type listType = new TypeToken<ArrayList<String>>(){}.getType();
not_parsable = new Gson().fromJson(exclude_predicates, listType);
propertiesMap = new PropertiesMap();
propertiesMap.fill(spec);
}
@@ -87,11 +93,8 @@
}
}
if (entry == null) {
System.out.println("entry NULL " );
return null;
return null;
}
System.out.println("class_name: " + class_name);
Class<?> c = Class.forName(general_classpath + class_name);
Object class_instance = c.newInstance();
ClassSpec class_spec = propertiesMap.get(class_name);
@@ -143,6 +146,35 @@
return class_instance;
}
public String getType_path() {
return type_path;
}
public void setType_path(String type_path) {
this.type_path = type_path;
}
public String getGeneral_classpath() {
return general_classpath;
}
public void setGeneral_classpath(String general_classpath) {
this.general_classpath = general_classpath;
}
public String getExclude_predicates() {
return exclude_predicates;
}
public void setExclude_predicates(String exclude_predicates) {
this.exclude_predicates = exclude_predicates;
}
public String getSpec() {
return spec;
}
public void setSpec(String spec) {
this.spec = spec;
}
}
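
A small sketch of the reflection pattern manage() uses: the fully qualified name is assembled from the general.classpath prefix and the class name resolved from the spec, then instantiated; the values below are illustrative.

public class ReflectionSketch {
    public static void main(String[] args) throws Exception {
        // general.classpath property plus a class name taken from the spec map.
        String generalClasspath = "eu.dnetlib.ariadneplus.elasticsearch.model.";
        String className = "AriadneCatalogEntry";
        Class<?> c = Class.forName(generalClasspath + className);
        // newInstance() requires a public no-arg constructor on the model class.
        Object instance = c.newInstance();
        System.out.println("instantiated " + instance.getClass().getName());
    }
}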

View File

@@ -1,10 +1,10 @@
package eu.dnetlib.ariadneplus.reader;
import eu.dnetlib.ariadneplus.elasticsearch.BulkUpload;
import eu.dnetlib.ariadneplus.reader.ResourceManager;
import eu.dnetlib.ariadneplus.reader.json.ParseRDFJSON;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.eclipse.rdf4j.model.Model;
import org.eclipse.rdf4j.model.Statement;
import org.eclipse.rdf4j.query.*;
import org.eclipse.rdf4j.repository.Repository;
import org.eclipse.rdf4j.repository.RepositoryConnection;
@@ -12,23 +12,17 @@ import org.eclipse.rdf4j.repository.manager.RemoteRepositoryManager;
import org.eclipse.rdf4j.rio.RDFFormat;
import org.eclipse.rdf4j.rio.RDFWriter;
import org.eclipse.rdf4j.rio.Rio;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.List;
@Service
public class RunSPARQLQueryService {
@Value("${sparql.query:undefined}")
private String query;
@Value("${repository.url:undefined}")
private String repository_url;
private static final Log log = LogFactory.getLog(RunSPARQLQueryService.class);
private RepositoryConnection connection;
private RemoteRepositoryManager manager;
@@ -76,27 +70,26 @@
}
private String executeQueryGraph(String selectQueryTemplate, String recordId){
query = selectQueryTemplate.replaceAll("%record", recordId);
System.out.println("execute query "+query);
openConnection();
log.debug("Retrieving "+recordId+" ...");
String query = selectQueryTemplate.replaceAll("%record", recordId);
openConnection();
StringWriter recordWriter = null;
Model resultsModel = null;
String jsonRecord = null;
try {
System.out.println("Start connection Time: "+Calendar.getInstance().getTime().toString());
log.debug("Started at: "+Calendar.getInstance().getTime().toString());
GraphQuery graphQuery = connection.prepareGraphQuery(QueryLanguage.SPARQL, query);
GraphQueryResult graphQueryResult = graphQuery.evaluate();
resultsModel = QueryResults.asModel(graphQueryResult);
graphQueryResult.close();
System.out.println("End connection Time: "+Calendar.getInstance().getTime().toString());
System.out.println("count statements: " + resultsModel.size());
log.debug("Finished at: "+Calendar.getInstance().getTime().toString());
log.debug("Statements retrieved: " + resultsModel.size());
if (resultsModel.size()==0) {
return "noresult";
}
recordWriter = new StringWriter();
RDFWriter rdfRecordWriter = Rio.createWriter(RDFFormat.RDFJSON, recordWriter);
Rio.write(resultsModel, rdfRecordWriter);
System.out.println("RDF > json record: "+recordWriter.toString());
parser.parse(recordWriter.toString());
resourceManager.manage(parser);
bulkUpload.index(resourceManager);
@@ -111,38 +104,26 @@
return jsonRecord;
}
private void dumpModel(Model model) {
System.out.print(" [ dump model ] ");
for (Statement stmt: model) {
System.out.println(stmt.toString());
}
}
public ParseRDFJSON getParser() {
return parser;
}
public void setParser(ParseRDFJSON parser) {
this.parser = parser;
}
public ResourceManager getResourceManager() {
return resourceManager;
}
public void setResourceManager(ResourceManager resourceManager) {
this.resourceManager = resourceManager;
}
public BulkUpload getBulkUpload() {
return bulkUpload;
}
public void setBulkUpload(BulkUpload bulkUpload) {
this.bulkUpload = bulkUpload;
}
@@ -179,33 +160,31 @@
RunSPARQLQueryService.graphDBRepository = graphDBRepository;
}
public List<String> selectRecordIds(){
query = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
public List<String> selectRecordIds(String datasource, String collectionId){
log.debug("Retrieving recordIds from GraphDB ...");
String queryTemplate = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
"select * \n" +
"from <https://ariadne-infrastructure.eu/api_________::ariadne_plus::ads::271>\n" +
"from <https://ariadne-infrastructure.eu/api_________::ariadne_plus::%datasource::%collectionId>\n" +
"where { \n" +
"\t?recordId rdf:type <https://www.ariadne-infrastructure.eu/resource/ao/cat/1.1/AO_Individual_Data_Resource> .\n" +
"} \n";
if (query.equals("undefined"))
return null;
// query = query.replaceAll("%s", "<https://ariadne-infrastructure.eu/aocat/Resource/02E4F4B5-24B7-3AD7-B460-CFA8B1F0BD1F>");
String query = queryTemplate.replaceAll("%datasource", datasource).replaceAll("%collectionId", collectionId);
openConnection();
String jsonRecord = null;
List<String> recordIds = new ArrayList<>();
try {
System.out.println("Start connection Time: "+Calendar.getInstance().getTime().toString());
log.debug("Started at: "+Calendar.getInstance().getTime().toString());
TupleQuery selectQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
TupleQueryResult selectQueryResult = selectQuery.evaluate();
int counter = 0;
while (selectQueryResult.hasNext()) {
BindingSet recordSet = selectQueryResult.next();
org.eclipse.rdf4j.model.Value recordIdValue = recordSet.getValue("recordId");
System.out.println(recordIdValue.stringValue());
recordIds.add(recordIdValue.stringValue());
counter++;
}
System.out.println("Total records: "+counter);
System.out.println("End connection Time: "+Calendar.getInstance().getTime().toString());
log.debug("Total recordIds retrieved: "+counter);
log.debug("Finished at: "+Calendar.getInstance().getTime().toString());
} catch(Exception e){
e.printStackTrace();
} finally{
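
For reference, a condensed, self-contained version of the graph-query path above, assuming an RDF4J HTTPRepository endpoint; the repository URL is assembled from repository.url and graphdb.repository in this commit's application.properties, and the query is a stand-in, since the production code goes through RemoteRepositoryManager with credentials.

import java.io.StringWriter;
import org.eclipse.rdf4j.model.Model;
import org.eclipse.rdf4j.query.GraphQuery;
import org.eclipse.rdf4j.query.GraphQueryResult;
import org.eclipse.rdf4j.query.QueryLanguage;
import org.eclipse.rdf4j.query.QueryResults;
import org.eclipse.rdf4j.repository.Repository;
import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.repository.http.HTTPRepository;
import org.eclipse.rdf4j.rio.RDFFormat;
import org.eclipse.rdf4j.rio.Rio;

public class GraphQuerySketch {
    public static void main(String[] args) {
        Repository repo = new HTTPRepository(
                "http://graphdb-test.ariadne.d4science.org:7200/repositories/ariadneplus-ts01");
        try (RepositoryConnection connection = repo.getConnection()) {
            // Stand-in CONSTRUCT query; the real one comes from the SPARQL template.
            String query = "CONSTRUCT { ?s ?p ?o } WHERE { ?s ?p ?o } LIMIT 10";
            GraphQuery graphQuery = connection.prepareGraphQuery(QueryLanguage.SPARQL, query);
            try (GraphQueryResult result = graphQuery.evaluate()) {
                Model model = QueryResults.asModel(result);
                // Serialize to RDF/JSON, as executeQueryGraph() does before parsing and indexing.
                StringWriter writer = new StringWriter();
                Rio.write(model, Rio.createWriter(RDFFormat.RDFJSON, writer));
                System.out.println(writer);
            }
        } finally {
            repo.shutDown();
        }
    }
}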

View File

@@ -20,6 +20,7 @@ public class ParseRDFJSON {
static JSONObject map ;
@Value("${catalog.entry.path}")
private String catalogEntryJsonPath;
private String json;

View File

@@ -13,8 +13,8 @@ import java.util.Map;
@Component
public class PropertiesMap {
// @Value("${class.map.specifications:undefined}")
// private String spec;
@Value("${class.map.specifications:undefined}")
private String spec;
private static Map<String, ClassSpec> map;
@@ -22,10 +22,16 @@
return map.get(key);
}
@PostConstruct
public void fill(String spec){
Type mapType = new TypeToken<HashMap<String,ClassSpec>>(){}.getType();
map = new Gson().fromJson(spec,mapType);
}
@PostConstruct
public void fill(){
Type mapType = new TypeToken<HashMap<String,ClassSpec>>(){}.getType();
map = new Gson().fromJson(spec,mapType);
}
}
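
A small sketch of what fill() does once Spring injects class.map.specifications: the JSON spec is deserialized into a map keyed by class name via a Gson TypeToken (Object stands in for ClassSpec here to keep the sketch self-contained, and the spec string is hypothetical).

import java.lang.reflect.Type;
import java.util.HashMap;
import java.util.Map;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;

public class PropertiesMapSketch {
    public static void main(String[] args) {
        // Hypothetical spec fragment: class name -> per-class mapping specification.
        String spec = "{\"AriadneCatalogEntry\":{},\"AgentInfo\":{}}";
        Type mapType = new TypeToken<HashMap<String, Object>>(){}.getType();
        Map<String, Object> map = new Gson().fromJson(spec, mapType);
        System.out.println(map.keySet());
    }
}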

View File

@@ -3,15 +3,14 @@
server.servlet.context-path=/ariadneplus-graphdb
server.port=8281
graphdb.serverUrl=http://localhost:7200/
graphdb.serverUrl=http://graphdb-test.ariadne.d4science.org:7200
graphdb.writer.user=writer
graphdb.writer.pwd=Writer01
graphdb.writer.pwd=********
graphdb.repository=ariadneplus-ts01
graphdb.sparqlUrl = http://localhost:7200/sparql
graphdb.baseURI=https://ariadne-infrastructure.eu/
repository.url=http://graphdb-test.ariadne.d4science.org:7200
elasticsearch.hostname=elastic-test.ariadne.d4science.org
elasticsearch.indexname=catalog_test
catalog.entry.path=$[*][?(@['https://www.ariadne-infrastructure.eu/property/resourceType'][0]['value']=='provided record')]
general.classpath=eu.dnetlib.ariadneplus.elasticsearch.model.

View File

@@ -26,7 +26,7 @@ public class GraphDbReaderAndESIndexTest {
private RunSPARQLQueryService runSPQRLQuery;
@Test
// @Ignore
@Ignore
public void readAndIndexTest() throws Exception {
final ClassPathResource resource = new ClassPathResource("application.properties");
Properties appProps = new Properties();