handle the error upon commit

This commit is contained in:
Claudio Atzori 2024-11-06 09:49:20 +01:00
parent e4a2c559ee
commit 30c27cea26
3 changed files with 71 additions and 6 deletions

View File

@ -0,0 +1,63 @@
package eu.dnetlib.dhp.solr;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.Http2SolrClient;
import org.apache.solr.client.solrj.impl.LBHttp2SolrClient;
import org.apache.solr.client.solrj.impl.LBSolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.client.solrj.response.SolrPingResponse;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.junit.jupiter.api.*;
import java.io.IOException;
import static org.junit.jupiter.api.Assertions.*;
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
class SolrClientTest {

    private static final String DEFAULT_COLLECTION = "DMF-index-openaire";

    public static final String SOLR_HTTP_URL_LB1 = String.format("http://localhost:8981/solr/%s", DEFAULT_COLLECTION);
    public static final String SOLR_HTTP_URL_LB2 = String.format("http://localhost:8982/solr/%s", DEFAULT_COLLECTION);
    public static final String SOLR_HTTP_URL_LB3 = String.format("http://localhost:8983/solr/%s", DEFAULT_COLLECTION);

    // Kept as a field so tearDown can close it: LBHttp2SolrClient does not take
    // ownership of an externally supplied Http2SolrClient, so closing only the
    // LB client would leak the delegate's HTTP/2 connections and worker threads.
    private static Http2SolrClient http2SolrClient;

    private static SolrClient client;

    /** Builds a load-balanced client over the three local Solr endpoints. */
    @BeforeAll
    static void setUp() {
        http2SolrClient = new Http2SolrClient.Builder().build();
        client = new LBHttp2SolrClient.Builder(
            http2SolrClient,
            new LBSolrClient.Endpoint(SOLR_HTTP_URL_LB1),
            new LBSolrClient.Endpoint(SOLR_HTTP_URL_LB2),
            new LBSolrClient.Endpoint(SOLR_HTTP_URL_LB3))
                .build();
    }

    /** Closes both the LB client and its underlying delegate client. */
    @AfterAll
    static void tearDown() throws IOException {
        // Null-guard: if setUp failed part-way, the fields may be unassigned
        // and an unguarded close() would mask the original failure with an NPE.
        if (client != null) {
            client.close();
        }
        if (http2SolrClient != null) {
            http2SolrClient.close();
        }
    }

    /** Verifies the cluster answers a ping with status 0 (success). */
    @Test
    @Order(1)
    void testPing() throws SolrServerException, IOException {
        SolrPingResponse rsp = client.ping();
        assertEquals(0, rsp.getStatus());
    }

    /** Runs a match-all query against the default collection. */
    @Test
    @Order(2)
    void testQuery() throws SolrServerException, IOException {
        final ModifiableSolrParams query = new SolrQuery().add(CommonParams.Q, "*:*");
        QueryResponse rsp = client.query(query);
        // NOTE(review): this magic count is tied to one specific indexed dataset;
        // the test breaks on any other environment — consider asserting > 0 instead.
        assertEquals(52247, rsp.getResults().getNumFound());
    }
}

View File

@ -35,8 +35,6 @@ public class RecordImporterApplication {
} }
} }
System.out.println();
log.info("**** EXECUTING - {} ***", APPLICATION_TITLE); log.info("**** EXECUTING - {} ***", APPLICATION_TITLE);
Boolean isSparkSessionManaged = Optional Boolean isSparkSessionManaged = Optional
@ -71,11 +69,8 @@ public class RecordImporterApplication {
RecordImporter.importRecords(conf, zkHost, collection, path, RecordImporter.BATCH_SIZE); RecordImporter.importRecords(conf, zkHost, collection, path, RecordImporter.BATCH_SIZE);
log.info("**** DONE ***"); log.info("**** DONE ***");
System.out.println();
} }
private static ArgumentApplicationParser parseArguments(String[] args) throws IOException, ParseException { private static ArgumentApplicationParser parseArguments(String[] args) throws IOException, ParseException {
return ArgumentApplicationParser.parse( return ArgumentApplicationParser.parse(
IOUtils.toString( IOUtils.toString(

View File

@ -8,6 +8,7 @@ import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.CloudSolrClient; import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.request.UpdateRequest; import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.client.solrj.response.UpdateResponse;
import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.SolrInputDocument;
import org.apache.spark.SparkConf; import org.apache.spark.SparkConf;
@ -36,7 +37,13 @@ public class RecordImporter implements Serializable {
CloudClientParams params = new CloudClientParams(zkHost, collection); CloudClientParams params = new CloudClientParams(zkHost, collection);
indexDocs(params, batchSize, spark.read().json(path)); indexDocs(params, batchSize, spark.read().json(path));
CloudSolrClient client = CacheCloudSolrClient.getCachedCloudClient(params); CloudSolrClient client = CacheCloudSolrClient.getCachedCloudClient(params);
client.commit(collection); UpdateResponse commitRsp = client.commit(collection);
if (commitRsp.getStatus() != 0) {
log.error("got exception during commit operation", commitRsp.getException());
throw commitRsp.getException();
} else {
log.info("Done");
}
}); });
} }