Records to publish are now sent to the Spring module in the request body, because htmlsimple harvesting can produce large records (>10MB)
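In practice the record is now read from the raw POST body instead of from a URL-encoded form parameter, so it is no longer capped by the container's form-parsing limit (Tomcat's maxPostSize, 2 MB by default). A rough sketch of what the body-based read amounts to, with illustrative names; Spring actually derives the charset from the request's Content-Type header, UTF-8 is assumed here for simplicity:

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import javax.servlet.http.HttpServletRequest;

public final class RawBodyReadSketch {
    // roughly what a @RequestBody String parameter gives the controller: the body is
    // streamed in full, unlike request.getParameter("record"), which goes through the
    // container's form parsing and its size limit
    static String readRecord(HttpServletRequest request) throws IOException {
        return new String(request.getInputStream().readAllBytes(), StandardCharsets.UTF_8); // Java 9+
    }
}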

Enrico Ottonello 2021-12-17 11:39:37 +01:00
parent 0ab59c10f4
commit a90f02487a
2 changed files with 10 additions and 6 deletions

AriadnePlusPublisherController.java

@@ -1,5 +1,6 @@
 package eu.dnetlib.ariadneplus.publisher;
+import eu.dnetlib.ariadneplus.reader.utils.GZIPCompression;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -7,6 +8,7 @@ import org.springframework.web.bind.annotation.*;
 import eu.dnetlib.ariadneplus.publisher.AriadnePlusPublisherHelper.AriadnePlusTargets;
+import java.io.IOException;
 import java.util.List;
 /**
@@ -26,14 +28,14 @@ public class AriadnePlusPublisherController {
 @RequestMapping(value = "/version", method = RequestMethod.GET)
 public String version() throws AriadnePlusPublisherException {
-return "3.1.0-SNAPSHOT";
+return "4.0.0-SNAPSHOT";
 }
 @RequestMapping(value = "/publish", method = RequestMethod.POST)
-public void publish(@RequestParam final String record) throws AriadnePlusPublisherException {
+public void publish(@RequestBody final String record) throws AriadnePlusPublisherException, IOException {
 getAriadnePlusPublisherHelper().publish(record, getTarget(DEFAULT_TARGET_ENDPOINT));
 }
 @RequestMapping(value = "/feedProvenance", method = RequestMethod.POST)
 public void feedProvenance(@RequestParam final String datasource, @RequestParam final String datasourceApi) throws AriadnePlusPublisherException {
 getAriadnePlusPublisherHelper().feedProvenance(datasource, datasourceApi, getTarget(DEFAULT_TARGET_ENDPOINT));
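With @RequestParam replaced by @RequestBody, clients must send the record as the raw request body rather than as a record=value form field. A hedged usage sketch with Spring's RestTemplate; the caller class and endpoint URL are illustrative, not part of this commit:

import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.web.client.RestTemplate;

public class PublishClientSketch {
    public static void main(String[] args) {
        String record = "<record>large harvested record, possibly >10MB</record>";
        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.TEXT_PLAIN); // raw record, not form data
        new RestTemplate().postForEntity(
                "http://localhost:8080/publish",      // illustrative endpoint URL
                new HttpEntity<>(record, headers), Void.class);
    }
}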

PublishGraphDBJobNode.java

@@ -2,18 +2,22 @@ package eu.dnetlib.ariadneplus.workflows.nodes;
 import java.io.IOException;
 import java.net.ConnectException;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
+import eu.dnetlib.ariadneplus.utils.GZIPCompression;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.http.NameValuePair;
 import org.apache.http.client.entity.UrlEncodedFormEntity;
 import org.apache.http.client.methods.CloseableHttpResponse;
 import org.apache.http.client.methods.HttpPost;
+import org.apache.http.entity.ByteArrayEntity;
+import org.apache.http.entity.StringEntity;
 import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.client.HttpClients;
 import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
@@ -126,9 +130,7 @@ public class PublishGraphDBJobNode extends AsyncJobNode {
 CloseableHttpResponse responsePPOST = null;
 try {
 HttpPost post = new HttpPost(getPublishEndpoint());
-List<NameValuePair> params = Lists.newArrayList();
-params.add(new BasicNameValuePair("record", record));
-UrlEncodedFormEntity ent = new UrlEncodedFormEntity(params, "UTF-8");
+StringEntity ent = new StringEntity(record);
 post.setEntity(ent);
 responsePPOST = client.execute(post);
 int statusCode = responsePPOST.getStatusLine().getStatusCode();
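Note that new StringEntity(record) in HttpClient 4.x defaults to text/plain with ISO-8859-1, which can alter non-ASCII characters in harvested records; the newly imported StandardCharsets, ByteArrayEntity and GZIPCompression suggest the class also sends UTF-8 and/or gzip-compressed payloads in code paths not shown in this diff. A sketch of that variant under those assumptions, using stock java.util.zip instead of the project's GZIPCompression helper (whose API is not visible here):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.zip.GZIPOutputStream;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.ByteArrayEntity;

public class GzipPublishSketch {
    // gzip the record and attach it as a binary entity; the receiving endpoint must
    // decompress it (e.g. keyed off the Content-Encoding: gzip header)
    static HttpPost buildPost(String endpoint, String record) throws IOException {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try (GZIPOutputStream gzip = new GZIPOutputStream(buffer)) {
            gzip.write(record.getBytes(StandardCharsets.UTF_8));
        }
        HttpPost post = new HttpPost(endpoint);
        post.setHeader("Content-Encoding", "gzip");
        post.setEntity(new ByteArrayEntity(buffer.toByteArray()));
        return post;
    }
}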