Removed CKAN utils library

Luca Frosini 2019-09-18 16:03:40 +02:00
parent 3c95a26ad2
commit 58e035998f
4 changed files with 40 additions and 37 deletions
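
For reviewers, a minimal standalone sketch of the gcat-client group listing this commit switches to, mirroring the new listGroup() in the diff below. It assumes, as the new code does, that Group.list(limit, offset) returns a JSON array of catalogue group names as a String; the class name GroupListingSketch and the method name listGroupNames are hypothetical, and a valid gCube context/token must already be set (as the new SoBigDataHarvesterTest does via ContextTest).

// Minimal sketch, assuming the gcat-client Group API used in this commit:
// list(limit, offset) returns a JSON array of catalogue group names.
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.gcube.gcat.client.Group;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class GroupListingSketch { // hypothetical wrapper class

	public static List<String> listGroupNames() throws Exception {
		List<String> groupNames = new ArrayList<>();
		// Request up to 1000 groups starting at offset 0 from the catalogue
		String json = new Group().list(1000, 0);
		// Parse the returned JSON array and collect each group name
		JsonNode array = new ObjectMapper().readTree(json);
		Iterator<JsonNode> elements = array.elements();
		while(elements.hasNext()) {
			groupNames.add(elements.next().asText());
		}
		return groupNames;
	}
}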

pom.xml View File

@@ -103,9 +103,9 @@
</dependency>
<dependency>
- <groupId>org.gcube.data-catalogue</groupId>
- <artifactId>ckan-util-library</artifactId>
- <version>[2.0.0-SNAPSHOT,3.0.0-SNAPSHOT)</version>
+ <groupId>org.gcube.data-publishing</groupId>
+ <artifactId>gcat-client</artifactId>
+ <version>[1.0.0-SNAPSHOT, 2.0.0-SNAPSHOT)</version>
<scope>compile</scope>
</dependency>

SoBigDataHarvester.java View File

@@ -4,6 +4,7 @@ import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
+ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
@@ -11,18 +12,17 @@ import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
- import org.apache.commons.lang.Validate;
import org.gcube.common.authorization.client.exceptions.ObjectNotFound;
- import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogueFactory;
- import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogueImpl;
import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
import org.gcube.dataharvest.datamodel.HarvestedDataKey;
import org.gcube.dataharvest.harvester.BasicHarvester;
import org.gcube.dataharvest.utils.Utils;
+ import org.gcube.gcat.client.Group;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
- import eu.trentorise.opendata.jackan.model.CkanGroup;
+ import com.fasterxml.jackson.databind.JsonNode;
+ import com.fasterxml.jackson.databind.ObjectMapper;
/**
* The Class SoBigDataHarvester.
@@ -47,9 +47,6 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
//Added by Francesco
protected HashMap<String,String> mapWsFolderNameToVRE;
- //Added by Francesco
- private DataCatalogueFactory catalogueFactory;
protected SortedSet<String> contexts;
/**
@@ -64,8 +61,6 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
public SoBigDataHarvester(Date start, Date end, SortedSet<String> contexts) throws Exception {
super(start, end);
- this.catalogueFactory = DataCatalogueFactory.getFactory();
String currentContext = Utils.getCurrentContext();
// Truncating the context to the last / (the last / is retained for filtering issues)
@@ -100,10 +95,8 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
logger.info("Built from properties the mapping 'SystemType' to 'DB entry' {}", mapSystemTypeToDBEntry);
String currentContext = Utils.getCurrentContext();
//GET CATALOGUE'S GROUPS
- List<String> groups = loadGroupsFromCKAN(currentContext);
+ List<String> groups = listGroup();
//NORMALIZING THE GROUP NAME TO MATCH WITH VRE NAME
Map<String,String> mapNormalizedGroups = normalizeGroups(groups);
logger.debug("Map of Normalized Groups is {} ", mapNormalizedGroups);
@@ -153,28 +146,18 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
return listNGroups;
}
- /**
- * Load groups from ckan.
- *
- * @param scope the scope
- * @return the list
- */
- private List<String> loadGroupsFromCKAN(String scope) {
- List<String> groups = new ArrayList<String>();
- String ckanURL = "";
- try {
- DataCatalogueImpl utils = catalogueFactory.getUtilsPerScope(scope);
- ckanURL = utils.getCatalogueUrl();
- List<CkanGroup> theGroups = utils.getGroups();
- Validate.notNull(theGroups, "The list of Groups is null");
- for(CkanGroup ckanGroup : theGroups) {
- groups.add(ckanGroup.getName());
- }
- } catch(Exception e) {
- logger.error("Error occurred on getting CKAN groups for scope {} and CKAN URL {}", scope, ckanURL, e);
+ public static List<String> listGroup() throws Exception {
+ List<String> groupList = new ArrayList<>();
+ Group group = new Group();
+ String groups = group.list(1000, 0);
+ ObjectMapper objectMapper = new ObjectMapper();
+ JsonNode jsonNodeGroups = objectMapper.readTree(groups);
+ Iterator<JsonNode> iterator = jsonNodeGroups.elements();
+ while(iterator.hasNext()){
+ JsonNode jsonNode = iterator.next();
+ groupList.add(jsonNode.asText());
}
- return groups;
+ return groupList;
}
/**

SoBigDataHarvesterTest.java View File

@@ -0,0 +1,21 @@
+ package org.gcube.dataharvest.harvester.sobigdata;
+ import java.util.List;
+ import org.gcube.dataharvest.utils.ContextTest;
+ import org.junit.Test;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
+ public class SoBigDataHarvesterTest extends ContextTest {
+ private static Logger logger = LoggerFactory.getLogger(SoBigDataHarvesterTest.class);
+ @Test
+ public void testGroupList() throws Exception {
+ ContextTest.setContextByName("/d4science.research-infrastructures.eu/D4Research/AGINFRAplusDev");
+ List<String> groups = SoBigDataHarvester.listGroup();
+ logger.debug("{}", groups);
+ }
+ }

View File

@@ -5,7 +5,6 @@ import java.io.InputStream;
import java.util.Properties;
import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;