diff --git a/pom.xml b/pom.xml
index 3369141..a4bbafa 100644
--- a/pom.xml
+++ b/pom.xml
@@ -103,9 +103,9 @@
-			<groupId>org.gcube.data-catalogue</groupId>
-			<artifactId>ckan-util-library</artifactId>
-			<version>[2.0.0-SNAPSHOT,3.0.0-SNAPSHOT)</version>
+			<groupId>org.gcube.data-publishing</groupId>
+			<artifactId>gcat-client</artifactId>
+			<version>[1.0.0-SNAPSHOT, 2.0.0-SNAPSHOT)</version>
 			<scope>compile</scope>
diff --git a/src/main/java/org/gcube/dataharvest/harvester/sobigdata/SoBigDataHarvester.java b/src/main/java/org/gcube/dataharvest/harvester/sobigdata/SoBigDataHarvester.java
index e20b704..d1a6dd5 100644
--- a/src/main/java/org/gcube/dataharvest/harvester/sobigdata/SoBigDataHarvester.java
+++ b/src/main/java/org/gcube/dataharvest/harvester/sobigdata/SoBigDataHarvester.java
@@ -4,6 +4,7 @@ import java.text.ParseException;
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.HashMap;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
@@ -11,18 +12,17 @@ import java.util.Set;
 import java.util.SortedSet;
 import java.util.TreeSet;
 
-import org.apache.commons.lang.Validate;
 import org.gcube.common.authorization.client.exceptions.ObjectNotFound;
-import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogueFactory;
-import org.gcube.datacatalogue.ckanutillibrary.server.DataCatalogueImpl;
 import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
 import org.gcube.dataharvest.datamodel.HarvestedDataKey;
 import org.gcube.dataharvest.harvester.BasicHarvester;
 import org.gcube.dataharvest.utils.Utils;
+import org.gcube.gcat.client.Group;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import eu.trentorise.opendata.jackan.model.CkanGroup;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
 
 /**
  * The Class SoBigDataHarvester.
@@ -47,9 +47,6 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
 	//Added by Francesco
 	protected HashMap<String,String> mapWsFolderNameToVRE;
 
-	//Added by Francesco
-	private DataCatalogueFactory catalogueFactory;
-
 	protected SortedSet<String> contexts;
 
 	/**
@@ -64,8 +61,6 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
 	public SoBigDataHarvester(Date start, Date end, SortedSet<String> contexts) throws Exception {
 		super(start, end);
 
-		this.catalogueFactory = DataCatalogueFactory.getFactory();
-
 		String currentContext = Utils.getCurrentContext();
 
 		// Truncating the context to the last / (the last / is retained for filtering issues)
@@ -100,10 +95,8 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
 		logger.info("Built from properties the mapping 'SystemType' to 'DB entry' {}", mapSystemTypeToDBEntry);
 
-		String currentContext = Utils.getCurrentContext();
-
 		//GET CATALOGUE'S GROUPS
-		List<String> groups = loadGroupsFromCKAN(currentContext);
+		List<String> groups = listGroup();
 		//NORMALIZING THE GROUP NAME TO MATCH WITH VRE NAME
 		Map<String,String> mapNormalizedGroups = normalizeGroups(groups);
 		logger.debug("Map of Normalized Groups is {} ", mapNormalizedGroups);
@@ -153,28 +146,18 @@ public abstract class SoBigDataHarvester extends BasicHarvester {
 		return listNGroups;
 	}
 
-	/**
-	 * Load groups from ckan.
-	 *
-	 * @param scope the scope
-	 * @return the list
-	 */
-	private List<String> loadGroupsFromCKAN(String scope) {
-		List<String> groups = new ArrayList<String>();
-		String ckanURL = "";
-		try {
-			DataCatalogueImpl utils = catalogueFactory.getUtilsPerScope(scope);
-			ckanURL = utils.getCatalogueUrl();
-			List<CkanGroup> theGroups = utils.getGroups();
-			Validate.notNull(theGroups, "The list of Groups is null");
-			for(CkanGroup ckanGroup : theGroups) {
-				groups.add(ckanGroup.getName());
-			}
-		} catch(Exception e) {
-			logger.error("Error occurred on getting CKAN groups for scope {} and CKAN URL {}", scope, ckanURL, e);
+	public static List<String> listGroup() throws Exception {
+		List<String> groupList = new ArrayList<>();
+		Group group = new Group();
+		String groups = group.list(1000, 0);
+		ObjectMapper objectMapper = new ObjectMapper();
+		JsonNode jsonNodeGroups = objectMapper.readTree(groups);
+		Iterator<JsonNode> iterator = jsonNodeGroups.elements();
+		while(iterator.hasNext()){
+			JsonNode jsonNode = iterator.next();
+			groupList.add(jsonNode.asText());
 		}
-
-		return groups;
+		return groupList;
 	}
 
 	/**
diff --git a/src/test/java/org/gcube/dataharvest/harvester/sobigdata/SoBigDataHarvesterTest.java b/src/test/java/org/gcube/dataharvest/harvester/sobigdata/SoBigDataHarvesterTest.java
new file mode 100644
index 0000000..943b4d4
--- /dev/null
+++ b/src/test/java/org/gcube/dataharvest/harvester/sobigdata/SoBigDataHarvesterTest.java
@@ -0,0 +1,21 @@
+package org.gcube.dataharvest.harvester.sobigdata;
+
+import java.util.List;
+
+import org.gcube.dataharvest.utils.ContextTest;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class SoBigDataHarvesterTest extends ContextTest {
+
+	private static Logger logger = LoggerFactory.getLogger(SoBigDataHarvesterTest.class);
+
+	@Test
+	public void testGroupList() throws Exception {
+		ContextTest.setContextByName("/d4science.research-infrastructures.eu/D4Research/AGINFRAplusDev");
+		List<String> groups = SoBigDataHarvester.listGroup();
+		logger.debug("{}",
+				groups);
+	}
+
+}
diff --git a/src/test/java/org/gcube/dataharvest/utils/ContextAuthorizationTest.java b/src/test/java/org/gcube/dataharvest/utils/ContextAuthorizationTest.java
index b716a1f..5126fc2 100644
--- a/src/test/java/org/gcube/dataharvest/utils/ContextAuthorizationTest.java
+++ b/src/test/java/org/gcube/dataharvest/utils/ContextAuthorizationTest.java
@@ -5,7 +5,6 @@ import java.io.InputStream;
 import java.util.Properties;
 
 import org.gcube.dataharvest.AccountingDataHarvesterPlugin;
-import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;