Refs #13079: Add the create/update/delete operation in Profile Collection in gCat

Task-Url: https://support.d4science.org/issues/13079

git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-catalogue/gcubedatacatalogue-metadata-discovery@176268 82a268e6-3cf1-43bd-a215-b396298e98cf
This commit is contained in:
Luca Frosini 2018-12-28 15:21:26 +00:00
parent 7aea3e9910
commit c90ab867ff
9 changed files with 237 additions and 55 deletions

View File

@ -1,11 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<classpath> <classpath>
<classpathentry including="**/*.java" kind="src" output="target/gcubedatacatalogue-metadata-discovery-3.1.0-SNAPSHOT/WEB-INF/classes" path="src/main/java"> <classpathentry including="**/*.java" kind="src" output="target/gcubedatacatalogue-metadata-discovery-3.4.0-SNAPSHOT/WEB-INF/classes" path="src/main/java">
<attributes> <attributes>
<attribute name="optional" value="true"/> <attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/> <attribute name="maven.pomderived" value="true"/>
</attributes> </attributes>
</classpathentry> </classpathentry>
<classpathentry kind="src" path="src/main/resources"/>
<classpathentry kind="src" output="target/test-classes" path="src/test/java"> <classpathentry kind="src" output="target/test-classes" path="src/test/java">
<attributes> <attributes>
<attribute name="optional" value="true"/> <attribute name="optional" value="true"/>
@ -27,5 +28,5 @@
<attribute name="maven.pomderived" value="true"/> <attribute name="maven.pomderived" value="true"/>
</attributes> </attributes>
</classpathentry> </classpathentry>
<classpathentry kind="output" path="target/gcubedatacatalogue-metadata-discovery-3.1.0-SNAPSHOT/WEB-INF/classes"/> <classpathentry kind="output" path="target/gcubedatacatalogue-metadata-discovery-3.4.0-SNAPSHOT/WEB-INF/classes"/>
</classpath> </classpath>

10
pom.xml
View File

@ -13,7 +13,7 @@
<groupId>org.gcube.data-catalogue</groupId> <groupId>org.gcube.data-catalogue</groupId>
<artifactId>gcubedatacatalogue-metadata-discovery</artifactId> <artifactId>gcubedatacatalogue-metadata-discovery</artifactId>
<packaging>jar</packaging> <packaging>jar</packaging>
<version>3.3.0-SNAPSHOT</version> <version>3.4.0-SNAPSHOT</version>
<name>The gCube data catalogue metadata discovery library</name> <name>The gCube data catalogue metadata discovery library</name>
<description>The gCube data catalogue metadata discovery library</description> <description>The gCube data catalogue metadata discovery library</description>
<scm> <scm>
@ -112,6 +112,14 @@
<version>4.8.1</version> <version>4.8.1</version>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<!-- Required to log on tests -->
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>1.0.13</version>
<scope>test</scope>
</dependency>
</dependencies> </dependencies>

View File

@ -1,9 +1,21 @@
package org.gcube.datacatalogue.metadatadiscovery; package org.gcube.datacatalogue.metadatadiscovery;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.StringReader;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.stream.Collectors;
import javax.xml.XMLConstants;
import javax.xml.transform.stream.StreamSource;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import javax.xml.validation.Validator;
import org.gcube.common.scope.api.ScopeProvider; import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.common.scope.impl.ScopeBean; import org.gcube.common.scope.impl.ScopeBean;
@ -15,8 +27,7 @@ import org.gcube.datacatalogue.metadatadiscovery.reader.MetadataFormatReader;
import org.gcube.datacatalogue.metadatadiscovery.reader.NamespaceCategoryReader; import org.gcube.datacatalogue.metadatadiscovery.reader.NamespaceCategoryReader;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.xml.sax.SAXException;
/** /**
* The Class DataCalogueMetadataReader. * The Class DataCalogueMetadataReader.
@ -24,135 +35,167 @@ import org.slf4j.LoggerFactory;
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it * @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
* Jun 8, 2016 * Jun 8, 2016
*/ */
public class DataCalogueMetadataFormatReader implements DataCatalogueMetadataDiscovery{ public class DataCalogueMetadataFormatReader implements DataCatalogueMetadataDiscovery {
private static String SCHEMA_FILENAME = "Gdcmetadataprofilev3.xsd";
private MetadataFormatDiscovery medataFormatDiscovery; private MetadataFormatDiscovery medataFormatDiscovery;
private ScopeBean scope; private ScopeBean scope;
private Map<String, MetadataFormat> hashMetadataFormats = null; private Map<String,MetadataFormat> hashMetadataFormats = null;
private List<NamespaceCategory> namespaceCategories = null; private List<NamespaceCategory> namespaceCategories = null;
private String profileSchema = null;
private static Logger logger = LoggerFactory.getLogger(DataCalogueMetadataFormatReader.class); private static Logger logger = LoggerFactory.getLogger(DataCalogueMetadataFormatReader.class);
/** /**
* Instantiates a new data calogue metadata format reader. * Instantiates a new data calogue metadata format reader.
* *
* @throws Exception the exception * @throws Exception the exception
*/ */
public DataCalogueMetadataFormatReader() throws Exception { public DataCalogueMetadataFormatReader() throws Exception {
String scopeString = ScopeProvider.instance.get(); String scopeString = ScopeProvider.instance.get();
logger.debug("Read scope "+scopeString +" from ScopeProvider"); logger.debug("Read scope " + scopeString + " from ScopeProvider");
if(scopeString==null || scopeString.isEmpty()) if(scopeString == null || scopeString.isEmpty())
throw new Exception("Please set a valid scope into ScopeProvider"); throw new Exception("Please set a valid scope into ScopeProvider");
scope = new ScopeBean(scopeString); scope = new ScopeBean(scopeString);
readNamespaces(); readNamespaces();
readMetadaFormats(); readMetadaFormats();
} }
/** /**
* Read metada formats. * Read metada formats.
* *
* @throws Exception the exception * @throws Exception the exception
*/ */
private void readMetadaFormats() throws Exception{ private void readMetadaFormats() throws Exception {
medataFormatDiscovery = new MetadataFormatDiscovery(scope); medataFormatDiscovery = new MetadataFormatDiscovery(scope);
logger.info("MedataFormatDiscovery has retrieved: "+medataFormatDiscovery.getMetadataProfiles().size() +" metadata type/s"); logger.info("MedataFormatDiscovery has retrieved: " + medataFormatDiscovery.getMetadataProfiles().size()
+ " metadata type/s");
logger.debug("filling cache for MedataFormat"); logger.debug("filling cache for MedataFormat");
hashMetadataFormats = new HashMap<String, MetadataFormat>(medataFormatDiscovery.getMetadataProfiles().size()); hashMetadataFormats = new HashMap<String,MetadataFormat>(medataFormatDiscovery.getMetadataProfiles().size());
for (MetadataProfile mT : medataFormatDiscovery.getMetadataProfiles()) { for(MetadataProfile mT : medataFormatDiscovery.getMetadataProfiles()) {
if(mT==null) if(mT == null)
continue; continue;
MetadataFormatReader reader = new MetadataFormatReader(scope, mT.getId()); MetadataFormatReader reader = new MetadataFormatReader(scope, mT.getId());
hashMetadataFormats.put(mT.getId(), reader.getMetadataFormat()); hashMetadataFormats.put(mT.getId(), reader.getMetadataFormat());
logger.debug("MetadataType id: "+mT.getId() +" cached as: "+reader.getMetadataFormat()); logger.debug("MetadataType id: " + mT.getId() + " cached as: " + reader.getMetadataFormat());
} }
} }
/** /**
* Read namespaces. * Read namespaces.
*/ */
private void readNamespaces() { private void readNamespaces() {
try{ try {
if(namespaceCategories == null || namespaceCategories.isEmpty()){ if(namespaceCategories == null || namespaceCategories.isEmpty()) {
if(namespaceCategories == null) if(namespaceCategories == null)
namespaceCategories = new ArrayList<NamespaceCategory>(); namespaceCategories = new ArrayList<NamespaceCategory>();
NamespaceCategoryReader rd = new NamespaceCategoryReader(scope); NamespaceCategoryReader rd = new NamespaceCategoryReader(scope);
namespaceCategories.addAll(rd.getNamespaces().getNamespaceCategories()); namespaceCategories.addAll(rd.getNamespaces().getNamespaceCategories());
} }
}catch(Exception e){ } catch(Exception e) {
logger.debug("An error occurred during read namespaces for categories: ", e); logger.debug("An error occurred during read namespaces for categories: ", e);
} }
} }
/* (non-Javadoc) /* (non-Javadoc)
* @see org.gcube.datacatalogue.metadatadiscovery.DataCatalogueMetadataDiscovery#getMetadataFormatForMetadataType(org.gcube.datacatalogue.metadatadiscovery.bean.MetadataType) * @see org.gcube.datacatalogue.metadatadiscovery.DataCatalogueMetadataDiscovery#getMetadataFormatForMetadataType(org.gcube.datacatalogue.metadatadiscovery.bean.MetadataType)
*/ */
@Override @Override
public MetadataFormat getMetadataFormatForMetadataProfile(MetadataProfile profile) throws Exception { public MetadataFormat getMetadataFormatForMetadataProfile(MetadataProfile profile) throws Exception {
if(profile==null) if(profile == null)
throw new Exception("Input "+ MetadataProfile.class.getSimpleName() + " is null"); throw new Exception("Input " + MetadataProfile.class.getSimpleName() + " is null");
MetadataFormat format = hashMetadataFormats.get(profile.getId()); MetadataFormat format = hashMetadataFormats.get(profile.getId());
if(format!=null) if(format != null)
return format; return format;
MetadataFormatReader reader = new MetadataFormatReader(scope, profile.getId()); MetadataFormatReader reader = new MetadataFormatReader(scope, profile.getId());
return reader.getMetadataFormat(); return reader.getMetadataFormat();
} }
/* (non-Javadoc) /* (non-Javadoc)
* @see org.gcube.datacatalogue.metadatadiscovery.DataCatalogueMetadataDiscovery#getListOfMetadataTypes() * @see org.gcube.datacatalogue.metadatadiscovery.DataCatalogueMetadataDiscovery#getListOfMetadataTypes()
*/ */
@Override @Override
public List<MetadataProfile> getListOfMetadataProfiles() throws Exception { public List<MetadataProfile> getListOfMetadataProfiles() throws Exception {
if(medataFormatDiscovery==null) if(medataFormatDiscovery == null)
readMetadaFormats(); readMetadaFormats();
return medataFormatDiscovery.getMetadataProfiles(); return medataFormatDiscovery.getMetadataProfiles();
} }
/* (non-Javadoc) /* (non-Javadoc)
* @see org.gcube.datacatalogue.metadatadiscovery.DataCatalogueMetadataDiscovery#getListOfNamespaceCategories() * @see org.gcube.datacatalogue.metadatadiscovery.DataCatalogueMetadataDiscovery#getListOfNamespaceCategories()
*/ */
@Override @Override
public List<NamespaceCategory> getListOfNamespaceCategories() throws Exception { public List<NamespaceCategory> getListOfNamespaceCategories() throws Exception {
if(namespaceCategories == null) if(namespaceCategories == null)
readNamespaces(); readNamespaces();
return namespaceCategories; return namespaceCategories;
} }
/* (non-Javadoc) /* (non-Javadoc)
* @see org.gcube.datacatalogue.metadatadiscovery.DataCatalogueMetadataDiscovery#resetMetadataProfile() * @see org.gcube.datacatalogue.metadatadiscovery.DataCatalogueMetadataDiscovery#resetMetadataProfile()
*/ */
@Override @Override
public void resetMetadataProfile() { public void resetMetadataProfile() {
medataFormatDiscovery = null; medataFormatDiscovery = null;
hashMetadataFormats = null; hashMetadataFormats = null;
} }
/* (non-Javadoc) /* (non-Javadoc)
* @see org.gcube.datacatalogue.metadatadiscovery.DataCatalogueMetadataDiscovery#resetNamespaceCategories() * @see org.gcube.datacatalogue.metadatadiscovery.DataCatalogueMetadataDiscovery#resetNamespaceCategories()
*/ */
@Override @Override
public void resetNamespaceCategories() { public void resetNamespaceCategories() {
namespaceCategories = null; namespaceCategories = null;
} }
@Override
public String getProfileSchema() {
	// Lazily load and cache the XSD text bundled on the classpath (SCHEMA_FILENAME).
	if(profileSchema == null) {
		// FIX: the original never closed the reader (resource leak) and relied on the
		// platform-default charset; use try-with-resources and explicit UTF-8
		// (XSD files declare encoding="UTF-8").
		try(BufferedReader reader = new BufferedReader(new InputStreamReader(
				getProfileSchemaInputStream(), java.nio.charset.StandardCharsets.UTF_8))) {
			profileSchema = reader.lines().collect(Collectors.joining("\n"));
		} catch(IOException e) {
			// Interface method declares no checked exception: rethrow unchecked, keeping the cause.
			throw new RuntimeException("Unable to read profile schema " + SCHEMA_FILENAME, e);
		}
	}
	return profileSchema;
}
/**
 * Opens the bundled profile-schema XSD as a classpath resource stream.
 *
 * @return the resource stream, or {@code null} when {@code SCHEMA_FILENAME}
 *         is not present on the classpath (callers must handle this)
 */
static InputStream getProfileSchemaInputStream() {
	ClassLoader classLoader = DataCalogueMetadataFormatReader.class.getClassLoader();
	return classLoader.getResourceAsStream(SCHEMA_FILENAME);
}
/**
 * Validates an XML source against an XSD source.
 *
 * @param xml the XML document to validate
 * @param xsd the W3C XML Schema to validate against
 * @throws SAXException if the schema cannot be parsed or the document is invalid
 * @throws IOException if either source cannot be read
 */
static void validateAgainstXSD(StreamSource xml, StreamSource xsd) throws SAXException, IOException {
	// NOTE(review): the default SchemaFactory may resolve external entities/DTDs;
	// if profiles can originate from untrusted parties, consider enabling
	// FEATURE_SECURE_PROCESSING — confirm the threat model first.
	SchemaFactory schemaFactory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
	Validator validator = schemaFactory.newSchema(xsd).newValidator();
	validator.validate(xml);
}
@Override
public void validateProfile(String xmlProfile) throws Exception {
	// BUG FIX: StreamSource(String) interprets its argument as a *system ID*
	// (a URL or file path), NOT as document content. The original therefore tried
	// to open the profile XML text — and the schema text — as if they were paths,
	// which can never validate the actual content. Wrap both strings in
	// StringReader so the content itself is parsed.
	validateAgainstXSD(new StreamSource(new StringReader(xmlProfile)),
			new StreamSource(new StringReader(getProfileSchema())));
}
/**
 * Validates the XML read from the given stream against the bundled profile schema.
 *
 * @param xml the stream supplying the profile XML to validate
 * @throws Exception if the schema cannot be loaded or the document is invalid
 */
public static void validateProfile(InputStream xml) throws Exception {
	StreamSource profileSource = new StreamSource(xml);
	StreamSource schemaSource = new StreamSource(getProfileSchemaInputStream());
	validateAgainstXSD(profileSource, schemaSource);
}
} }

View File

@ -3,11 +3,13 @@
*/ */
package org.gcube.datacatalogue.metadatadiscovery; package org.gcube.datacatalogue.metadatadiscovery;
import java.io.IOException;
import java.util.List; import java.util.List;
import org.gcube.datacatalogue.metadatadiscovery.bean.MetadataProfile; import org.gcube.datacatalogue.metadatadiscovery.bean.MetadataProfile;
import org.gcube.datacatalogue.metadatadiscovery.bean.jaxb.MetadataFormat; import org.gcube.datacatalogue.metadatadiscovery.bean.jaxb.MetadataFormat;
import org.gcube.datacatalogue.metadatadiscovery.bean.jaxb.NamespaceCategory; import org.gcube.datacatalogue.metadatadiscovery.bean.jaxb.NamespaceCategory;
import org.xml.sax.SAXException;
@ -20,7 +22,6 @@ import org.gcube.datacatalogue.metadatadiscovery.bean.jaxb.NamespaceCategory;
*/ */
public interface DataCatalogueMetadataDiscovery { public interface DataCatalogueMetadataDiscovery {
/** /**
* Gets the list of metadata types. * Gets the list of metadata types.
* *
@ -62,4 +63,19 @@ public interface DataCatalogueMetadataDiscovery {
*/ */
void resetNamespaceCategories(); void resetNamespaceCategories();
/**
* Return the XSD of the profile schema
* @return the String representation of the XSD containing the schema for a profile
*/
String getProfileSchema();
/**
* Validate the xml provided as argument
* @param xmlProfile the string representation of the XML to validate again the schema
* @throws IOException
* @throws SAXException
*/
void validateProfile(String xmlProfile) throws Exception;
} }

View File

@ -3,7 +3,11 @@
*/ */
package org.gcube.datacatalogue.metadatadiscovery; package org.gcube.datacatalogue.metadatadiscovery;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.List; import java.util.List;
import java.util.stream.Collectors;
import org.gcube.common.scope.api.ScopeProvider; import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.datacatalogue.metadatadiscovery.bean.MetadataProfile; import org.gcube.datacatalogue.metadatadiscovery.bean.MetadataProfile;
@ -55,4 +59,15 @@ public class TestDataCatalogueMetadataFormatReader {
e.printStackTrace(); e.printStackTrace();
} }
} }
public static String PROFILE_EXAMPLE_FILENAME = "profileExample.xml";
@Test
public void validateAgainstProfileSchema() throws Exception {
InputStream inputStream = this.getClass().getClassLoader().getResourceAsStream(PROFILE_EXAMPLE_FILENAME);
DataCalogueMetadataFormatReader.validateProfile(inputStream);
}
} }

View File

@ -0,0 +1,19 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE xml>
<configuration>
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{0}: %msg%n</pattern>
</encoder>
</appender>
<logger name="org.gcube" level="ERROR" />
<logger name="org.gcube.datacatalogue" level="TRACE" />
<root level="WARN">
<appender-ref ref="STDOUT" />
</root>
</configuration>

View File

@ -0,0 +1,80 @@
<metadataformat type="Dataset">
<metadatafield>
<fieldName>spatial</fieldName>
<mandatory>false</mandatory>
<dataType>GeoJSON</dataType>
<maxOccurs>1</maxOccurs>
<defaultValue />
<note>Insert a geoJSON characterising the spatial coverage of the
dataset.
</note>
<vocabulary />
<validator />
<tagging create="false" />
<grouping create="false" />
</metadatafield>
<metadatafield>
<fieldName>timeField</fieldName>
<mandatory>false</mandatory>
<dataType>Time</dataType>
<maxOccurs>1</maxOccurs>
<defaultValue />
<note>Insert a time value characterising the temporal coverage of the
dataset.
</note>
<vocabulary />
<validator />
<tagging create="false" />
<grouping create="false" />
</metadatafield>
<metadatafield>
<fieldName>timeIntervalField</fieldName>
<mandatory>false</mandatory>
<dataType>Time_Interval</dataType>
<maxOccurs>1</maxOccurs>
<defaultValue />
<note>Insert a time interval value characterising the temporal
coverage of the dataset.
</note>
<vocabulary />
<validator />
<tagging create="false" />
<grouping create="false" />
</metadatafield>
<metadatafield>
<fieldName>timeListField</fieldName>
<mandatory>false</mandatory>
<dataType>Times_ListOf</dataType>
<maxOccurs>1</maxOccurs>
<defaultValue />
<note>Insert a time list characterising the temporal coverage of the
dataset.
</note>
<vocabulary />
<validator />
<tagging create="false" />
<grouping create="false" />
</metadatafield>
<metadatafield>
<fieldName>GACSTerms</fieldName>
<mandatory>true</mandatory>
<dataType>String</dataType>
<maxOccurs>*</maxOccurs>
<defaultValue />
<note>Select one or more terms</note>
<vocabulary isMultiSelection="true">
<vocabularyField>GACS.Term1</vocabularyField>
<vocabularyField>GACS.Term2</vocabularyField>
<vocabularyField>GACS.Term3</vocabularyField>
<vocabularyField>GACS.Term4</vocabularyField>
<vocabularyField>GACS.Term5</vocabularyField>
<vocabularyField>GACS.Term6</vocabularyField>
<vocabularyField>GACS.Term7</vocabularyField>
<vocabularyField>GACS.Term8</vocabularyField>
<vocabularyField>GACS.Term9</vocabularyField>
</vocabulary>
<validator />
<tagging create="true">onValue</tagging>
<grouping create="false" />
</metadatafield>
</metadataformat>