Refs #13079: Add the create/update/delete operation in Profile Collection in gCat

Task-Url: https://support.d4science.org/issues/13079

git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-catalogue/gcubedatacatalogue-metadata-discovery@176268 82a268e6-3cf1-43bd-a215-b396298e98cf
feature/17423
Luca Frosini 5 years ago
parent 7aea3e9910
commit c90ab867ff

@ -1,11 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry including="**/*.java" kind="src" output="target/gcubedatacatalogue-metadata-discovery-3.1.0-SNAPSHOT/WEB-INF/classes" path="src/main/java">
<classpathentry including="**/*.java" kind="src" output="target/gcubedatacatalogue-metadata-discovery-3.4.0-SNAPSHOT/WEB-INF/classes" path="src/main/java">
<attributes>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="src" path="src/main/resources"/>
<classpathentry kind="src" output="target/test-classes" path="src/test/java">
<attributes>
<attribute name="optional" value="true"/>
@ -27,5 +28,5 @@
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="output" path="target/gcubedatacatalogue-metadata-discovery-3.1.0-SNAPSHOT/WEB-INF/classes"/>
<classpathentry kind="output" path="target/gcubedatacatalogue-metadata-discovery-3.4.0-SNAPSHOT/WEB-INF/classes"/>
</classpath>

@ -13,7 +13,7 @@
<groupId>org.gcube.data-catalogue</groupId>
<artifactId>gcubedatacatalogue-metadata-discovery</artifactId>
<packaging>jar</packaging>
<version>3.3.0-SNAPSHOT</version>
<version>3.4.0-SNAPSHOT</version>
<name>The gCube data catalogue metadata discovery library</name>
<description>The gCube data catalogue metadata discovery library</description>
<scm>
@ -112,6 +112,14 @@
<version>4.8.1</version>
<scope>test</scope>
</dependency>
<!-- Required to log on tests -->
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>1.0.13</version>
<scope>test</scope>
</dependency>
</dependencies>

@ -1,9 +1,21 @@
package org.gcube.datacatalogue.metadatadiscovery;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import javax.xml.XMLConstants;
import javax.xml.transform.stream.StreamSource;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import javax.xml.validation.Validator;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.common.scope.impl.ScopeBean;
@ -15,8 +27,7 @@ import org.gcube.datacatalogue.metadatadiscovery.reader.MetadataFormatReader;
import org.gcube.datacatalogue.metadatadiscovery.reader.NamespaceCategoryReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.SAXException;
/**
* The Class DataCalogueMetadataFormatReader.
@ -24,135 +35,167 @@ import org.slf4j.LoggerFactory;
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
* Jun 8, 2016
*/
public class DataCalogueMetadataFormatReader implements DataCatalogueMetadataDiscovery{
public class DataCalogueMetadataFormatReader implements DataCatalogueMetadataDiscovery {
private static String SCHEMA_FILENAME = "Gdcmetadataprofilev3.xsd";
private MetadataFormatDiscovery medataFormatDiscovery;
private ScopeBean scope;
private Map<String, MetadataFormat> hashMetadataFormats = null;
private Map<String,MetadataFormat> hashMetadataFormats = null;
private List<NamespaceCategory> namespaceCategories = null;
private String profileSchema = null;
private static Logger logger = LoggerFactory.getLogger(DataCalogueMetadataFormatReader.class);
/**
* Instantiates a new data catalogue metadata format reader.
*
* @throws Exception the exception
*/
public DataCalogueMetadataFormatReader() throws Exception {
String scopeString = ScopeProvider.instance.get();
logger.debug("Read scope "+scopeString +" from ScopeProvider");
if(scopeString==null || scopeString.isEmpty())
logger.debug("Read scope " + scopeString + " from ScopeProvider");
if(scopeString == null || scopeString.isEmpty())
throw new Exception("Please set a valid scope into ScopeProvider");
scope = new ScopeBean(scopeString);
scope = new ScopeBean(scopeString);
readNamespaces();
readMetadaFormats();
}
/**
* Read metadata formats.
*
* @throws Exception the exception
*/
private void readMetadaFormats() throws Exception{
private void readMetadaFormats() throws Exception {
medataFormatDiscovery = new MetadataFormatDiscovery(scope);
logger.info("MedataFormatDiscovery has retrieved: "+medataFormatDiscovery.getMetadataProfiles().size() +" metadata type/s");
logger.info("MedataFormatDiscovery has retrieved: " + medataFormatDiscovery.getMetadataProfiles().size()
+ " metadata type/s");
logger.debug("filling cache for MedataFormat");
hashMetadataFormats = new HashMap<String, MetadataFormat>(medataFormatDiscovery.getMetadataProfiles().size());
for (MetadataProfile mT : medataFormatDiscovery.getMetadataProfiles()) {
if(mT==null)
hashMetadataFormats = new HashMap<String,MetadataFormat>(medataFormatDiscovery.getMetadataProfiles().size());
for(MetadataProfile mT : medataFormatDiscovery.getMetadataProfiles()) {
if(mT == null)
continue;
MetadataFormatReader reader = new MetadataFormatReader(scope, mT.getId());
hashMetadataFormats.put(mT.getId(), reader.getMetadataFormat());
logger.debug("MetadataType id: "+mT.getId() +" cached as: "+reader.getMetadataFormat());
logger.debug("MetadataType id: " + mT.getId() + " cached as: " + reader.getMetadataFormat());
}
}
/**
* Read namespaces.
*/
private void readNamespaces() {
try{
if(namespaceCategories == null || namespaceCategories.isEmpty()){
try {
if(namespaceCategories == null || namespaceCategories.isEmpty()) {
if(namespaceCategories == null)
namespaceCategories = new ArrayList<NamespaceCategory>();
NamespaceCategoryReader rd = new NamespaceCategoryReader(scope);
namespaceCategories.addAll(rd.getNamespaces().getNamespaceCategories());
}
}catch(Exception e){
} catch(Exception e) {
logger.debug("An error occurred during read namespaces for categories: ", e);
}
}
/* (non-Javadoc)
* @see org.gcube.datacatalogue.metadatadiscovery.DataCatalogueMetadataDiscovery#getMetadataFormatForMetadataType(org.gcube.datacatalogue.metadatadiscovery.bean.MetadataType)
*/
@Override
public MetadataFormat getMetadataFormatForMetadataProfile(MetadataProfile profile) throws Exception {
if(profile==null)
throw new Exception("Input "+ MetadataProfile.class.getSimpleName() + " is null");
if(profile == null)
throw new Exception("Input " + MetadataProfile.class.getSimpleName() + " is null");
MetadataFormat format = hashMetadataFormats.get(profile.getId());
if(format!=null)
if(format != null)
return format;
MetadataFormatReader reader = new MetadataFormatReader(scope, profile.getId());
return reader.getMetadataFormat();
}
/* (non-Javadoc)
* @see org.gcube.datacatalogue.metadatadiscovery.DataCatalogueMetadataDiscovery#getListOfMetadataTypes()
*/
@Override
public List<MetadataProfile> getListOfMetadataProfiles() throws Exception {
if(medataFormatDiscovery==null)
if(medataFormatDiscovery == null)
readMetadaFormats();
return medataFormatDiscovery.getMetadataProfiles();
}
/* (non-Javadoc)
* @see org.gcube.datacatalogue.metadatadiscovery.DataCatalogueMetadataDiscovery#getListOfNamespaceCategories()
*/
/**
 * Returns the namespace categories known for the current scope,
 * loading them lazily on first access.
 *
 * @return the cached list of {@link NamespaceCategory} (possibly empty)
 * @throws Exception declared by the interface contract
 */
@Override
public List<NamespaceCategory> getListOfNamespaceCategories() throws Exception {
	// Lazy initialisation: only hit the infrastructure when the cache is cold.
	if (namespaceCategories == null) {
		readNamespaces();
	}
	return namespaceCategories;
}
/* (non-Javadoc)
* @see org.gcube.datacatalogue.metadatadiscovery.DataCatalogueMetadataDiscovery#resetMetadataProfile()
*/
/**
 * Invalidates the metadata-profile caches so the next read
 * re-fetches everything from the infrastructure.
 */
@Override
public void resetMetadataProfile() {
	// Drop both the discovery handle and the per-id format cache together:
	// they are rebuilt as a pair by readMetadaFormats().
	this.medataFormatDiscovery = null;
	this.hashMetadataFormats = null;
}
/* (non-Javadoc)
* @see org.gcube.datacatalogue.metadatadiscovery.DataCatalogueMetadataDiscovery#resetNamespaceCategories()
*/
/**
 * Invalidates the namespace-category cache; the next call to
 * {@link #getListOfNamespaceCategories()} reloads it.
 */
@Override
public void resetNamespaceCategories() {
	this.namespaceCategories = null;
}
/**
 * Returns the profile XSD shipped on the classpath as a single string,
 * caching it after the first read.
 *
 * @return the XSD text, joined with {@code \n} line separators
 * @throws IllegalStateException if the schema resource is missing or unreadable
 */
@Override
public String getProfileSchema() {
	if (profileSchema == null) {
		InputStream inputStream = getProfileSchemaInputStream();
		if (inputStream == null) {
			// Fail with a clear message instead of the opaque NPE the
			// original code produced when the resource was absent.
			throw new IllegalStateException("Schema resource not found on classpath: " + SCHEMA_FILENAME);
		}
		// try-with-resources: the original leaked the stream. Decode as
		// UTF-8 explicitly instead of the platform default charset.
		try (BufferedReader reader = new BufferedReader(
				new InputStreamReader(inputStream, java.nio.charset.StandardCharsets.UTF_8))) {
			profileSchema = reader.lines().collect(Collectors.joining("\n"));
		} catch (IOException e) {
			throw new IllegalStateException("Unable to read schema resource: " + SCHEMA_FILENAME, e);
		}
	}
	return profileSchema;
}
/**
 * Opens the profile XSD bundled with this library (resource named by
 * SCHEMA_FILENAME) from the classpath.
 *
 * @return the resource stream, or {@code null} if the XSD is not on the
 *         classpath (callers must handle this — see ClassLoader#getResourceAsStream)
 */
static InputStream getProfileSchemaInputStream() {
return DataCalogueMetadataFormatReader.class.getClassLoader().getResourceAsStream(SCHEMA_FILENAME);
}
/**
 * Validates the given XML document against the given XSD.
 *
 * @param xml the document to validate
 * @param xsd the schema to validate against
 * @throws SAXException if the document is not valid against the schema
 * @throws IOException  if either source cannot be read
 */
static void validateAgainstXSD(StreamSource xml, StreamSource xsd) throws SAXException, IOException {
	SchemaFactory factory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
	try {
		// XXE hardening (JAXP 1.5): the documents being validated may come
		// from untrusted callers, so forbid fetching external DTDs/schemas.
		factory.setProperty(XMLConstants.ACCESS_EXTERNAL_DTD, "");
		factory.setProperty(XMLConstants.ACCESS_EXTERNAL_SCHEMA, "");
	} catch (SAXException e) {
		// Legacy parser implementations may not recognise these properties;
		// validation still runs, just without the hardening.
		logger.warn("XML parser does not support JAXP 1.5 external-access restrictions", e);
	}
	Schema schema = factory.newSchema(xsd);
	Validator validator = schema.newValidator();
	validator.validate(xml);
}
/**
 * Validates the XML profile given as a string against the bundled profile XSD.
 *
 * @param xmlProfile the XML document content (not a URL) to validate
 * @throws Exception if the document does not conform to the schema
 */
@Override
public void validateProfile(String xmlProfile) throws Exception {
	// BUG FIX: StreamSource(String) interprets its argument as a SYSTEM ID
	// (i.e. a URL to fetch), not as XML content, so the original call never
	// parsed the supplied profile text. Wrap both strings in Readers.
	validateAgainstXSD(new StreamSource(new StringReader(xmlProfile)),
			new StreamSource(new StringReader(getProfileSchema())));
}
/**
 * Validates the XML profile read from the given stream against the
 * profile XSD bundled with this library.
 *
 * @param xml stream containing the profile document to validate
 * @throws Exception if the document does not conform to the schema
 */
public static void validateProfile(InputStream xml) throws Exception {
	StreamSource document = new StreamSource(xml);
	StreamSource schemaSource = new StreamSource(getProfileSchemaInputStream());
	validateAgainstXSD(document, schemaSource);
}
}

@ -3,11 +3,13 @@
*/
package org.gcube.datacatalogue.metadatadiscovery;
import java.io.IOException;
import java.util.List;
import org.gcube.datacatalogue.metadatadiscovery.bean.MetadataProfile;
import org.gcube.datacatalogue.metadatadiscovery.bean.jaxb.MetadataFormat;
import org.gcube.datacatalogue.metadatadiscovery.bean.jaxb.NamespaceCategory;
import org.xml.sax.SAXException;
@ -20,7 +22,6 @@ import org.gcube.datacatalogue.metadatadiscovery.bean.jaxb.NamespaceCategory;
*/
public interface DataCatalogueMetadataDiscovery {
/**
* Gets the list of metadata types.
*
@ -62,4 +63,19 @@ public interface DataCatalogueMetadataDiscovery {
*/
void resetNamespaceCategories();
/**
* Return the XSD of the profile schema
* @return the String representation of the XSD containing the schema for a profile
*/
String getProfileSchema();
/**
* Validate the xml provided as argument
* @param xmlProfile the string representation of the XML to validate against the schema
* @throws IOException
* @throws SAXException
*/
void validateProfile(String xmlProfile) throws Exception;
}

@ -3,7 +3,11 @@
*/
package org.gcube.datacatalogue.metadatadiscovery;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.List;
import java.util.stream.Collectors;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.datacatalogue.metadatadiscovery.bean.MetadataProfile;
@ -55,4 +59,15 @@ public class TestDataCatalogueMetadataFormatReader {
e.printStackTrace();
}
}
// Example profile bundled in the test resources; exercised by the test below.
public static String PROFILE_EXAMPLE_FILENAME = "profileExample.xml";

/**
 * Smoke test: the bundled example profile must validate against the
 * library's profile XSD without throwing.
 *
 * @throws Exception if validation fails or the resource is missing
 */
@Test
public void validateAgainstProfileSchema() throws Exception {
InputStream inputStream = this.getClass().getClassLoader().getResourceAsStream(PROFILE_EXAMPLE_FILENAME);
DataCalogueMetadataFormatReader.validateProfile(inputStream);
}
}

@ -0,0 +1,19 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE xml>
<configuration>
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{0}: %msg%n</pattern>
</encoder>
</appender>
<logger name="org.gcube" level="ERROR" />
<logger name="org.gcube.datacatalogue" level="TRACE" />
<root level="WARN">
<appender-ref ref="STDOUT" />
</root>
</configuration>

@ -0,0 +1,80 @@
<metadataformat type="Dataset">
<metadatafield>
<fieldName>spatial</fieldName>
<mandatory>false</mandatory>
<dataType>GeoJSON</dataType>
<maxOccurs>1</maxOccurs>
<defaultValue />
<note>Insert a geoJSON characterising the spatial coverage of the
dataset.
</note>
<vocabulary />
<validator />
<tagging create="false" />
<grouping create="false" />
</metadatafield>
<metadatafield>
<fieldName>timeField</fieldName>
<mandatory>false</mandatory>
<dataType>Time</dataType>
<maxOccurs>1</maxOccurs>
<defaultValue />
<note>Insert a time value characterising the temporal coverage of the
dataset.
</note>
<vocabulary />
<validator />
<tagging create="false" />
<grouping create="false" />
</metadatafield>
<metadatafield>
<fieldName>timeIntervalField</fieldName>
<mandatory>false</mandatory>
<dataType>Time_Interval</dataType>
<maxOccurs>1</maxOccurs>
<defaultValue />
<note>Insert a time interval value characterising the temporal
coverage of the dataset.
</note>
<vocabulary />
<validator />
<tagging create="false" />
<grouping create="false" />
</metadatafield>
<metadatafield>
<fieldName>timeListField</fieldName>
<mandatory>false</mandatory>
<dataType>Times_ListOf</dataType>
<maxOccurs>1</maxOccurs>
<defaultValue />
<note>Insert a time list characterising the temporal coverage of the
dataset.
</note>
<vocabulary />
<validator />
<tagging create="false" />
<grouping create="false" />
</metadatafield>
<metadatafield>
<fieldName>GACSTerms</fieldName>
<mandatory>true</mandatory>
<dataType>String</dataType>
<maxOccurs>*</maxOccurs>
<defaultValue />
<note>Select one or more terms</note>
<vocabulary isMultiSelection="true">
<vocabularyField>GACS.Term1</vocabularyField>
<vocabularyField>GACS.Term2</vocabularyField>
<vocabularyField>GACS.Term3</vocabularyField>
<vocabularyField>GACS.Term4</vocabularyField>
<vocabularyField>GACS.Term5</vocabularyField>
<vocabularyField>GACS.Term6</vocabularyField>
<vocabularyField>GACS.Term7</vocabularyField>
<vocabularyField>GACS.Term8</vocabularyField>
<vocabularyField>GACS.Term9</vocabularyField>
</vocabulary>
<validator />
<tagging create="true">onValue</tagging>
<grouping create="false" />
</metadatafield>
</metadataformat>
Loading…
Cancel
Save