Removed dependency on xerces

git-svn-id: http://svn.research-infrastructures.eu/public/d4science/gcube/trunk/application-support-layer/applicationSupportLayerCore@96733 82a268e6-3cf1-43bd-a215-b396298e98cf
nikolas.laskaris 2014-06-05 10:03:56 +00:00
parent c103b97609
commit a68ed9e98b
3 changed files with 150 additions and 133 deletions

View File

@@ -71,13 +71,14 @@
 		<version>1.6.4</version>
 		<scope>test</scope>
 	</dependency>
+	<!--
+	should be removed after release 3.2.0
 	<dependency>
 		<groupId>xerces</groupId>
 		<artifactId>xerces</artifactId>
-		<version>[2.0.0, 3.0.0]</version>
+		<version>[2.0.0, 3.0.0)</version>
 	</dependency>
+	-->
 	<dependency>
 		<groupId>org.gcube.resources</groupId>
 		<artifactId>registry-publisher</artifactId>
@@ -97,6 +98,7 @@
 		<version>2.3.0</version>
 	</dependency>
 	<!--
+	should be removed after release 3.2.0
 	<dependency>
 		<groupId>net.sourceforge.addressing</groupId>
 		<artifactId>addressing</artifactId>

View File

@@ -33,6 +33,7 @@ import org.gcube.informationsystem.publisher.stubs.registry.faults.PublisherExce
 import org.gcube.resources.discovery.client.api.DiscoveryClient;
 import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
 import org.w3c.dom.Document;
+import org.w3c.dom.Element;
 import org.w3c.dom.NodeList;
 import org.xml.sax.InputSource;
 import org.slf4j.Logger;
@@ -89,7 +90,6 @@ public class GenericResource implements GenericResourceInfoI {
         try {
             client = clientFor(org.gcube.common.resources.gcore.GenericResource.class); //GHNContext.getImplementation(ISClient.class);
         } catch (Exception e) {
-            // TODO Auto-generated catch block
             logger.error("Exception:", e);
             client = null;
         }
@@ -158,19 +158,27 @@ public class GenericResource implements GenericResourceInfoI {
         SimpleQuery query = null;
         try {
             query = queryFor(org.gcube.common.resources.gcore.GenericResource.class);
-            query.addCondition("$resource/Profile/SecondaryType eq 'DataSource'");
+            query.addCondition("$resource/Profile/SecondaryType eq 'DataSource'"); //this brings all collections, need to filter out the opensearch ones.
             if(onlyUserCollections)
                 query.addCondition("$resource/Profile/Body/SourceProperties/user eq 'true'");
             List<org.gcube.common.resources.gcore.GenericResource> results = client.submit(query);
             if (results == null || results.size() == 0)
                 logger.debug("Couldn't find any tree collections within that scope! Will return empty list.");
-            else
-                logger.debug("# of Tree Collections found: "+ results.size());
             for (org.gcube.common.resources.gcore.GenericResource gr : results)
                 pairs.put(gr.id(), gr);
         } catch (Exception e) {
             logger.debug("Remote Exception:" + e.toString());
         }
+        //remove from all collections set, the opensearch ones !
+        for(String key : pairs.keySet()){
+            org.gcube.common.resources.gcore.GenericResource collection = pairs.get(key);
+            Element body = collection.profile().body();
+            if(body.getElementsByTagName("type").getLength()==0)
+                pairs.remove(key);
+            else
+                logger.debug("Found tree collection: "+key);
+        }
+        logger.debug("# of Tree Collections found: "+ pairs.size());
         return pairs;
     }
@@ -185,7 +193,9 @@ public class GenericResource implements GenericResourceInfoI {
         SimpleQuery query = null;
         try {
             query = queryFor(org.gcube.common.resources.gcore.GenericResource.class);
-            query.addCondition("$resource/Profile/SecondaryType eq 'GCUBECollection'");
+//            query.addCondition("$resource/Profile/SecondaryType eq 'GCUBECollection'");
+            query.addCondition("$resource/Profile/SecondaryType eq 'DataSource'"); //changed from GCUBECollection to DataSource (2 be same as the tree collections)
+            query.addCondition("$resource/Profile/Body/SourceProperties/type eq 'opensearch'");
             if(onlyUserCollections)
                 query.addCondition("$resource/Profile/Body/CollectionInfo/user eq 'true'");
             List<org.gcube.common.resources.gcore.GenericResource> results = client.submit(query);
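
Note on the filtering loop added in the hunk above: if pairs is a java.util.HashMap, calling pairs.remove(key) while iterating pairs.keySet() will typically fail fast with a ConcurrentModificationException. Below is a minimal sketch of the same filter written with an explicit Iterator; the class name, the Map<String, String> stand-in values and the contains("<type>") predicate are illustrative assumptions, not the gCube API used in the diff.

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

public class CollectionFilterSketch {

    // Drops every entry whose value has no <type> element, mirroring the structure of
    // the getElementsByTagName("type").getLength()==0 check in the diff above.
    static void keepTreeCollections(Map<String, String> pairs) {
        Iterator<Map.Entry<String, String>> it = pairs.entrySet().iterator();
        while (it.hasNext()) {
            Map.Entry<String, String> entry = it.next();
            boolean hasTypeElement = entry.getValue().contains("<type>"); // stand-in predicate
            if (!hasTypeElement) {
                it.remove(); // safe removal while iterating
            }
        }
    }

    public static void main(String[] args) {
        Map<String, String> pairs = new HashMap<String, String>();
        pairs.put("id-1", "<SourceProperties><type>tree</type></SourceProperties>");
        pairs.put("id-2", "<SourceProperties><user>true</user></SourceProperties>");
        keepTreeCollections(pairs);
        System.out.println(pairs.keySet()); // prints [id-1]
    }
}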

View File

@@ -1,127 +1,132 @@
-package org.gcube.application.framework.core.util;
-
-import java.io.BufferedReader;
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.InputStreamReader;
-import java.io.Reader;
-import java.io.StringWriter;
-
-import javax.xml.parsers.DocumentBuilderFactory;
-import javax.xml.transform.Transformer;
-import javax.xml.transform.TransformerFactory;
-import javax.xml.transform.stream.StreamResult;
-import javax.xml.transform.stream.StreamSource;
-
-import org.apache.xml.serialize.OutputFormat;
-import org.apache.xml.serialize.XMLSerializer;
-import org.w3c.dom.Document;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * @author Valia Tsagkalidou (NKUA)
- * @author Nikolas Laskaris (NKUA)
- *
- */
-public class TransformXSLT {
-
-    /** The logger. */
-    private static final Logger logger = LoggerFactory.getLogger(TransformXSLT.class);
-
-    /**
-     * Transforms an xml document based on the given xslt
-     * @param xslt the xslt for transforming the xml
-     * @param xml the xml to be transformed
-     * @return a string containing the transformed xml (output of the transformation)
-     */
-    public static String transform(String xslt, String xml)
-    {
-        Transformer transformer;
-        try
-        {//Retrieve the XSLT from the DIS (generic resource), and create the transformer
-            ByteArrayInputStream xsltStream = new ByteArrayInputStream(xslt.getBytes());
-            TransformerFactory tFactory = TransformerFactory.newInstance();
-            transformer = tFactory.newTransformer(new StreamSource(xsltStream));
-
-            DocumentBuilderFactory dfactory = DocumentBuilderFactory.newInstance();
-            Document doc = null;
-
-            doc = dfactory.newDocumentBuilder().parse(xml);
-            // Apply the transformation
-            ByteArrayOutputStream ba_stream = new ByteArrayOutputStream();
-            OutputFormat format = new OutputFormat(doc);
-            format.setIndenting(false);
-            format.setOmitDocumentType(true);
-            format.setOmitXMLDeclaration(true);
-            StringWriter writer = new StringWriter();
-            XMLSerializer serial = new XMLSerializer(writer,format);
-            serial.serialize(doc);
-            transformer.transform(new StreamSource(new ByteArrayInputStream(writer.toString().getBytes())), new StreamResult(ba_stream));
-            //Prepares the object to be returned
-            StringBuffer buffer = new StringBuffer();
-            try {
-                InputStreamReader isr = new InputStreamReader( new ByteArrayInputStream(ba_stream.toByteArray()),
-                        "UTF8");
-                Reader in2 = new BufferedReader(isr);
-                int ch;
-                while ((ch = in2.read()) > -1) {
-                    buffer.append((char)ch);
-                }
-                in2.close();
-                return buffer.toString();
-            } catch (Exception e) {
-                logger.error("Exception:", e);
-            }
-        }
-        catch (Exception e) {
-            logger.error("Exception:", e);
-        }
-        return null;
-    }
-
-    /**
-     * Transforms an xml document based on the given transformer
-     * @param transformer the transformer based on which the transformation will be applied
-     * @param xml the xml document to be transformed
-     * @return a string containing the transformed xml (output of the transformation)
-     */
-    public static String transform(Transformer transformer, String xml)
-    {
-        DocumentBuilderFactory dfactory = DocumentBuilderFactory.newInstance();
-        Document doc = null;
-
-        try
-        {
-            doc = dfactory.newDocumentBuilder().parse(xml);
-            ByteArrayOutputStream ba_stream = new ByteArrayOutputStream();
-            OutputFormat format = new OutputFormat(doc);
-            format.setIndenting(false);
-            format.setOmitDocumentType(true);
-            format.setOmitXMLDeclaration(true);
-            StringWriter writer = new StringWriter();
-            XMLSerializer serial = new XMLSerializer(writer,format);
-            serial.serialize(doc);
-            transformer.transform(new StreamSource(new ByteArrayInputStream(writer.toString().getBytes())), new StreamResult(ba_stream));
-            //Prepares the object to be returned
-            StringBuffer buffer = new StringBuffer();
-            try {
-                InputStreamReader isr = new InputStreamReader( new ByteArrayInputStream(ba_stream.toByteArray()),
-                        "UTF8");
-                Reader in2 = new BufferedReader(isr);
-                int ch;
-                while ((ch = in2.read()) > -1) {
-                    buffer.append((char)ch);
-                }
-                in2.close();
-                return buffer.toString();
-            } catch (Exception e) {
-                logger.error("Exception:", e);
-            }
-        }
-        catch (Exception e) {
-            logger.error("Exception:", e);
-        }
-        return null;
-    }
-}
+///////
+// should be removed after release 3.2.0
+//////
+//package org.gcube.application.framework.core.util;
+//
+//import java.io.BufferedReader;
+//import java.io.ByteArrayInputStream;
+//import java.io.ByteArrayOutputStream;
+//import java.io.InputStreamReader;
+//import java.io.Reader;
+//import java.io.StringWriter;
+//
+//import javax.xml.parsers.DocumentBuilderFactory;
+//import javax.xml.transform.Transformer;
+//import javax.xml.transform.TransformerFactory;
+//import javax.xml.transform.stream.StreamResult;
+//import javax.xml.transform.stream.StreamSource;
+//
+//import org.apache.xml.serialize.OutputFormat;
+//import org.apache.xml.serialize.XMLSerializer;
+//import org.w3c.dom.Document;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+///**
+// * @author Valia Tsagkalidou (NKUA)
+// * @author Nikolas Laskaris (NKUA)
+// *
+// */
+//public class TransformXSLT {
+//
+//    /** The logger. */
+//    private static final Logger logger = LoggerFactory.getLogger(TransformXSLT.class);
+//
+//    /**
+//     * Transforms an xml document based on the given xslt
+//     * @param xslt the xslt for transforming the xml
+//     * @param xml the xml to be transformed
+//     * @return a string containing the transformed xml (output of the transformation)
+//     */
+//    public static String transform(String xslt, String xml)
+//    {
+//        Transformer transformer;
+//        try
+//        {//Retrieve the XSLT from the DIS (generic resource), and create the transformer
+//            ByteArrayInputStream xsltStream = new ByteArrayInputStream(xslt.getBytes());
+//            TransformerFactory tFactory = TransformerFactory.newInstance();
+//            transformer = tFactory.newTransformer(new StreamSource(xsltStream));
+//
+//            DocumentBuilderFactory dfactory = DocumentBuilderFactory.newInstance();
+//            Document doc = null;
+//
+//            doc = dfactory.newDocumentBuilder().parse(xml);
+//            // Apply the transformation
+//            ByteArrayOutputStream ba_stream = new ByteArrayOutputStream();
+//            OutputFormat format = new OutputFormat(doc);
+//            format.setIndenting(false);
+//            format.setOmitDocumentType(true);
+//            format.setOmitXMLDeclaration(true);
+//            StringWriter writer = new StringWriter();
+//            XMLSerializer serial = new XMLSerializer(writer,format);
+//            serial.serialize(doc);
+//            transformer.transform(new StreamSource(new ByteArrayInputStream(writer.toString().getBytes())), new StreamResult(ba_stream));
+//            //Prepares the object to be returned
+//            StringBuffer buffer = new StringBuffer();
+//            try {
+//                InputStreamReader isr = new InputStreamReader( new ByteArrayInputStream(ba_stream.toByteArray()),
+//                        "UTF8");
+//                Reader in2 = new BufferedReader(isr);
+//                int ch;
+//                while ((ch = in2.read()) > -1) {
+//                    buffer.append((char)ch);
+//                }
+//                in2.close();
+//                return buffer.toString();
+//            } catch (Exception e) {
+//                logger.error("Exception:", e);
+//            }
+//        }
+//        catch (Exception e) {
+//            logger.error("Exception:", e);
+//        }
+//        return null;
+//    }
+//
+//    /**
+//     * Transforms an xml document based on the given transformer
+//     * @param transformer the transformer based on which the transformation will be applied
+//     * @param xml the xml document to be transformed
+//     * @return a string containing the transformed xml (output of the transformation)
+//     */
+//    public static String transform(Transformer transformer, String xml)
+//    {
+//        DocumentBuilderFactory dfactory = DocumentBuilderFactory.newInstance();
+//        Document doc = null;
+//
+//        try
+//        {
+//            doc = dfactory.newDocumentBuilder().parse(xml);
+//            ByteArrayOutputStream ba_stream = new ByteArrayOutputStream();
+//            OutputFormat format = new OutputFormat(doc);
+//            format.setIndenting(false);
+//            format.setOmitDocumentType(true);
+//            format.setOmitXMLDeclaration(true);
+//            StringWriter writer = new StringWriter();
+//            XMLSerializer serial = new XMLSerializer(writer,format);
+//            serial.serialize(doc);
+//            transformer.transform(new StreamSource(new ByteArrayInputStream(writer.toString().getBytes())), new StreamResult(ba_stream));
+//            //Prepares the object to be returned
+//            StringBuffer buffer = new StringBuffer();
+//            try {
+//                InputStreamReader isr = new InputStreamReader( new ByteArrayInputStream(ba_stream.toByteArray()),
+//                        "UTF8");
+//                Reader in2 = new BufferedReader(isr);
+//                int ch;
+//                while ((ch = in2.read()) > -1) {
+//                    buffer.append((char)ch);
+//                }
+//                in2.close();
+//                return buffer.toString();
+//            } catch (Exception e) {
+//                logger.error("Exception:", e);
+//            }
+//        }
+//        catch (Exception e) {
+//            logger.error("Exception:", e);
+//        }
+//        return null;
+//    }
+//}
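
The class commented out above imported the xerces-specific org.apache.xml.serialize.OutputFormat and XMLSerializer, which is presumably what tied this module to the xerces artifact that the pom change above comments out. For reference, a minimal sketch of an equivalent transformation using only the JDK's javax.xml.transform API; the class and method names are illustrative, not part of the codebase, and the sketch assumes the xml argument holds the document content itself (the removed code handed it to DocumentBuilder.parse(String), which treats the string as a URI).

import java.io.StringReader;
import java.io.StringWriter;

import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;

// Illustrative, xerces-free sketch; not part of the ASL codebase.
public class JdkOnlyTransformSketch {

    // Applies the given XSLT (as a String) to the given XML content (as a String).
    public static String transform(String xslt, String xml) throws Exception {
        Transformer transformer = TransformerFactory.newInstance()
                .newTransformer(new StreamSource(new StringReader(xslt)));
        // The removed code used OutputFormat/XMLSerializer mainly to re-serialize the parsed
        // document without an XML declaration or DOCTYPE before transforming it; the JDK
        // transformer can omit the declaration itself and writes no DOCTYPE unless configured to.
        transformer.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "yes");
        transformer.setOutputProperty(OutputKeys.INDENT, "no");

        StringWriter out = new StringWriter();
        transformer.transform(new StreamSource(new StringReader(xml)), new StreamResult(out));
        return out.toString();
    }

    public static void main(String[] args) throws Exception {
        String identity = "<xsl:stylesheet version=\"1.0\""
                + " xmlns:xsl=\"http://www.w3.org/1999/XSL/Transform\">"
                + "<xsl:template match=\"@*|node()\">"
                + "<xsl:copy><xsl:apply-templates select=\"@*|node()\"/></xsl:copy>"
                + "</xsl:template></xsl:stylesheet>";
        System.out.println(transform(identity, "<doc><a>1</a></doc>")); // prints <doc><a>1</a></doc>
    }
}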