- Some corrections added to the algorithms GetTableDetails, ListDBInfo, ListSchemas, ListTables, RandomSampling, SmartSampling, Sampling and SubmitQuery: a check was added so that the trim function is invoked only on a non-null object.
- A bug was found and fixed involving the TabularData Database resource, which has 2 access points with the same endpoint, username and password values. In the DatabasesResourcesManager component, the getAccessPoints method in the DBResource class was modified and an equals method was defined in the AccessPoint class. The DBResource, Normalizer, Decider and Guesser classes were modified in order to manage the exception generated in the equals method. The exception is also managed in the algorithms (except ListNames) of the database-rm-algorithms component. - The version tag in the pom file was changed from 1.1.0 to 1.2.0. - Classes in the regressiontest package updated in the previous operations were committed. git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-access/DatabasesResourcesManagerAlgorithms@100744 82a268e6-3cf1-43bd-a215-b396298e98cf
This commit is contained in:
parent
41935e8750
commit
cf08416252
2
pom.xml
2
pom.xml
|
@ -11,7 +11,7 @@
|
|||
|
||||
<groupId>org.gcube.dataaccess.algorithms</groupId>
|
||||
<artifactId>database-rm-algorithms</artifactId>
|
||||
<version>1.1.0-SNAPSHOT</version>
|
||||
<version>1.2.0-SNAPSHOT</version>
|
||||
<name>DatabasesResourcesManagerAlgorithms</name>
|
||||
<description>Databases Resources Manager Algorithms</description>
|
||||
|
||||
|
|
|
@ -215,20 +215,24 @@ public class GetTableDetails extends StandardLocalExternalAlgorithm {
|
|||
private List<String> retrieveInfo() throws Exception,
|
||||
IllegalStateException, DiscoveryException, InvalidResultException {
|
||||
|
||||
resourceName = getInputParameter("ResourceName").trim();
|
||||
|
||||
if ((resourceName == null) || (resourceName.equals(""))) {
|
||||
|
||||
throw new Exception("Warning: insert the resource name");
|
||||
|
||||
resourceName = getInputParameter("ResourceName");
|
||||
|
||||
if(resourceName != null){
|
||||
resourceName = getInputParameter("ResourceName").trim();
|
||||
}
|
||||
|
||||
databaseName = getInputParameter("DatabaseName").trim();
|
||||
if ((resourceName == null) || (resourceName.equals(""))) {
|
||||
throw new Exception("Warning: insert the resource name");
|
||||
}
|
||||
|
||||
|
||||
databaseName = getInputParameter("DatabaseName");
|
||||
|
||||
if(databaseName != null){
|
||||
databaseName = getInputParameter("DatabaseName").trim();
|
||||
}
|
||||
if ((databaseName == null) || (databaseName.equals(""))) {
|
||||
|
||||
throw new Exception("Warning: insert the database name");
|
||||
|
||||
}
|
||||
|
||||
// retrieve the chosen resource
|
||||
|
@ -375,22 +379,24 @@ public class GetTableDetails extends StandardLocalExternalAlgorithm {
|
|||
private LinkedHashMap<String, StatisticalType> getDetails()
|
||||
throws Exception {
|
||||
|
||||
tableName = getInputParameter("TableName").trim();
|
||||
|
||||
|
||||
tableName = getInputParameter("TableName");
|
||||
|
||||
if(tableName != null){
|
||||
tableName = getInputParameter("TableName").trim();
|
||||
}
|
||||
if ((tableName == null) || (tableName.equals(""))) {
|
||||
|
||||
throw new Exception("Warning: insert the table name");
|
||||
|
||||
}
|
||||
|
||||
if (driverInfo.toLowerCase().contains("postgres")) {
|
||||
|
||||
schemaName = getInputParameter("SchemaName").trim();
|
||||
|
||||
|
||||
schemaName = getInputParameter("SchemaName");
|
||||
if(schemaName != null){
|
||||
schemaName = getInputParameter("SchemaName").trim();
|
||||
}
|
||||
if ((schemaName == null) || (schemaName.equals(""))) {
|
||||
|
||||
throw new Exception("Warning: insert the schema name");
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -502,26 +508,41 @@ public class GetTableDetails extends StandardLocalExternalAlgorithm {
|
|||
|
||||
}
|
||||
|
||||
private void normalizeDBInfo(DBResource resource) throws IOException {
|
||||
private void normalizeDBInfo(DBResource resource) throws Exception {
|
||||
|
||||
try{
|
||||
int ap = resource.getAccessPoints().size();
|
||||
|
||||
int ap = resource.getAccessPoints().size();
|
||||
|
||||
for (int i = 0; i < ap; i++) {
|
||||
|
||||
try {
|
||||
for (int i = 0; i < ap; i++) {
|
||||
resource.normalize(i);
|
||||
} catch (IOException e) {
|
||||
|
||||
// e.printStackTrace();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In GetTableDetails->: Error in normalization process"
|
||||
+ e.getMessage());
|
||||
|
||||
throw e;
|
||||
|
||||
}
|
||||
|
||||
|
||||
}catch (Exception e) {
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In GetTableDetails->: Error in normalization process"
|
||||
+ e.getMessage());
|
||||
|
||||
throw e;
|
||||
}
|
||||
|
||||
// int ap = resource.getAccessPoints().size();
|
||||
//
|
||||
// for (int i = 0; i < ap; i++) {
|
||||
//
|
||||
// try {
|
||||
// resource.normalize(i);
|
||||
// } catch (IOException e) {
|
||||
//
|
||||
// // e.printStackTrace();
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In GetTableDetails->: Error in normalization process"
|
||||
// + e.getMessage());
|
||||
//
|
||||
// throw e;
|
||||
//
|
||||
// }
|
||||
//
|
||||
// }
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -208,12 +208,14 @@ public class ListDBInfo extends StandardLocalExternalAlgorithm {
|
|||
|
||||
private List<AccessPoint> retrieveInfo() throws Exception, IllegalStateException, DiscoveryException, InvalidResultException {
|
||||
|
||||
resourceName = getInputParameter("ResourceName").trim();
|
||||
resourceName = getInputParameter("ResourceName");
|
||||
|
||||
if ((resourceName == null) || (resourceName.equals(""))){
|
||||
if (resourceName != null) {
|
||||
resourceName = getInputParameter("ResourceName").trim();
|
||||
}
|
||||
|
||||
if ((resourceName == null) || (resourceName.equals(""))) {
|
||||
throw new Exception("Warning: insert the resource name");
|
||||
|
||||
}
|
||||
|
||||
// retrieve information about the chosen resource
|
||||
|
@ -246,26 +248,43 @@ public class ListDBInfo extends StandardLocalExternalAlgorithm {
|
|||
|
||||
}
|
||||
|
||||
private void normalizeDBInfo(DBResource resource) throws IOException {
|
||||
private void normalizeDBInfo(DBResource resource) throws Exception {
|
||||
|
||||
try{
|
||||
int ap = resource.getAccessPoints().size();
|
||||
|
||||
int ap = resource.getAccessPoints().size();
|
||||
|
||||
for (int i = 0; i < ap; i++) {
|
||||
|
||||
try {
|
||||
for (int i = 0; i < ap; i++) {
|
||||
resource.normalize(i);
|
||||
} catch (IOException e) {
|
||||
// TODO Auto-generated catch block
|
||||
// e.printStackTrace();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListDBInfo->: Error in normalization process"
|
||||
+ e.getMessage());
|
||||
throw e;
|
||||
|
||||
}
|
||||
|
||||
|
||||
}catch (Exception e) {
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListDBInfo->: Error in normalization process"
|
||||
+ e.getMessage());
|
||||
|
||||
throw e;
|
||||
}
|
||||
|
||||
// int ap = resource.getAccessPoints().size();
|
||||
//
|
||||
// for (int i = 0; i < ap; i++) {
|
||||
//
|
||||
// try {
|
||||
// resource.normalize(i);
|
||||
// }
|
||||
//// catch (IOException e) {
|
||||
// catch (IOException e) {
|
||||
// // TODO Auto-generated catch block
|
||||
//// e.printStackTrace();
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In ListDBInfo->: Error in normalization process"
|
||||
// + e.getMessage());
|
||||
// throw e;
|
||||
//
|
||||
// }
|
||||
//
|
||||
// }
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -106,9 +106,9 @@ public class ListSchemas extends StandardLocalExternalAlgorithm {
|
|||
|
||||
map.put(String.valueOf(i), val);
|
||||
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In ListSchemas->getting schema's name: "
|
||||
// + val.getContent());
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In ListSchemas->getting schema's name: "
|
||||
// + val.getContent());
|
||||
|
||||
}
|
||||
|
||||
|
@ -164,7 +164,7 @@ public class ListSchemas extends StandardLocalExternalAlgorithm {
|
|||
|
||||
} finally {
|
||||
|
||||
if (sf!=null && sf.isClosed() == false) {
|
||||
if (sf != null && sf.isClosed() == false) {
|
||||
mgt.closeConnection();
|
||||
}
|
||||
|
||||
|
@ -175,7 +175,7 @@ public class ListSchemas extends StandardLocalExternalAlgorithm {
|
|||
@Override
|
||||
protected void setInputParameters() {
|
||||
|
||||
// AnalysisLogger.getLogger().debug("In ListSchemas->setting inputs");
|
||||
// AnalysisLogger.getLogger().debug("In ListSchemas->setting inputs");
|
||||
|
||||
// resource and database's name specified by the user
|
||||
|
||||
|
@ -197,7 +197,8 @@ public class ListSchemas extends StandardLocalExternalAlgorithm {
|
|||
|
||||
// generate a primitive type for the collection
|
||||
PrimitiveType output = new PrimitiveType(LinkedHashMap.class.getName(),
|
||||
map, PrimitiveTypes.MAP, "ResultsMap"+UUID.randomUUID(), "Results Map");
|
||||
map, PrimitiveTypes.MAP, "ResultsMap" + UUID.randomUUID(),
|
||||
"Results Map");
|
||||
|
||||
return output;
|
||||
|
||||
|
@ -240,20 +241,23 @@ public class ListSchemas extends StandardLocalExternalAlgorithm {
|
|||
|
||||
// the user specifies the resource and the database'name
|
||||
|
||||
resourceName = getInputParameter("ResourceName").trim();
|
||||
|
||||
if ((resourceName == null) || (resourceName.equals(""))) {
|
||||
|
||||
throw new Exception("Warning: insert the resource name");
|
||||
resourceName = getInputParameter("ResourceName");
|
||||
|
||||
if (resourceName != null) {
|
||||
resourceName = getInputParameter("ResourceName").trim();
|
||||
}
|
||||
|
||||
databaseName = getInputParameter("DatabaseName").trim();
|
||||
if ((resourceName == null) || (resourceName.equals(""))) {
|
||||
throw new Exception("Warning: insert the resource name");
|
||||
}
|
||||
|
||||
databaseName = getInputParameter("DatabaseName");
|
||||
|
||||
if (databaseName != null) {
|
||||
databaseName = getInputParameter("DatabaseName").trim();
|
||||
}
|
||||
if ((databaseName == null) || (databaseName.equals(""))) {
|
||||
|
||||
throw new Exception("Warning: insert the database name");
|
||||
|
||||
}
|
||||
|
||||
// retrieve the chosen resource
|
||||
|
@ -440,26 +444,41 @@ public class ListSchemas extends StandardLocalExternalAlgorithm {
|
|||
|
||||
}
|
||||
|
||||
private void normalizeDBInfo(DBResource resource) throws IOException {
|
||||
private void normalizeDBInfo(DBResource resource) throws Exception {
|
||||
|
||||
try{
|
||||
int ap = resource.getAccessPoints().size();
|
||||
|
||||
int ap = resource.getAccessPoints().size();
|
||||
|
||||
for (int i = 0; i < ap; i++) {
|
||||
|
||||
try {
|
||||
for (int i = 0; i < ap; i++) {
|
||||
resource.normalize(i);
|
||||
} catch (IOException e) {
|
||||
|
||||
// e.printStackTrace();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListTables->: Error in normalization process"
|
||||
+ e.getMessage());
|
||||
|
||||
throw e;
|
||||
|
||||
}
|
||||
|
||||
|
||||
}catch (Exception e) {
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListSchemas->: Error in normalization process"
|
||||
+ e.getMessage());
|
||||
|
||||
throw e;
|
||||
}
|
||||
|
||||
// int ap = resource.getAccessPoints().size();
|
||||
//
|
||||
// for (int i = 0; i < ap; i++) {
|
||||
//
|
||||
//// try {
|
||||
// resource.normalize(i);
|
||||
//// } catch (IOException e) {
|
||||
//
|
||||
// // e.printStackTrace();
|
||||
//// AnalysisLogger.getLogger().debug(
|
||||
//// "In ListTables->: Error in normalization process"
|
||||
//// + e.getMessage());
|
||||
//
|
||||
//// throw e;
|
||||
//
|
||||
//// }
|
||||
//
|
||||
// }
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -91,12 +91,12 @@ public class ListTables extends StandardLocalExternalAlgorithm {
|
|||
|
||||
if (driverInfo.toLowerCase().contains("postgres")) {
|
||||
|
||||
schemaName = getInputParameter("SchemaName").trim();
|
||||
|
||||
schemaName = getInputParameter("SchemaName");
|
||||
if(schemaName != null){
|
||||
schemaName = getInputParameter("SchemaName").trim();
|
||||
}
|
||||
if ((schemaName == null) || (schemaName.equals(""))) {
|
||||
|
||||
throw new Exception("Warning: insert the schema name");
|
||||
|
||||
}
|
||||
|
||||
// if (!schemaName.equals("")) {
|
||||
|
@ -204,7 +204,7 @@ public class ListTables extends StandardLocalExternalAlgorithm {
|
|||
|
||||
} finally {
|
||||
|
||||
if (sf!=null && sf.isClosed() == false) {
|
||||
if (sf != null && sf.isClosed() == false) {
|
||||
mgt.closeConnection();
|
||||
}
|
||||
|
||||
|
@ -215,7 +215,7 @@ public class ListTables extends StandardLocalExternalAlgorithm {
|
|||
@Override
|
||||
protected void setInputParameters() {
|
||||
|
||||
// AnalysisLogger.getLogger().debug("In ListTables->setting inputs");
|
||||
// AnalysisLogger.getLogger().debug("In ListTables->setting inputs");
|
||||
|
||||
// parameters specified by the user
|
||||
addStringInput("ResourceName", "The name of the resource", "");
|
||||
|
@ -237,7 +237,8 @@ public class ListTables extends StandardLocalExternalAlgorithm {
|
|||
|
||||
// generate a primitive type for the collection
|
||||
PrimitiveType output = new PrimitiveType(LinkedHashMap.class.getName(),
|
||||
map, PrimitiveTypes.MAP, "ResultsMap"+UUID.randomUUID(), "Results Map");
|
||||
map, PrimitiveTypes.MAP, "ResultsMap" + UUID.randomUUID(),
|
||||
"Results Map");
|
||||
|
||||
return output;
|
||||
|
||||
|
@ -247,20 +248,23 @@ public class ListTables extends StandardLocalExternalAlgorithm {
|
|||
IllegalStateException, DiscoveryException, InvalidResultException {
|
||||
|
||||
// parameters specified by the user
|
||||
resourceName = getInputParameter("ResourceName").trim();
|
||||
|
||||
if ((resourceName == null) || (resourceName.equals(""))) {
|
||||
|
||||
throw new Exception("Warning: insert the resource name");
|
||||
resourceName = getInputParameter("ResourceName");
|
||||
|
||||
if (resourceName != null) {
|
||||
resourceName = getInputParameter("ResourceName").trim();
|
||||
}
|
||||
|
||||
databaseName = getInputParameter("DatabaseName").trim();
|
||||
if ((resourceName == null) || (resourceName.equals(""))) {
|
||||
throw new Exception("Warning: insert the resource name");
|
||||
}
|
||||
|
||||
databaseName = getInputParameter("DatabaseName");
|
||||
|
||||
if (databaseName != null) {
|
||||
databaseName = getInputParameter("DatabaseName").trim();
|
||||
}
|
||||
if ((databaseName == null) || (databaseName.equals(""))) {
|
||||
|
||||
throw new Exception("Warning: insert the database name");
|
||||
|
||||
}
|
||||
|
||||
// retrieve the chosen resource
|
||||
|
@ -427,25 +431,40 @@ public class ListTables extends StandardLocalExternalAlgorithm {
|
|||
|
||||
}
|
||||
|
||||
private void normalizeDBInfo(DBResource resource) throws IOException {
|
||||
private void normalizeDBInfo(DBResource resource) throws Exception {
|
||||
|
||||
try{
|
||||
int ap = resource.getAccessPoints().size();
|
||||
|
||||
int ap = resource.getAccessPoints().size();
|
||||
|
||||
for (int i = 0; i < ap; i++) {
|
||||
|
||||
try {
|
||||
for (int i = 0; i < ap; i++) {
|
||||
resource.normalize(i);
|
||||
} catch (IOException e) {
|
||||
// e.printStackTrace();
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListTables->: Error in normalization process"
|
||||
+ e.getMessage());
|
||||
|
||||
throw e;
|
||||
}
|
||||
|
||||
|
||||
}catch (Exception e) {
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListTables->: Error in normalization process"
|
||||
+ e.getMessage());
|
||||
|
||||
throw e;
|
||||
}
|
||||
|
||||
// int ap = resource.getAccessPoints().size();
|
||||
//
|
||||
// for (int i = 0; i < ap; i++) {
|
||||
//
|
||||
// try {
|
||||
// resource.normalize(i);
|
||||
// } catch (IOException e) {
|
||||
// // e.printStackTrace();
|
||||
//
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In ListTables->: Error in normalization process"
|
||||
// + e.getMessage());
|
||||
//
|
||||
// throw e;
|
||||
// }
|
||||
//
|
||||
// }
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -81,14 +81,20 @@ public class RandomSampleOnTable extends StandardLocalExternalAlgorithm {
|
|||
List<String> Info = retrieveInfo();
|
||||
|
||||
// check on table name field
|
||||
tableName = getInputParameter("TableName").trim();
|
||||
tableName = getInputParameter("TableName");
|
||||
if(tableName != null){
|
||||
tableName = getInputParameter("TableName").trim();
|
||||
}
|
||||
if ((tableName == null) || (tableName.equals(""))) {
|
||||
throw new Exception("Warning: insert the table name");
|
||||
}
|
||||
|
||||
// check on schema name field
|
||||
if (driverInfo.toLowerCase().contains("postgres")) {
|
||||
schemaName = getInputParameter("SchemaName").trim();
|
||||
schemaName = getInputParameter("SchemaName");
|
||||
if(schemaName != null){
|
||||
schemaName = getInputParameter("SchemaName").trim();
|
||||
}
|
||||
if ((schemaName == null) || (schemaName.equals(""))) {
|
||||
throw new Exception("Warning: insert the schema name");
|
||||
}
|
||||
|
@ -142,7 +148,7 @@ public class RandomSampleOnTable extends StandardLocalExternalAlgorithm {
|
|||
|
||||
} finally {
|
||||
// close the connection
|
||||
if (sf!=null && sf.isClosed() == false) {
|
||||
if (sf != null && sf.isClosed() == false) {
|
||||
mgt.closeConnection();
|
||||
}
|
||||
}
|
||||
|
@ -163,7 +169,8 @@ public class RandomSampleOnTable extends StandardLocalExternalAlgorithm {
|
|||
"In RandomSampleOnTable->retrieving outputs");
|
||||
// generate a primitive type for the collection
|
||||
PrimitiveType output = new PrimitiveType(LinkedHashMap.class.getName(),
|
||||
map, PrimitiveTypes.MAP, "ResultsMap"+UUID.randomUUID(), "Results Map");
|
||||
map, PrimitiveTypes.MAP, "ResultsMap" + UUID.randomUUID(),
|
||||
"Results Map");
|
||||
return output;
|
||||
}
|
||||
|
||||
|
@ -176,12 +183,21 @@ public class RandomSampleOnTable extends StandardLocalExternalAlgorithm {
|
|||
private List<String> retrieveInfo() throws Exception,
|
||||
IllegalStateException, DiscoveryException, InvalidResultException {
|
||||
|
||||
resourceName = getInputParameter("ResourceName").trim();
|
||||
resourceName = getInputParameter("ResourceName");
|
||||
|
||||
if (resourceName != null) {
|
||||
resourceName = getInputParameter("ResourceName").trim();
|
||||
}
|
||||
|
||||
if ((resourceName == null) || (resourceName.equals(""))) {
|
||||
throw new Exception("Warning: insert the resource name");
|
||||
}
|
||||
|
||||
databaseName = getInputParameter("DatabaseName").trim();
|
||||
databaseName = getInputParameter("DatabaseName");
|
||||
|
||||
if (databaseName != null) {
|
||||
databaseName = getInputParameter("DatabaseName").trim();
|
||||
}
|
||||
if ((databaseName == null) || (databaseName.equals(""))) {
|
||||
throw new Exception("Warning: insert the database name");
|
||||
}
|
||||
|
@ -266,19 +282,35 @@ public class RandomSampleOnTable extends StandardLocalExternalAlgorithm {
|
|||
}
|
||||
|
||||
// to normalize the information related to a database
|
||||
private void normalizeDBInfo(DBResource resource) throws IOException {
|
||||
int ap = resource.getAccessPoints().size();
|
||||
for (int i = 0; i < ap; i++) {
|
||||
try {
|
||||
private void normalizeDBInfo(DBResource resource) throws Exception {
|
||||
|
||||
try{
|
||||
int ap = resource.getAccessPoints().size();
|
||||
|
||||
for (int i = 0; i < ap; i++) {
|
||||
resource.normalize(i);
|
||||
} catch (IOException e) {
|
||||
// e.printStackTrace();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In RandomSampleOnTable->: Error in normalization process"
|
||||
+ e.getMessage());
|
||||
throw e;
|
||||
}
|
||||
|
||||
}catch (Exception e) {
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In RandomSampleOnTable->: Error in normalization process"
|
||||
+ e.getMessage());
|
||||
|
||||
throw e;
|
||||
}
|
||||
|
||||
// int ap = resource.getAccessPoints().size();
|
||||
// for (int i = 0; i < ap; i++) {
|
||||
// try {
|
||||
// resource.normalize(i);
|
||||
// } catch (IOException e) {
|
||||
// // e.printStackTrace();
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In RandomSampleOnTable->: Error in normalization process"
|
||||
// + e.getMessage());
|
||||
// throw e;
|
||||
// }
|
||||
// }
|
||||
}
|
||||
|
||||
// create the database's connection
|
||||
|
@ -373,10 +405,10 @@ public class RandomSampleOnTable extends StandardLocalExternalAlgorithm {
|
|||
}
|
||||
|
||||
// to add the file
|
||||
// PrimitiveType fileResult = new PrimitiveType(File.class.getName(),
|
||||
// mgt.getFileSampleTableResult(), PrimitiveTypes.FILE, "File",
|
||||
// "File");
|
||||
// mapResults.put("File", fileResult);
|
||||
// PrimitiveType fileResult = new PrimitiveType(File.class.getName(),
|
||||
// mgt.getFileSampleTableResult(), PrimitiveTypes.FILE, "File",
|
||||
// "File");
|
||||
// mapResults.put("File", fileResult);
|
||||
|
||||
return mapResults;
|
||||
|
||||
|
|
|
@ -79,14 +79,20 @@ public class SampleOnTable extends StandardLocalExternalAlgorithm {
|
|||
List<String> Info = retrieveInfo();
|
||||
|
||||
// check on table name field
|
||||
tableName = getInputParameter("TableName").trim();
|
||||
tableName = getInputParameter("TableName");
|
||||
if(tableName != null){
|
||||
tableName = getInputParameter("TableName").trim();
|
||||
}
|
||||
if ((tableName == null) || (tableName.equals(""))) {
|
||||
throw new Exception("Warning: insert the table name");
|
||||
}
|
||||
|
||||
// check on schema name field
|
||||
if (driverInfo.toLowerCase().contains("postgres")) {
|
||||
schemaName = getInputParameter("SchemaName").trim();
|
||||
schemaName = getInputParameter("SchemaName");
|
||||
if(schemaName != null){
|
||||
schemaName = getInputParameter("SchemaName").trim();
|
||||
}
|
||||
if ((schemaName == null) || (schemaName.equals(""))) {
|
||||
throw new Exception("Warning: insert the schema name");
|
||||
}
|
||||
|
@ -177,12 +183,21 @@ public class SampleOnTable extends StandardLocalExternalAlgorithm {
|
|||
private List<String> retrieveInfo() throws Exception,
|
||||
IllegalStateException, DiscoveryException, InvalidResultException {
|
||||
|
||||
resourceName = getInputParameter("ResourceName").trim();
|
||||
resourceName = getInputParameter("ResourceName");
|
||||
|
||||
if (resourceName != null) {
|
||||
resourceName = getInputParameter("ResourceName").trim();
|
||||
}
|
||||
|
||||
if ((resourceName == null) || (resourceName.equals(""))) {
|
||||
throw new Exception("Warning: insert the resource name");
|
||||
}
|
||||
|
||||
databaseName = getInputParameter("DatabaseName").trim();
|
||||
databaseName = getInputParameter("DatabaseName");
|
||||
|
||||
if (databaseName != null) {
|
||||
databaseName = getInputParameter("DatabaseName").trim();
|
||||
}
|
||||
if ((databaseName == null) || (databaseName.equals(""))) {
|
||||
throw new Exception("Warning: insert the database name");
|
||||
}
|
||||
|
@ -267,19 +282,35 @@ public class SampleOnTable extends StandardLocalExternalAlgorithm {
|
|||
}
|
||||
|
||||
// to normalize the information related to a database
|
||||
private void normalizeDBInfo(DBResource resource) throws IOException {
|
||||
int ap = resource.getAccessPoints().size();
|
||||
for (int i = 0; i < ap; i++) {
|
||||
try {
|
||||
private void normalizeDBInfo(DBResource resource) throws Exception {
|
||||
|
||||
try{
|
||||
int ap = resource.getAccessPoints().size();
|
||||
|
||||
for (int i = 0; i < ap; i++) {
|
||||
resource.normalize(i);
|
||||
} catch (IOException e) {
|
||||
// e.printStackTrace();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SampleOnTable->: Error in normalization process"
|
||||
+ e.getMessage());
|
||||
throw e;
|
||||
}
|
||||
|
||||
}catch (Exception e) {
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SampleOnTable->: Error in normalization process"
|
||||
+ e.getMessage());
|
||||
|
||||
throw e;
|
||||
}
|
||||
|
||||
// int ap = resource.getAccessPoints().size();
|
||||
// for (int i = 0; i < ap; i++) {
|
||||
// try {
|
||||
// resource.normalize(i);
|
||||
// } catch (IOException e) {
|
||||
// // e.printStackTrace();
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In SampleOnTable->: Error in normalization process"
|
||||
// + e.getMessage());
|
||||
// throw e;
|
||||
// }
|
||||
// }
|
||||
}
|
||||
|
||||
// create the database's connection
|
||||
|
|
|
@ -81,14 +81,20 @@ public class SmartSampleOnTable extends StandardLocalExternalAlgorithm {
|
|||
List<String> Info = retrieveInfo();
|
||||
|
||||
// check on table name field
|
||||
tableName = getInputParameter("TableName").trim();
|
||||
tableName = getInputParameter("TableName");
|
||||
if(tableName != null){
|
||||
tableName = getInputParameter("TableName").trim();
|
||||
}
|
||||
if ((tableName == null) || (tableName.equals(""))) {
|
||||
throw new Exception("Warning: insert the table name");
|
||||
}
|
||||
|
||||
// check on schema name field
|
||||
if (driverInfo.toLowerCase().contains("postgres")) {
|
||||
schemaName = getInputParameter("SchemaName").trim();
|
||||
schemaName = getInputParameter("SchemaName");
|
||||
if(schemaName != null){
|
||||
schemaName = getInputParameter("SchemaName").trim();
|
||||
}
|
||||
if ((schemaName == null) || (schemaName.equals(""))) {
|
||||
throw new Exception("Warning: insert the schema name");
|
||||
}
|
||||
|
@ -175,12 +181,21 @@ public class SmartSampleOnTable extends StandardLocalExternalAlgorithm {
|
|||
private List<String> retrieveInfo() throws Exception,
|
||||
IllegalStateException, DiscoveryException, InvalidResultException {
|
||||
|
||||
resourceName = getInputParameter("ResourceName").trim();
|
||||
resourceName = getInputParameter("ResourceName");
|
||||
|
||||
if (resourceName != null) {
|
||||
resourceName = getInputParameter("ResourceName").trim();
|
||||
}
|
||||
|
||||
if ((resourceName == null) || (resourceName.equals(""))) {
|
||||
throw new Exception("Warning: insert the resource name");
|
||||
}
|
||||
|
||||
databaseName = getInputParameter("DatabaseName").trim();
|
||||
databaseName = getInputParameter("DatabaseName");
|
||||
|
||||
if (databaseName != null) {
|
||||
databaseName = getInputParameter("DatabaseName").trim();
|
||||
}
|
||||
if ((databaseName == null) || (databaseName.equals(""))) {
|
||||
throw new Exception("Warning: insert the database name");
|
||||
}
|
||||
|
@ -268,19 +283,35 @@ public class SmartSampleOnTable extends StandardLocalExternalAlgorithm {
|
|||
}
|
||||
|
||||
// to normalize the information related to a database
|
||||
private void normalizeDBInfo(DBResource resource) throws IOException {
|
||||
int ap = resource.getAccessPoints().size();
|
||||
for (int i = 0; i < ap; i++) {
|
||||
try {
|
||||
private void normalizeDBInfo(DBResource resource) throws Exception {
|
||||
|
||||
try{
|
||||
int ap = resource.getAccessPoints().size();
|
||||
|
||||
for (int i = 0; i < ap; i++) {
|
||||
resource.normalize(i);
|
||||
} catch (IOException e) {
|
||||
// e.printStackTrace();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SmartSampleOnTable->: Error in normalization process"
|
||||
+ e.getMessage());
|
||||
throw e;
|
||||
}
|
||||
|
||||
}catch (Exception e) {
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SmartSampleOnTable->: Error in normalization process"
|
||||
+ e.getMessage());
|
||||
|
||||
throw e;
|
||||
}
|
||||
|
||||
// int ap = resource.getAccessPoints().size();
|
||||
// for (int i = 0; i < ap; i++) {
|
||||
// try {
|
||||
// resource.normalize(i);
|
||||
// } catch (IOException e) {
|
||||
// // e.printStackTrace();
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In SmartSampleOnTable->: Error in normalization process"
|
||||
// + e.getMessage());
|
||||
// throw e;
|
||||
// }
|
||||
// }
|
||||
}
|
||||
|
||||
// create the database's connection
|
||||
|
|
|
@ -307,12 +307,21 @@ public class SubmitQuery extends StandardLocalExternalAlgorithm {
|
|||
private List<String> retrieveInfo() throws Exception,
|
||||
IllegalStateException, DiscoveryException, InvalidResultException {
|
||||
|
||||
resourceName = getInputParameter("ResourceName").trim();
|
||||
resourceName = getInputParameter("ResourceName");
|
||||
|
||||
if (resourceName != null) {
|
||||
resourceName = getInputParameter("ResourceName").trim();
|
||||
}
|
||||
|
||||
if ((resourceName == null) || (resourceName.equals(""))) {
|
||||
throw new Exception("Warning: insert the resource name");
|
||||
}
|
||||
|
||||
databaseName = getInputParameter("DatabaseName").trim();
|
||||
databaseName = getInputParameter("DatabaseName");
|
||||
|
||||
if (databaseName != null) {
|
||||
databaseName = getInputParameter("DatabaseName").trim();
|
||||
}
|
||||
if ((databaseName == null) || (databaseName.equals(""))) {
|
||||
throw new Exception("Warning: insert the database name");
|
||||
}
|
||||
|
@ -625,19 +634,36 @@ public class SubmitQuery extends StandardLocalExternalAlgorithm {
|
|||
return NotAllowed;
|
||||
}
|
||||
|
||||
private void normalizeDBInfo(DBResource resource) throws IOException {
|
||||
int ap = resource.getAccessPoints().size();
|
||||
for (int i = 0; i < ap; i++) {
|
||||
try {
|
||||
private void normalizeDBInfo(DBResource resource) throws Exception {
|
||||
|
||||
|
||||
try{
|
||||
int ap = resource.getAccessPoints().size();
|
||||
|
||||
for (int i = 0; i < ap; i++) {
|
||||
resource.normalize(i);
|
||||
} catch (IOException e) {
|
||||
// e.printStackTrace();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SubmitQuery->: Error in normalization process"
|
||||
+ e.getMessage());
|
||||
throw e;
|
||||
}
|
||||
|
||||
}catch (Exception e) {
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SubmitQuery->: Error in normalization process"
|
||||
+ e.getMessage());
|
||||
|
||||
throw e;
|
||||
}
|
||||
|
||||
// int ap = resource.getAccessPoints().size();
|
||||
// for (int i = 0; i < ap; i++) {
|
||||
// try {
|
||||
// resource.normalize(i);
|
||||
// } catch (IOException e) {
|
||||
// // e.printStackTrace();
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In SubmitQuery->: Error in normalization process"
|
||||
// + e.getMessage());
|
||||
// throw e;
|
||||
// }
|
||||
// }
|
||||
}
|
||||
|
||||
// private void formatWithQuotes(String Query) {
|
||||
|
|
|
@ -11,13 +11,15 @@ import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
|
|||
|
||||
public class RegressionListDBInfo {
|
||||
|
||||
static String[] algorithms = { "Postgres", "NullInputValue"};
|
||||
// static String[] algorithms = { "Postgres", "NullInputValue"};
|
||||
|
||||
// static AlgorithmConfiguration[] configs = { testPostgres1(),
|
||||
// testPostgres2(), testPostgis(), Mysql(), NullInputValue(),
|
||||
// Postgres3() };
|
||||
|
||||
static AlgorithmConfiguration[] configs = { testPostgres1(), NullInputValue()};
|
||||
// static AlgorithmConfiguration[] configs = { testPostgres1(), NullInputValue()};
|
||||
static String[] algorithms = { "Postgres"};
|
||||
static AlgorithmConfiguration[] configs = { testPostgres1()};
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
|
||||
|
@ -57,8 +59,9 @@ public class RegressionListDBInfo {
|
|||
|
||||
// config.setParam("ResourceName", "AquaMaps Service DataBase");
|
||||
|
||||
config.setParam("ResourceName", "StatisticalManagerDataBase");
|
||||
|
||||
// config.setParam("ResourceName", "StatisticalManagerDataBase");
|
||||
config.setParam("ResourceName", "TabularData Database");
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
|
|
@ -18,10 +18,10 @@ public class RegressionListSchemas {
|
|||
// Postgres3() };
|
||||
|
||||
|
||||
|
||||
|
||||
static AlgorithmConfiguration[] configs = {testPostgres1()};
|
||||
static String[] algorithms = { "Postgres1"};
|
||||
|
||||
static AlgorithmConfiguration[] configs = { testPostgres1()};
|
||||
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
|
||||
|
@ -71,6 +71,12 @@ public class RegressionListSchemas {
|
|||
config.setParam("ResourceName", "StatisticalManagerDataBase");
|
||||
config.setParam("DatabaseName", "testdb");
|
||||
|
||||
// config.setParam("ResourceName", "TimeSeriesDatabase");
|
||||
// config.setParam("DatabaseName", "timeseries");
|
||||
|
||||
// config.setParam("ResourceName", "Geoserver database ");
|
||||
// config.setParam("DatabaseName", "aquamapsdb");
|
||||
|
||||
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
|
|
@ -17,7 +17,7 @@ public class RegressionListTables {
|
|||
|
||||
|
||||
static String[] algorithms = { "Postgres1"};
|
||||
static AlgorithmConfiguration[] configs = { testPostgres1()};
|
||||
static AlgorithmConfiguration[] configs = { Mysql()};
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
|
||||
|
@ -60,10 +60,12 @@ public class RegressionListTables {
|
|||
config.setParam("ResourceName", "StatisticalManagerDataBase");
|
||||
config.setParam("DatabaseName", "testdb");
|
||||
config.setParam("SchemaName", "public");
|
||||
// config.setParam("SchemaName", "aquamapsvre");
|
||||
|
||||
|
||||
// config.setParam("ResourceName", "CatalogOfLife2010");
|
||||
// config.setParam("DatabaseName", "col2oct2010");
|
||||
// config.setParam("SchemaName", "");
|
||||
//// config.setParam("SchemaName", "");
|
||||
|
||||
|
||||
|
||||
|
@ -84,7 +86,7 @@ public class RegressionListTables {
|
|||
// a test with a database mysql
|
||||
config.setParam("ResourceName", "CatalogOfLife2010");
|
||||
config.setParam("DatabaseName", "col2oct2010");
|
||||
config.setParam("SchemaName", "public");
|
||||
config.setParam("SchemaName", "");
|
||||
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
|
|
@ -10,15 +10,18 @@ import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactor
|
|||
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
|
||||
|
||||
public class RegressionRandomSampleOnTable {
|
||||
|
||||
//static AlgorithmConfiguration[] configs = { testPostgres1(), testPostgis(), testMysql1(),testMysql2(), NullInputValue1(), NullInputValue2(), NullInputValue3(), NullInputValue4()};
|
||||
|
||||
// static String[] algorithms = { "Postgres1", "Postgis", "Mysql1", "Mysql2", "NullInputValue1", "NullInputValue2", "NullInputValue3", "NullInputValue4"};
|
||||
|
||||
|
||||
static AlgorithmConfiguration[] configs = { testPostgres1()};
|
||||
static String[] algorithms = {"Postgres1"};
|
||||
|
||||
// static AlgorithmConfiguration[] configs = { testPostgres1(),
|
||||
// testPostgis(), testMysql1(),testMysql2(), NullInputValue1(),
|
||||
// NullInputValue2(), NullInputValue3(), NullInputValue4()};
|
||||
|
||||
// static String[] algorithms = { "Postgres1", "Postgis", "Mysql1",
|
||||
// "Mysql2", "NullInputValue1", "NullInputValue2", "NullInputValue3",
|
||||
// "NullInputValue4"};
|
||||
|
||||
static AlgorithmConfiguration[] configs = { testPostgres1() };
|
||||
static String[] algorithms = { "Postgres1" };
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
|
||||
// System.out.println("TEST 1");
|
||||
|
@ -55,41 +58,60 @@ public class RegressionRandomSampleOnTable {
|
|||
config.setAgent("RANDOMSAMPLEONTABLE");
|
||||
|
||||
// A test with a database postgres
|
||||
// config.setParam("ResourceName", "GP DB");
|
||||
//
|
||||
// config.setParam("DatabaseName", "aquamapsdb");
|
||||
// config.setParam("SchemaName", "public");
|
||||
// config.setParam("TableName", "area"); // it has not rows
|
||||
|
||||
config.setParam("ResourceName", "StatisticalManagerDataBase");
|
||||
config.setParam("DatabaseName", "testdb");
|
||||
config.setParam("SchemaName", "public");
|
||||
config.setParam("TableName", "hcaf_d");
|
||||
// config.setParam("TableName", "hspec_id_3f4c79fa_442e_42ba_9344_1b3e64dc3326");
|
||||
|
||||
// config.setParam("TableName", "hspec_id_c8e87e16_a0b4_4f9b_b48e_f1cf60ab104c");
|
||||
//// config.setParam("TableName", "generic_id037d302d_2ba0_4e43_b6e4_1a797bb91728");
|
||||
//// config.setParam("TableName", "bionymoutlevfaked2csvpreprcsv");
|
||||
//
|
||||
//// config.setParam("TableName", "timeseries_id08b3abb9_c7b0_4b82_8117_64b69055416f");
|
||||
//// config.setParam("TableName", "occurrence_species_idaf35d737_fb3e_43a7_b13a_611dfa97b064");
|
||||
// config.setParam("TableName", "generic_id037d302d_2ba0_4e43_b6e4_1a797bb91728");
|
||||
// config.setParam("ResourceName", "GP DB");
|
||||
//
|
||||
// config.setParam("DatabaseName", "aquamapsdb");
|
||||
// config.setParam("SchemaName", "public");
|
||||
// config.setParam("TableName", "area"); // it has not rows
|
||||
|
||||
// config.setParam("ResourceName", "StatisticalManagerDataBase");
|
||||
// config.setParam("DatabaseName", "testdb");
|
||||
// config.setParam("SchemaName", "public");
|
||||
// config.setParam("TableName", "hcaf_d");
|
||||
// // config.setParam("TableName",
|
||||
// "hspec_id_3f4c79fa_442e_42ba_9344_1b3e64dc3326");
|
||||
|
||||
// config.setParam("TableName",
|
||||
// "hspec_id_c8e87e16_a0b4_4f9b_b48e_f1cf60ab104c");
|
||||
// // config.setParam("TableName",
|
||||
// "generic_id037d302d_2ba0_4e43_b6e4_1a797bb91728");
|
||||
// // config.setParam("TableName", "bionymoutlevfaked2csvpreprcsv");
|
||||
//
|
||||
// // config.setParam("TableName",
|
||||
// "timeseries_id08b3abb9_c7b0_4b82_8117_64b69055416f");
|
||||
// // config.setParam("TableName",
|
||||
// "occurrence_species_idaf35d737_fb3e_43a7_b13a_611dfa97b064");
|
||||
// config.setParam("TableName",
|
||||
// "generic_id037d302d_2ba0_4e43_b6e4_1a797bb91728");
|
||||
|
||||
// config.setParam("ResourceName", "AquaMaps Service DataBase");
|
||||
// config.setParam("DatabaseName", "aquamapsorgupdated");
|
||||
// config.setParam("SchemaName", "public");
|
||||
// //
|
||||
// //
|
||||
// //
|
||||
// // config.setParam("TableName", "hspec2012_07_11_12_33_05_526");
|
||||
//
|
||||
// // config.setParam("TableName", "custom2013_12_04_15_27_16_493_cet");
|
||||
//
|
||||
// config.setParam("TableName", "hspec_suitable_peng_test_tbsp_1");
|
||||
|
||||
// config.setParam("ResourceName", "AquaMaps Service DataBase");
|
||||
// config.setParam("DatabaseName", "aquamapsorgupdated");
|
||||
// config.setParam("SchemaName", "public");
|
||||
////
|
||||
////
|
||||
////
|
||||
//// config.setParam("TableName", "hspec2012_07_11_12_33_05_526");
|
||||
//
|
||||
//// config.setParam("TableName", "custom2013_12_04_15_27_16_493_cet");
|
||||
//
|
||||
// config.setParam("TableName", "hspec_suitable_peng_test_tbsp_1");
|
||||
|
||||
|
||||
|
||||
// config.setParam("SchemaName", "public");
|
||||
//// config.setParam("TableName", "hspec_suitable_peng_test_tbsp_1");
|
||||
// config.setParam("TableName", "hspen");
|
||||
|
||||
config.setParam("ResourceName", "Geoserver database ");
|
||||
config.setParam("DatabaseName", "aquamapsdb");
|
||||
config.setParam("SchemaName", "public");
|
||||
config.setParam("TableName", "SeaVoX_sea_areas_polygons_v14");
|
||||
|
||||
// Obis
|
||||
// config.setParam("ResourceName", "Obis2Repository");
|
||||
// config.setParam("DatabaseName", "obis");
|
||||
// config.setParam("SchemaName", "calc");
|
||||
// config.setParam("TableName", "map1d");
|
||||
|
||||
// config.setParam("Query", "select * from area limit 3");
|
||||
|
||||
|
@ -137,8 +159,10 @@ public class RegressionRandomSampleOnTable {
|
|||
// A test with a database mysql
|
||||
config.setParam("ResourceName", "CatalogOfLife2010");
|
||||
config.setParam("DatabaseName", "col2oct2010");
|
||||
config.setParam("TableName", "Common_names"); // mysql is not case
|
||||
// sensitive
|
||||
// config.setParam("TableName", "Common_names"); // mysql is not case
|
||||
// sensitive
|
||||
config.setParam("TableName", "databases");
|
||||
// config.setParam("TableName", "simple_search");
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
|
@ -164,9 +188,9 @@ public class RegressionRandomSampleOnTable {
|
|||
return config;
|
||||
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration NullInputValue1() {
|
||||
|
||||
|
||||
private static AlgorithmConfiguration NullInputValue1() {
|
||||
|
||||
System.out.println("TEST 5: Postgis NullInputValue1");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
@ -174,17 +198,17 @@ private static AlgorithmConfiguration NullInputValue1() {
|
|||
config.setAgent("RANDOMSAMPLEONTABLE");
|
||||
|
||||
// a test with postgis
|
||||
// config.setParam("ResourceName", "Geoserver database ");
|
||||
// config.setParam("ResourceName", "Geoserver database ");
|
||||
config.setParam("DatabaseName", "aquamapsdb");
|
||||
config.setParam("SchemaName", "public");
|
||||
config.setParam("TableName", "Divisions");
|
||||
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
|
||||
private static AlgorithmConfiguration NullInputValue2() {
|
||||
System.out.println("TEST 6: Postgis NullInputValue2");
|
||||
|
||||
|
@ -194,18 +218,18 @@ private static AlgorithmConfiguration NullInputValue1() {
|
|||
|
||||
// a test with postgis
|
||||
config.setParam("ResourceName", "Geoserver database ");
|
||||
// config.setParam("DatabaseName", "aquamapsdb");
|
||||
// config.setParam("DatabaseName", "aquamapsdb");
|
||||
config.setParam("SchemaName", "public");
|
||||
config.setParam("TableName", "Divisions");
|
||||
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration NullInputValue3() {
|
||||
|
||||
|
||||
System.out.println("TEST 7: Postgis NullInputValue3");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
@ -215,15 +239,15 @@ private static AlgorithmConfiguration NullInputValue1() {
|
|||
// a test with postgis
|
||||
config.setParam("ResourceName", "Geoserver database ");
|
||||
config.setParam("DatabaseName", "aquamapsdb");
|
||||
// config.setParam("SchemaName", "public");
|
||||
// config.setParam("SchemaName", "public");
|
||||
config.setParam("TableName", "Divisions");
|
||||
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration NullInputValue4() {
|
||||
System.out.println("TEST 8: Postgis NullInputValue4");
|
||||
|
||||
|
@ -235,8 +259,7 @@ private static AlgorithmConfiguration NullInputValue1() {
|
|||
config.setParam("ResourceName", "Geoserver database ");
|
||||
config.setParam("DatabaseName", "aquamapsdb");
|
||||
config.setParam("SchemaName", "public");
|
||||
// config.setParam("TableName", "Divisions");
|
||||
|
||||
// config.setParam("TableName", "Divisions");
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
|
|
|
@ -51,13 +51,19 @@ public class RegressionSampleOnTable {
|
|||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("SAMPLEONTABLE");
|
||||
|
||||
config.setParam("ResourceName", "Geoserver database ");
|
||||
config.setParam("DatabaseName", "aquamapsdb");
|
||||
config.setParam("SchemaName", "public");
|
||||
config.setParam("TableName", "SeaVoX_sea_areas_polygons_v14");
|
||||
// config.setParam("TableName", "");
|
||||
|
||||
// A test with a database postgres
|
||||
// A test with a database postgres Geoserver
|
||||
// config.setParam("ResourceName", "GP DB");
|
||||
//
|
||||
// config.setParam("DatabaseName", "aquamapsdb");
|
||||
// config.setParam("SchemaName", "public");
|
||||
// config.setParam("TableName", "area"); // it has not rows
|
||||
//// config.setParam("TableName", "area"); // it has not rows
|
||||
// config.setParam("TableName", "Divisions");
|
||||
|
||||
// config.setParam("Query", "select * from area limit 3");
|
||||
|
||||
|
@ -70,18 +76,29 @@ public class RegressionSampleOnTable {
|
|||
// config.setParam("ResourceName", "DionysusDB");
|
||||
// config.setParam("DatabaseName", "World");
|
||||
// config.setParam("SchemaName", "public");
|
||||
// config.setParam("TableName", "countrylanguage"); //mysql is not case sensitive
|
||||
// config.setParam("TableNamefcatalog", "countrylanguage"); //mysql is not case sensitive
|
||||
|
||||
// config.setParam("ResourceName", "AquaMaps Service DataBase");
|
||||
// config.setParam("DatabaseName", "aquamapsorgupdated");
|
||||
// config.setParam("SchemaName", "public");
|
||||
// config.setParam("TableName", "hspec2012_07_11_12_33_05_526");
|
||||
//// config.setParam("TableName", "hspec2012_07_11_12_33_05_526");
|
||||
// config.setParam("TableName", "hspen");
|
||||
|
||||
//Statistical
|
||||
// config.setParam("ResourceName", "StatisticalManagerDataBase");
|
||||
// config.setParam("DatabaseName", "testdb");
|
||||
// config.setParam("SchemaName", "public");
|
||||
//// config.setParam("TableName", "hspec_id_3f4c79fa_442e_42ba_9344_1b3e64dc3326");
|
||||
//// config.setParam("TableName", "taxamatchinput");
|
||||
//// config.setParam("TableName", "Divisions");
|
||||
// config.setParam("TableName", "hcaf_d");
|
||||
|
||||
//Obis
|
||||
// config.setParam("ResourceName", "Obis2Repository");
|
||||
// config.setParam("DatabaseName", "obis");
|
||||
// config.setParam("SchemaName", "calc");
|
||||
// config.setParam("TableName", "map1d");
|
||||
|
||||
config.setParam("ResourceName", "StatisticalManagerDataBase");
|
||||
config.setParam("DatabaseName", "testdb");
|
||||
config.setParam("SchemaName", "public");
|
||||
// config.setParam("TableName", "hspec_id_3f4c79fa_442e_42ba_9344_1b3e64dc3326");
|
||||
config.setParam("TableName", "hcaf_d");
|
||||
|
||||
|
||||
|
||||
|
@ -128,7 +145,10 @@ public class RegressionSampleOnTable {
|
|||
// A test with a database mysql
|
||||
config.setParam("ResourceName", "CatalogOfLife2010");
|
||||
config.setParam("DatabaseName", "col2oct2010");
|
||||
config.setParam("TableName", "Common_names"); //mysql is not case sensitive
|
||||
// config.setParam("TableName", "Common_names"); //mysql is not case sensitive
|
||||
// config.setParam("TableName", "databases");
|
||||
// config.setParam("TableName", "families");
|
||||
config.setParam("TableName", "common_names");
|
||||
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
|
|
@ -15,7 +15,7 @@ public class RegressionSmartSampleOnTable {
|
|||
|
||||
// static String[] algorithms = { "Postgres1", "Postgis", "Mysql1", "Mysql2", "NullInputValue1", "NullInputValue2", "NullInputValue3", "NullInputValue4"};
|
||||
|
||||
static AlgorithmConfiguration[] configs = { testPostgres1()};
|
||||
static AlgorithmConfiguration[] configs = { testMysql1()};
|
||||
static String[] algorithms = { "Postgres1"};
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
|
@ -95,6 +95,17 @@ public class RegressionSmartSampleOnTable {
|
|||
// config.setParam("SchemaName", "public");
|
||||
// config.setParam("TableName", "hspec_suitable_peng_test_tbsp_1");
|
||||
|
||||
// config.setParam("ResourceName", "AquaMaps Service DataBase");
|
||||
// config.setParam("DatabaseName", "aquamapsorgupdated");
|
||||
// config.setParam("SchemaName", "public");
|
||||
//// config.setParam("TableName", "hspec2012_07_11_12_33_05_526");
|
||||
// config.setParam("TableName", "hspen");
|
||||
|
||||
config.setParam("ResourceName", "Geoserver database ");
|
||||
config.setParam("DatabaseName", "aquamapsdb");
|
||||
config.setParam("SchemaName", "public");
|
||||
config.setParam("TableName", "SeaVoX_sea_areas_polygons_v14");
|
||||
|
||||
|
||||
////// config.setParam("TableName", "hspec2012_07_11_12_33_05_483");
|
||||
// config.setParam("TableName", "hspec_suitable_peng_test_tbsp_1");
|
||||
|
@ -109,18 +120,22 @@ public class RegressionSmartSampleOnTable {
|
|||
// config.setParam("SchemaName", "public");
|
||||
// config.setParam("TableName", "hspec2012_07_11_12_33_05_526");
|
||||
|
||||
config.setParam("ResourceName", "StatisticalManagerDataBase");
|
||||
config.setParam("DatabaseName", "testdb");
|
||||
config.setParam("SchemaName", "public");
|
||||
config.setParam("TableName", "hcaf_d");
|
||||
// config.setParam("TableName", "hspec_id_3f4c79fa_442e_42ba_9344_1b3e64dc3326");
|
||||
|
||||
|
||||
//// //statistical
|
||||
// config.setParam("ResourceName", "StatisticalManagerDataBase");
|
||||
// config.setParam("DatabaseName", "testdb");
|
||||
// config.setParam("SchemaName", "public");
|
||||
// config.setParam("TableName", "hcaf_d");
|
||||
////// config.setParam("TableName", "hspec_id_3f4c79fa_442e_42ba_9344_1b3e64dc3326");
|
||||
|
||||
//Obis
|
||||
// config.setParam("ResourceName", "Obis2Repository");
|
||||
// config.setParam("DatabaseName", "obis");
|
||||
// config.setParam("SchemaName", "calc");
|
||||
// config.setParam("TableName", "map1d");
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration testPostgis() {
|
||||
|
@ -155,7 +170,12 @@ public class RegressionSmartSampleOnTable {
|
|||
// A test with a database mysql
|
||||
config.setParam("ResourceName", "CatalogOfLife2010");
|
||||
config.setParam("DatabaseName", "col2oct2010");
|
||||
config.setParam("TableName", "Common_names"); //mysql is not case sensitive
|
||||
// config.setParam("TableName", "Common_names"); //mysql is not case sensitive
|
||||
// config.setParam("TableName", "databases");
|
||||
// config.setParam("TableName", "simple_search");
|
||||
config.setParam("TableName", "scientific_names");
|
||||
|
||||
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
|
|
|
@ -56,12 +56,14 @@ public class RegressionSubmitQuery {
|
|||
config.setAgent("LISTSUBMITQUERY");
|
||||
|
||||
// A test with a database postgres
|
||||
config.setParam("ResourceName", "StatisticalManagerDataBase");
|
||||
config.setParam("DatabaseName", "testdb");
|
||||
// config.setParam("", "TRUE");
|
||||
config.setParam("Read-Only Query", "TRUE");
|
||||
config.setParam("Apply Smart Correction", "TRUE");
|
||||
config.setParam("Language", "POSTGRES");
|
||||
// config.setParam("ResourceName", "StatisticalManagerDataBase");
|
||||
// config.setParam("DatabaseName", "testdb");
|
||||
//// config.setParam("", "TRUE");
|
||||
// config.setParam("Read-Only Query", "FALSE");
|
||||
// config.setParam("Apply Smart Correction", "TRUE");
|
||||
// config.setParam("Language", "POSTGRES");
|
||||
|
||||
|
||||
|
||||
// config.setParam("SchemaName", "public");
|
||||
// config.setParam("TableName", "hcaf_d");
|
||||
|
@ -69,7 +71,11 @@ public class RegressionSubmitQuery {
|
|||
|
||||
//
|
||||
//
|
||||
config.setParam("Query", "select * from hcaf_d limit 3");
|
||||
// config.setParam("Query", "select * from hcaf_d limit 1");
|
||||
// config.setParam("Query", "select * from hspen limit 6");
|
||||
|
||||
// config.setParam("Query", "select * from bionymfaked18csvpreprcsv limit 2");
|
||||
|
||||
// config.setParam("Query", "select count (*)from (select csquarecode from hcaf_d)");
|
||||
|
||||
// config.setParam("Query", "select csquarecode,months,sum(effort) as effort, sum(total_yft_catch) as total_yft_catch from (select csquarecode,to_char(time,'MM') months,sum(effort) as effort,sum(total_yft_catch) as total_yft_catch from timeseries_idacdbb646_7500_4920_8e0d_aa38cc99a4a6 group by csquarecode,time order by time ASC) as a group by csquarecode,months order by csquarecode");
|
||||
|
@ -104,6 +110,31 @@ public class RegressionSubmitQuery {
|
|||
|
||||
// config.setParam("TableName", "all_world");
|
||||
// config.setParam("TableName", "biodiversity_lme");
|
||||
|
||||
//Obis
|
||||
// config.setParam("ResourceName", "Obis2Repository");
|
||||
// config.setParam("DatabaseName", "obis");
|
||||
// config.setParam("Read-Only Query", "TRUE");
|
||||
// config.setParam("Apply Smart Correction", "FALSE");
|
||||
// config.setParam("Language", "NONE");
|
||||
//// config.setParam("Query", "select id from fmap.randomdrs limit 1");
|
||||
// config.setParam("Query", "select lifestage from randomdrs");
|
||||
|
||||
|
||||
config.setParam("ResourceName", "AquaMaps Service DataBase");
|
||||
config.setParam("DatabaseName", "aquamapsorgupdated");
|
||||
config.setParam("Read-Only Query", "TRUE");
|
||||
config.setParam("Apply Smart Correction", "FALSE");
|
||||
config.setParam("Language", "NONE");
|
||||
// config.setParam("Query", "select id from fmap.randomdrs limit 1");
|
||||
// config.setParam("Query", "select * from (select * from maxminlat_hspen2012_02_28_17_45_49_572 as a join maxminlat_hspen2011_09_23_15_31_47_530 as b on a.maxclat=b.maxclat limit 2");
|
||||
|
||||
// config.setParam("Query", "select * from maxminlat_hspen2012_02_28_17_45_49_572 as a join maxminlat_hspen2011_09_23_15_31_47_530 as b on a.maxclat=b.maxclat limit 2");
|
||||
|
||||
// config.setParam("Query", "select * from hcaf_d_2018_linear_01341919234605 as a join hcaf_d_2024_linear_11341919235343 as b on a.csquarecode = b.csquarecode limit 1");
|
||||
|
||||
config.setParam("Query", "select * from hcaf_d_2018_linear_01341919234605 as a, hcaf_d_2024_linear_11341919235343 as b where a.csquarecode = b.csquarecode limit 1");
|
||||
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
|
@ -130,11 +161,15 @@ public class RegressionSubmitQuery {
|
|||
//a test with a database mysql
|
||||
config.setParam("ResourceName", "CatalogOfLife2010");
|
||||
config.setParam("DatabaseName", "col2oct2010");
|
||||
config.setParam("", "TRUE");
|
||||
config.setParam("TableName", "Common_names"); //mysql is not case sensitive
|
||||
config.setParam("Query", "select * from common_names limit 10");
|
||||
// config.setParam("Query", "select * from common_names limit 10");
|
||||
config.setParam("Read-Only Query", "TRUE");
|
||||
config.setParam("Apply Smart Correction", "FALSE");
|
||||
config.setParam("Language", "NONE");
|
||||
|
||||
// config.setParam("TableName", "specialists");
|
||||
// config.setParam("Query", "select a.name_code as uno, b.name_code as due from common_names as a join distribution as b on a.name_code=b.name_code limit 2");
|
||||
config.setParam("Query", "select * from common_names as a join distribution as b on a.name_code=b.name_code");
|
||||
// config.setParam("TableName", "specialists");
|
||||
|
||||
// config.setParam("Query", "select * from specialists limit 3");
|
||||
|
||||
|
|
|
@ -11,18 +11,16 @@ import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactor
|
|||
|
||||
public class RegressionTableDetails {
|
||||
|
||||
// static String[] algorithms = { "Postgres1", "Postgres2", "Postgis",
|
||||
// "Mysql", "NullInputValue", "Postgres3" };
|
||||
// static String[] algorithms = { "Postgres1", "Postgres2", "Postgis",
|
||||
// "Mysql", "NullInputValue", "Postgres3" };
|
||||
|
||||
// static AlgorithmConfiguration[] configs = { testPostgres1(),
|
||||
// testPostgres2(), testPostgis(), Mysql(), NullInputValue(), Postgres3()};
|
||||
|
||||
static String[] algorithms = { "Postgres1"};
|
||||
static AlgorithmConfiguration[] configs = { testPostgres1()};
|
||||
// static AlgorithmConfiguration[] configs = { testPostgres1(),
|
||||
// testPostgres2(), testPostgis(), Mysql(), NullInputValue(), Postgres3()};
|
||||
|
||||
static String[] algorithms = { "Postgres1" };
|
||||
static AlgorithmConfiguration[] configs = { Mysql() };
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
|
||||
|
||||
|
||||
// System.out.println("TEST 1");
|
||||
|
||||
|
@ -44,8 +42,6 @@ public class RegressionTableDetails {
|
|||
AnalysisLogger.getLogger().debug("ST:" + st);
|
||||
trans = null;
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
@ -58,39 +54,50 @@ public class RegressionTableDetails {
|
|||
config.setAgent("GETTABLEDETAILS");
|
||||
|
||||
// A test with a database postgres
|
||||
// config.setParam("ResourceName", "GP DB");
|
||||
//
|
||||
// config.setParam("DatabaseName", "aquamapsdb");
|
||||
// config.setParam("SchemaName", "public");
|
||||
// config.setParam("TableName", "Divisions");
|
||||
// config.setParam("ResourceName", "GP DB");
|
||||
//
|
||||
// config.setParam("DatabaseName", "aquamapsdb");
|
||||
// config.setParam("SchemaName", "public");
|
||||
// config.setParam("TableName", "Divisions");
|
||||
|
||||
// config.setParam("TableName", "all_world");
|
||||
// config.setParam("TableName", "biodiversity_lme");
|
||||
|
||||
config.setParam("ResourceName", "StatisticalManagerDataBase");
|
||||
config.setParam("DatabaseName", "testdb");
|
||||
// config.setParam("SchemaName", "publicd");
|
||||
|
||||
config.setParam("SchemaName", "public");
|
||||
|
||||
config.setParam("TableName", "hcaf_d");
|
||||
|
||||
// config.setParam("TableName", "hspec_id_c8e87e16_a0b4_4f9b_b48e_f1cf60ab104c");
|
||||
|
||||
//// config.setParam("TableName", "timeseries_id08b3abb9_c7b0_4b82_8117_64b69055416f");
|
||||
//// config.setParam("TableName", "occurrence_species_idaf35d737_fb3e_43a7_b13a_611dfa97b064");
|
||||
//// config.setParam("TableName", "occcluster_id_15271993_5129_4eda_92a2_fe8d22737007");
|
||||
// config.setParam("TableName", "hspec_id_3f4c79fa_442e_42ba_9344_1b3e64dc3326");
|
||||
|
||||
// config.setParam("ResourceName", "AquaMaps Service DataBase");
|
||||
// config.setParam("DatabaseName", "aquamapsorgupdated");
|
||||
// config.setParam("SchemaName", "public");
|
||||
////// config.setParam("TableName", "hspec2012_07_11_12_33_05_483");
|
||||
// config.setParam("TableName", "hspec_suitable_peng_test_tbsp_1");
|
||||
|
||||
|
||||
// StatisticalManager
|
||||
// config.setParam("ResourceName", "StatisticalManagerDataBase");
|
||||
// config.setParam("DatabaseName", "testdb");
|
||||
// // config.setParam("SchemaName", "publicd");
|
||||
//
|
||||
// config.setParam("SchemaName", "public");
|
||||
//
|
||||
// config.setParam("TableName", "hcaf_d");
|
||||
|
||||
// config.setParam("TableName",
|
||||
// "hspec_id_c8e87e16_a0b4_4f9b_b48e_f1cf60ab104c");
|
||||
|
||||
// // config.setParam("TableName",
|
||||
// "timeseries_id08b3abb9_c7b0_4b82_8117_64b69055416f");
|
||||
// // config.setParam("TableName",
|
||||
// "occurrence_species_idaf35d737_fb3e_43a7_b13a_611dfa97b064");
|
||||
// // config.setParam("TableName",
|
||||
// "occcluster_id_15271993_5129_4eda_92a2_fe8d22737007");
|
||||
// config.setParam("TableName",
|
||||
// "hspec_id_3f4c79fa_442e_42ba_9344_1b3e64dc3326");
|
||||
|
||||
// config.setParam("ResourceName", "AquaMaps Service DataBase");
|
||||
// config.setParam("DatabaseName", "aquamapsorgupdated");
|
||||
// config.setParam("SchemaName", "public");
|
||||
// //// config.setParam("TableName", "hspec2012_07_11_12_33_05_483");
|
||||
// config.setParam("TableName", "hspec_suitable_peng_test_tbsp_1");
|
||||
|
||||
// Obis
|
||||
config.setParam("ResourceName", "Obis2Repository");
|
||||
config.setParam("DatabaseName", "obis");
|
||||
config.setParam("SchemaName", "calc");
|
||||
config.setParam("TableName", "map1d");
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
@ -108,7 +115,7 @@ public class RegressionTableDetails {
|
|||
|
||||
config.setParam("DatabaseName", "aquamapsdb");
|
||||
config.setParam("SchemaName", "public");
|
||||
config.setParam("TableName", "area"); // it has not rows
|
||||
config.setParam("TableName", "area"); // it has not rows
|
||||
|
||||
// config.setParam("TableName", "all_world");
|
||||
// config.setParam("TableName", "biodiversity_lme");
|
||||
|
@ -125,13 +132,16 @@ public class RegressionTableDetails {
|
|||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("LISTTABLEDETAILS");
|
||||
|
||||
config.setAgent("GETTABLEDETAILS");
|
||||
|
||||
// a test with postgis
|
||||
config.setParam("ResourceName", "Geoserver database ");
|
||||
config.setParam("DatabaseName", "aquamapsdb");
|
||||
config.setParam("SchemaName", "public");
|
||||
config.setParam("TableName", "Divisions");
|
||||
// config.setParam("TableName", "Divisions");
|
||||
// config.setParam("TableName", "ContinentalMargins");
|
||||
|
||||
config.setParam("TableName", "SeaVoX_sea_areas_polygons_v14");
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
|
@ -145,27 +155,30 @@ public class RegressionTableDetails {
|
|||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("LISTTABLEDETAILS");
|
||||
config.setAgent("GETTABLEDETAILS");
|
||||
|
||||
// // a test with postgis
|
||||
// config.setParam("ResourceName", "Geoserver database ");
|
||||
// config.setParam("SchemaName", "public");
|
||||
// config.setParam("DatabaseName", "aquamapsdb");
|
||||
// config.setParam("TableName", "Divisions");
|
||||
|
||||
//a test with a database mysql
|
||||
// // a test with postgis
|
||||
// config.setParam("ResourceName", "Geoserver database ");
|
||||
// config.setParam("SchemaName", "public");
|
||||
// config.setParam("DatabaseName", "aquamapsdb");
|
||||
// config.setParam("TableName", "Divisions");
|
||||
|
||||
// a test with a database mysql
|
||||
// config.setParam("ResourceName", "CatalogOfLife2010");
|
||||
config.setParam("ResourceName", "CatalogOfLife2010");
|
||||
config.setParam("DatabaseName", "col2oct2010");
|
||||
config.setParam("TableName", "Common_names"); //mysql is not case sensitive
|
||||
|
||||
config.setParam("Query", "select * from common_names limit 3");
|
||||
config.setParam("DatabaseName", "col2oct2010");
|
||||
// config.setParam("TableName", "Common_names"); //mysql is not case
|
||||
// sensitive
|
||||
|
||||
config.setParam("TableName", "databases");
|
||||
|
||||
// config.setParam("Query", "select * from common_names limit 3");
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
|
||||
private static AlgorithmConfiguration NullInputValue() {
|
||||
|
||||
|
@ -178,7 +191,7 @@ public class RegressionTableDetails {
|
|||
// a test with postgis
|
||||
config.setParam("ResourceName", "Geoserver database ");
|
||||
config.setParam("DatabaseName", "aquamapsdb");
|
||||
// config.setParam("SchemaName", "public");
|
||||
// config.setParam("SchemaName", "public");
|
||||
config.setParam("TableName", "Divisions");
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
@ -186,7 +199,7 @@ public class RegressionTableDetails {
|
|||
return config;
|
||||
|
||||
}
|
||||
|
||||
|
||||
private static AlgorithmConfiguration Postgres3() {
|
||||
|
||||
System.out.println("TEST 6: Postgres");
|
||||
|
@ -199,16 +212,12 @@ public class RegressionTableDetails {
|
|||
config.setParam("ResourceName", "Geoserver database ");
|
||||
config.setParam("DatabaseName", "aquamapsdb");
|
||||
config.setParam("SchemaName", "public");
|
||||
config.setParam("TableName", "divisions"); //postgres is case sensitive
|
||||
config.setParam("TableName", "divisions"); // postgres is case sensitive
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
}
|
||||
|
|
Loading…
Reference in New Issue