move the library from data-analysis to data-access

git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-access/DatabasesResourcesManager@96611 82a268e6-3cf1-43bd-a215-b396298e98cf
This commit is contained in:
Loredana Liccardo 2014-06-04 13:56:35 +00:00
parent cad254b98b
commit e5c13f4050
62 changed files with 12393 additions and 0 deletions

33
.classpath Normal file
View File

@ -0,0 +1,33 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Eclipse JDT build path for the DatabasesResourcesManager project,
     maintained by m2e (see the maven.pomderived attributes below). -->
<classpath>
<classpathentry kind="src" output="target/classes" path="src/main/java">
<attributes>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<!-- excluding="**" keeps Eclipse from compiling resources; Maven copies
     them to target/classes instead. -->
<classpathentry excluding="**" kind="src" output="target/classes" path="src/main/resources">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="src" output="target/test-classes" path="src/test/java">
<attributes>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<!-- Maven dependencies and JRE are contributed via classpath containers. -->
<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<!-- NOTE(review): "KEYS" and "JAVA_HOME" are workspace-specific classpath
     variables; builds fail on machines where they are undefined. Confirm
     they are still required. -->
<classpathentry kind="var" path="KEYS"/>
<classpathentry kind="var" path="JAVA_HOME"/>
<classpathentry kind="output" path="target/classes"/>
</classpath>

23
.project Normal file
View File

@ -0,0 +1,23 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Eclipse project descriptor: registers the Java and Maven (m2e)
     builders and natures for DatabasesResourcesManager. -->
<projectDescription>
<name>DatabasesResourcesManager</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.m2e.core.maven2Builder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.m2e.core.maven2Nature</nature>
<nature>org.eclipse.jdt.core.javanature</nature>
</natures>
</projectDescription>

24
cfg/ALog.properties Normal file
View File

@ -0,0 +1,24 @@
#### Use two appenders, one to log to console, another to log to a file
#log4j.rootCategory= R
#### Second appender writes to a file
#log4j.appender.stdout=org.apache.log4j.ConsoleAppender
#log4j.appender.stdout.Threshold=trace
#log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
#log4j.appender.stdout.layout.ConversionPattern=%d{dd/MM/yyyy HH:mm:ss} %p %t %c - %m%n
# AnalysisLogger: TRACE and above to a rolling file (50000KB x 2 backups).
log4j.logger.AnalysisLogger=TRACE,AR
log4j.appender.AR=org.apache.log4j.RollingFileAppender
log4j.appender.AR.Threshold=trace
log4j.appender.AR.File=./Analysis.log
log4j.appender.AR.MaxFileSize=50000KB
log4j.appender.AR.MaxBackupIndex=2
log4j.appender.AR.layout=org.apache.log4j.PatternLayout
log4j.appender.AR.layout.ConversionPattern=%d{dd/MM/yyyy HH:mm:ss} %p %t %c - %m%n
#### Third appender writes to a file
# Silence Hibernate logging. The previous line read
# "log4j.logger.org.hibernate=H", but log4j's logger syntax is
# "LEVEL[,appenderList]": "H" was parsed as an invalid level name and the H
# appender was never attached. The level must precede the appender name.
log4j.logger.org.hibernate=OFF,H
log4j.appender.H=org.apache.log4j.AsyncAppender
log4j.appender.H.Threshold=OFF
log4j.appender.H.layout=org.apache.log4j.PatternLayout
log4j.appender.H.layout.ConversionPattern=%d{dd/MM/yyyy HH:mm:ss} %p %t %c - %m%n

View File

View File

@ -0,0 +1,18 @@
<?xml version='1.0' encoding='UTF-8'?>
<!-- Hibernate session factory: MySQL "timeseries" database through the
     C3P0 connection provider. Pool is pinned to exactly one connection
     (min_size = max_size = 1) with timeout 0 (idle connections never
     expire, per c3p0 maxIdleTime semantics). -->
<!-- NOTE(review): plaintext root credentials and a hard-coded IP are
     committed here; they should be externalized and rotated. -->
<hibernate-configuration>
<session-factory>
<property name="connection.driver_class">com.mysql.jdbc.Driver</property>
<property name="connection.provider_class">org.hibernate.connection.C3P0ConnectionProvider</property>
<property name="connection.url">jdbc:mysql://146.48.87.169/timeseries</property>
<property name="connection.username">root</property>
<property name="connection.password">test</property>
<!-- <property name="dialect">org.hibernatespatial.postgis.PostgisDialect</property>-->
<property name="dialect">org.hibernate.dialect.MySQLDialect</property>
<property name="transaction.factory_class">org.hibernate.transaction.JDBCTransactionFactory</property>
<property name="c3p0.timeout">0</property>
<property name="c3p0.max_size">1</property>
<property name="c3p0.max_statements">0</property>
<property name="c3p0.min_size">1</property>
<!-- Sessions are bound to the current thread. -->
<property name="current_session_context_class">thread</property>
</session-factory>
</hibernate-configuration>

View File

@ -0,0 +1,18 @@
<?xml version='1.0' encoding='UTF-8'?>
<!-- Hibernate session factory: MySQL "col2oct2010" database through the
     C3P0 connection provider; single-connection pool (min = max = 1),
     timeout 0 so the idle connection is never expired. -->
<!-- NOTE(review): plaintext root credentials and a hard-coded IP are
     committed here; they should be externalized and rotated. -->
<hibernate-configuration>
<session-factory>
<property name="connection.driver_class">com.mysql.jdbc.Driver</property>
<property name="connection.provider_class">org.hibernate.connection.C3P0ConnectionProvider</property>
<property name="connection.url">jdbc:mysql://146.48.87.169/col2oct2010</property>
<property name="connection.username">root</property>
<property name="connection.password">test</property>
<!-- <property name="dialect">org.hibernatespatial.postgis.PostgisDialect</property>-->
<property name="dialect">org.hibernate.dialect.MySQLDialect</property>
<property name="transaction.factory_class">org.hibernate.transaction.JDBCTransactionFactory</property>
<property name="c3p0.timeout">0</property>
<property name="c3p0.max_size">1</property>
<property name="c3p0.max_statements">0</property>
<property name="c3p0.min_size">1</property>
<!-- Sessions are bound to the current thread. -->
<property name="current_session_context_class">thread</property>
</session-factory>
</hibernate-configuration>

View File

@ -0,0 +1,19 @@
<?xml version='1.0' encoding='UTF-8'?>
<!-- Hibernate session factory: PostgreSQL "testdb" on the statistical
     manager host, C3P0 provider, public schema; single-connection pool
     (min = max = 1) with timeout 0. -->
<!-- NOTE(review): plaintext credentials are committed here; they should
     be externalized and rotated. -->
<hibernate-configuration>
<session-factory>
<property name="connection.driver_class">org.postgresql.Driver</property>
<property name="connection.provider_class">org.hibernate.connection.C3P0ConnectionProvider</property>
<property name="connection.url">jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb</property>
<property name="connection.username">utente</property>
<property name="connection.password">d4science</property>
<!-- <property name="dialect">org.hibernatespatial.postgis.PostgisDialect</property>-->
<property name="dialect">org.hibernate.dialect.PostgreSQLDialect</property>
<property name="transaction.factory_class">org.hibernate.transaction.JDBCTransactionFactory</property>
<property name="connection.schemaname">public</property>
<property name="c3p0.timeout">0</property>
<property name="c3p0.max_size">1</property>
<property name="c3p0.max_statements">0</property>
<property name="c3p0.min_size">1</property>
<!-- Sessions are bound to the current thread. -->
<property name="current_session_context_class">thread</property>
</session-factory>
</hibernate-configuration>

View File

@ -0,0 +1,19 @@
<?xml version='1.0' encoding='UTF-8'?>
<!-- Hibernate session factory: PostgreSQL "aquamapsorgupdated" on
     dbtest.research-infrastructures.eu, C3P0 provider, public schema;
     single-connection pool (min = max = 1) with timeout 0. -->
<!-- NOTE(review): plaintext credentials are committed here; they should
     be externalized and rotated. -->
<hibernate-configuration>
<session-factory>
<property name="connection.driver_class">org.postgresql.Driver</property>
<property name="connection.provider_class">org.hibernate.connection.C3P0ConnectionProvider</property>
<property name="connection.url">jdbc:postgresql://dbtest.research-infrastructures.eu/aquamapsorgupdated</property>
<property name="connection.username">utente</property>
<property name="connection.password">d4science</property>
<!-- <property name="dialect">org.hibernatespatial.postgis.PostgisDialect</property>-->
<property name="dialect">org.hibernate.dialect.PostgreSQLDialect</property>
<property name="transaction.factory_class">org.hibernate.transaction.JDBCTransactionFactory</property>
<property name="connection.schemaname">public</property>
<property name="c3p0.timeout">0</property>
<property name="c3p0.max_size">1</property>
<property name="c3p0.max_statements">0</property>
<property name="c3p0.min_size">1</property>
<!-- Sessions are bound to the current thread. -->
<property name="current_session_context_class">thread</property>
</session-factory>
</hibernate-configuration>

13
cfg/algorithms.properties Normal file
View File

@ -0,0 +1,13 @@
# Registry mapping algorithm identifiers to the fully qualified Java class
# that implements each algorithm (gCube ecological-engine spatial
# distributions and executor nodes/transducers).
AQUAMAPS_SUITABLE=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsSuitable
AQUAMAPS_NATIVE=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsNative
AQUAMAPS_NATIVE_2050=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsNative2050
AQUAMAPS_SUITABLE_2050=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsSuitable2050
AQUAMAPS_NATIVE_NEURALNETWORK=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsNN
AQUAMAPS_SUITABLE_NEURALNETWORK=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsNNSuitable
FEED_FORWARD_A_N_N_DISTRIBUTION=org.gcube.dataanalysis.ecoengine.spatialdistributions.FeedForwardNeuralNetworkDistribution
LWR=org.gcube.dataanalysis.executor.nodes.algorithms.LWR
BIONYM_BIODIV=org.gcube.dataanalysis.executor.nodes.transducers.bionym.BionymBiodiv
BIONYM=org.gcube.dataanalysis.executor.nodes.transducers.bionym.BionymFlexibleWorkflowTransducer
OCCURRENCES_MERGER=org.gcube.dataanalysis.executor.nodes.transducers.OccurrenceMergingNode
OCCURRENCES_INTERSECTOR=org.gcube.dataanalysis.executor.nodes.transducers.OccurrenceIntersectionNode
OCCURRENCES_SUBTRACTION=org.gcube.dataanalysis.executor.nodes.transducers.OccurrenceSubtractionNode

View File

@ -0,0 +1,4 @@
# Registry mapping clustering-algorithm identifiers to implementing classes.
DBSCAN=org.gcube.dataanalysis.ecoengine.clustering.DBScan
LOF=org.gcube.dataanalysis.ecoengine.clustering.LOF
KMEANS=org.gcube.dataanalysis.ecoengine.clustering.KMeans
XMEANS=org.gcube.dataanalysis.ecoengine.clustering.XMeansWrapper

View File

@ -0,0 +1,4 @@
# Registry mapping evaluation/comparison algorithm identifiers to
# implementing classes.
MAPS_COMPARISON=org.gcube.dataanalysis.geo.algorithms.MapsComparator
DISCREPANCY_ANALYSIS=org.gcube.dataanalysis.ecoengine.evaluation.DiscrepancyAnalysis
QUALITY_ANALYSIS=org.gcube.dataanalysis.ecoengine.evaluation.DistributionQualityAnalysis
HRS=org.gcube.dataanalysis.ecoengine.evaluation.HabitatRepresentativeness

View File

@ -0,0 +1,3 @@
LOCAL_WITH_DATABASE=org.gcube.dataanalysis.ecoengine.processing.LocalSplitGenerator
SIMPLE_LOCAL=org.gcube.dataanalysis.ecoengine.processing.LocalSimpleSplitGenerator
D4SCIENCE=org.gcube.dataanalysis.executor.generators.D4ScienceDistributedProcessing

1
cfg/modelers.properties Normal file
View File

@ -0,0 +1 @@
# Registry mapping modeler identifiers to implementing classes.
HSPEN_MODELER=org.gcube.dataanalysis.ecoengine.modeling.SimpleModeler

4
cfg/models.properties Normal file
View File

@ -0,0 +1,4 @@
# Registry mapping model identifiers to implementing classes.
HSPEN=org.gcube.dataanalysis.ecoengine.models.ModelHSPEN
AQUAMAPSNN=org.gcube.dataanalysis.ecoengine.models.ModelAquamapsNN
FEED_FORWARD_ANN=org.gcube.dataanalysis.ecoengine.models.FeedForwardNN
#FEED_FORWARD_ANN_FILE=org.gcube.dataanalysis.ecoengine.models.testing.FeedForwardNNFile

View File

@ -0,0 +1,10 @@
# Registry mapping distributed-node algorithm identifiers to the executor
# node/transducer classes that implement them.
AQUAMAPS_SUITABLE=org.gcube.dataanalysis.executor.nodes.algorithms.AquamapsSuitableNode
AQUAMAPS_NATIVE=org.gcube.dataanalysis.executor.nodes.algorithms.AquamapsNativeNode
AQUAMAPS_NATIVE_2050=org.gcube.dataanalysis.executor.nodes.algorithms.AquamapsNative2050Node
AQUAMAPS_SUITABLE_2050=org.gcube.dataanalysis.executor.nodes.algorithms.AquamapsSuitable2050Node
OCCURRENCES_MERGER=org.gcube.dataanalysis.executor.nodes.transducers.OccurrenceMergingNode
OCCURRENCES_INTERSECTOR=org.gcube.dataanalysis.executor.nodes.transducers.OccurrenceIntersectionNode
OCCURRENCES_SUBTRACTION=org.gcube.dataanalysis.executor.nodes.transducers.OccurrenceSubtractionNode
LWR=org.gcube.dataanalysis.executor.nodes.algorithms.LWR
BIONYM_BIODIV=org.gcube.dataanalysis.executor.nodes.transducers.bionym.BionymBiodiv
BIONYM=org.gcube.dataanalysis.executor.nodes.transducers.bionym.BionymFlexibleWorkflowTransducer

2620
cfg/operators.xml Normal file

File diff suppressed because it is too large Load Diff

0
cfg/table.txt Normal file
View File

View File

@ -0,0 +1,36 @@
# Registry mapping transducer identifiers to implementing classes.
# NOTE(review): several OBIS_* keys and their unprefixed counterparts
# (e.g. OBIS_MOST_OBSERVED_SPECIES / MOST_OBSERVED_SPECIES) map to the
# same class - apparently deliberate aliases; confirm both are needed.
BIOCLIMATE_HSPEC=org.gcube.dataanalysis.ecoengine.transducers.BioClimateHSPECTransducer
BIOCLIMATE_HCAF=org.gcube.dataanalysis.ecoengine.transducers.BioClimateHCAFTransducer
BIOCLIMATE_HSPEN=org.gcube.dataanalysis.ecoengine.transducers.BioClimateHSPENTransducer
HCAF_INTERPOLATION=org.gcube.dataanalysis.ecoengine.transducers.InterpolationTransducer
HCAF_FILTER=org.gcube.dataanalysis.ecoengine.transducers.simplequeryexecutors.HcafFilter
HSPEN_FILTER=org.gcube.dataanalysis.ecoengine.transducers.simplequeryexecutors.HspenFilter
ABSENCE_CELLS_FROM_AQUAMAPS=org.gcube.dataanalysis.ecoengine.transducers.simplequeryexecutors.MarineAbsencePointsFromAquamapsDistribution
PRESENCE_CELLS_GENERATION=org.gcube.dataanalysis.ecoengine.transducers.simplequeryexecutors.MarinePresencePoints
OCCURRENCES_MERGER=org.gcube.dataanalysis.ecoengine.transducers.OccurrencePointsMerger
OCCURRENCES_INTERSECTOR=org.gcube.dataanalysis.ecoengine.transducers.OccurrencePointsIntersector
OCCURRENCES_MARINE_TERRESTRIAL=org.gcube.dataanalysis.ecoengine.transducers.OccurrencePointsInSeaOnEarth
OCCURRENCES_DUPLICATES_DELETER=org.gcube.dataanalysis.ecoengine.transducers.OccurrencePointsDuplicatesDeleter
OCCURRENCES_SUBTRACTION=org.gcube.dataanalysis.ecoengine.transducers.OccurrencePointsSubtraction
FIN_TAXA_MATCH=org.gcube.dataanalysis.fin.taxamatch.TaxaMatchTransducer
OBIS_MOST_OBSERVED_SPECIES=org.gcube.dataanalysis.trendylyzeralgorithms.AbsoluteSpeciesBarChartsAlgorithm
OBIS_SINGLE_SPECIES_DISTRIBUTION_PER_AREA=org.gcube.dataanalysis.trendylyzeralgorithms.SpeciesObservationsAreaBarChart
OBIS_SPECIES_OBSERVATIONS_PER_YEAR=org.gcube.dataanalysis.trendylyzeralgorithms.SpeciesObservationsPerYear
OBIS_MOST_OBSERVED_TAXA=org.gcube.dataanalysis.trendylyzeralgorithms.TaxaObservationsBarChartAlgorithm
OBIS_TAXA_OBSERVATIONS_PER_YEAR=org.gcube.dataanalysis.trendylyzeralgorithms.TaxaObservationsPerYearLineChart
OBIS_SPECIES_OBSERVATIONS_PER_LME_AREA=org.gcube.dataanalysis.trendylyzeralgorithms.SpeciesObservationsPerLMEAreaPerYearLineChart
OBIS_SPECIES_OBSERVATIONS_PER_MEOW_AREA=org.gcube.dataanalysis.trendylyzeralgorithms.SpeciesObservationsPerMEOWAreaPerYearLineChart
MOST_OBSERVED_SPECIES=org.gcube.dataanalysis.trendylyzeralgorithms.AbsoluteSpeciesBarChartsAlgorithm
SPECIES_OBSERVATIONS_TREND_PER_YEAR=org.gcube.dataanalysis.trendylyzeralgorithms.SpeciesObservationsPerYear
MOST_OBSERVED_TAXA=org.gcube.dataanalysis.trendylyzeralgorithms.TaxaObservationsBarChartAlgorithm
TAXONOMY_OBSERVATIONS_TREND_PER_YEAR=org.gcube.dataanalysis.trendylyzeralgorithms.TaxaObservationsPerYearLineChart
SPECIES_OBSERVATIONS_PER_AREA=org.gcube.dataanalysis.trendylyzeralgorithms.SpeciesObservationsAreaBarChart
SPECIES_OBSERVATION_LME_AREA_PER_YEAR=org.gcube.dataanalysis.trendylyzeralgorithms.SpeciesObservationsPerLMEAreaPerYearLineChart
SPECIES_OBSERVATION_MEOW_AREA_PER_YEAR=org.gcube.dataanalysis.trendylyzeralgorithms.SpeciesObservationsPerMEOWAreaPerYearLineChart
GET_TAXA_ALGORITHM=org.gcube.dataanalysis.JobSMspd.TaxaProcedure
GET_OCCURRENCES_ALGORITHM=org.gcube.dataanalysis.JobSMspd.OccurencesProcedure
FIN_GSAY_MATCH=org.gcube.dataanalysis.fin.gsay.GSAYTransducer
POINTS_TO_MAP=org.gcube.dataanalysis.geo.algorithms.PointsMapsCreator
POLYGONS_TO_MAP=org.gcube.dataanalysis.geo.algorithms.PolygonMapsCreator
SPECIES_MAP_FROM_CSQUARES=org.gcube.dataanalysis.geo.algorithms.SpeciesDistributionsMapsCreatorFromCsquares
SPECIES_MAP_FROM_POINTS=org.gcube.dataanalysis.geo.algorithms.SpeciesDistributionsMapsCreatorFromPoints
BIONYM_LOCAL=org.gcube.dataanalysis.executor.nodes.transducers.bionym.BionymLocalTransducer

View File

@ -0,0 +1,18 @@
# Groups algorithm identifiers (defined in the other registry files) into
# user-facing categories; values are comma-separated identifier lists.
ANOMALIES_DETECTION=DBSCAN,LOF,KMEANS,XMEANS
CLASSIFICATION=FEED_FORWARD_A_N_N_DISTRIBUTION
CLIMATE=BIOCLIMATE_HSPEC,BIOCLIMATE_HCAF,BIOCLIMATE_HSPEN,HCAF_INTERPOLATION
CORRELATION_ANALYSIS=HRS
DATA_CLUSTERING=DBSCAN,LOF,KMEANS,XMEANS
FILTERING=HCAF_FILTER,HSPEN_FILTER
FUNCTION_SIMULATION=FEED_FORWARD_A_N_N_DISTRIBUTION
OCCURRENCES=ABSENCE_CELLS_FROM_AQUAMAPS,PRESENCE_CELLS_GENERATION,OCCURRENCES_MERGER,OCCURRENCES_INTERSECTOR,OCCURRENCES_MARINE_TERRESTRIAL,OCCURRENCES_DUPLICATES_DELETER,OCCURRENCES_SUBTRACTION
PERFORMANCES_EVALUATION=QUALITY_ANALYSIS,DISCREPANCY_ANALYSIS
SPECIES_SIMULATION=AQUAMAPS_SUITABLE,AQUAMAPS_NATIVE,AQUAMAPS_NATIVE_2050,AQUAMAPS_SUITABLE_2050,AQUAMAPS_NATIVE_NEURALNETWORK,AQUAMAPS_SUITABLE_NEURALNETWORK
TRAINING=HSPEN,AQUAMAPSNN,FEED_FORWARD_ANN
TIME_SERIES=HCAF_INTERPOLATION
TAXA=FIN_TAXA_MATCH,BIONYM,BIONYM_BIODIV,BIONYM_LOCAL,FIN_GSAY_MATCH
MAPS=MAPS_COMPARISON,DISCREPANCY_ANALYSIS,POINTS_TO_MAP,POLYGONS_TO_MAP,SPECIES_MAP_FROM_CSQUARES,SPECIES_MAP_FROM_POINTS
BAYESIAN_METHODS=LWR,FEED_FORWARD_A_N_N_DISTRIBUTION
OBIS_OBSERVATIONS_SPECIES_DATA=MOST_OBSERVED_SPECIES,MOST_OBSERVED_TAXA,SPECIES_OBSERVATIONS_PER_AREA
OBIS_OBSERVATIONS_TRENDS=SPECIES_OBSERVATIONS_TREND_PER_YEAR,TAXONOMY_OBSERVATIONS_TREND_PER_YEAR,SPECIES_OBSERVATION_MEOW_AREA_PER_YEAR,SPECIES_OBSERVATION_LME_AREA_PER_YEAR
SPD_PROCEDURES=GET_TAXA_ALGORITHM,GET_OCCURRENCES_ALGORITHM

View File

@ -0,0 +1,100 @@
3712:100:2,0,0,0,,,,,,,,,,,,,0,,,,,
3214:110:1,0,0,0,,,,,,,,,,,,,0,,,,,
3803:237:3,0,0,0,,,,,,,,,,,,,0,,,,,
1215:476:3,5889,6272,6065,73.92,24.7,0.45,29.31,19.41,9.9,,34.93,,35.07,34.79,,252,0,0,0,0,0
1701:131:1,1854,2090,1961,46.78,5.32,0.57,10.62,1.51,9.11,-0.96,35.04,,35.11,34.97,34.9099,369,4.7E-4,0,0,0,0
1208:383:3,0,0,0,,,,,,,,,,,,,0,,,,,
7007:384:4,0,0,0,,,,,,,,,,,,,0,,,,,
7509:237:3,0,0,0,,,,,,,,,,,,,0,,,,,
7012:360:2,3589,4357,4099,91.46,27.66,0.47,29.31,25.82,3.49,1.46,34.22,,34.6,33.91,34.6926,304,0,0,0,0,0
7008:455:3,1523,2841,2032,373.74,28.16,0.54,30.93,26.11,4.82,2.31,32.62,,33.21,31.66,34.6493,325,0,0,0,0,0
7806:114:4,0,600,369,179.81,-1.79,0,-1.54,-1.79,0.25,,30.45,,31.36,28.8,,169,0.88,0.95,0.78,0.84,0.94
3000:215:4,1333,3759,3360,393.52,26.43,0.45,29.62,22.16,7.46,2.46,34.34,,35.53,33.29,34.8958,701,0,0,0,0,0
5708:466:1,0,0,0,,,,,,,,,,,,,0,,,,,
3114:123:1,0,163,10,21.79,26.99,0.39,29.59,23.8,5.79,26.99,34.93,,35.58,34.15,34.8772,547,0,0,0,0,0
5213:392:3,4125,4339,4197,33.44,21.53,0.44,26.26,17.8,8.46,1.49,35.37,,35.64,34.99,34.6944,136,0,0,0,0,0
3608:229:2,3914,4276,4089,63.4,-1,0.1,1.37,-1.79,3.16,-0.26,33.81,,34.18,33.61,34.6651,304,0.33,0.05,0.64,0.61,0.01
3005:114:2,4168,4777,4590,79.03,28.22,0.23,30.76,25.35,5.41,1.34,35.33,,35.48,35.14,34.7173,376,0,0,0,0,0
1616:228:1,0,0,0,,,,,,,,,,,,,0,,,,,
7611:495:4,0,0,0,,,,,,,,,,,,,0,,,,,
7215:218:4,819,4520,2642,1134.08,25.38,0.3,27.73,23.36,4.37,1.72,35,,35.11,34.84,34.6492,227,0,0,0,0,0
7213:384:2,3825,4712,4419,180.49,20.31,0.41,23.63,17.74,5.89,1.56,34.79,,35.13,34.65,34.6861,229,0,0,0,0,0
1707:350:1,31,185,80,46.54,-1.67,0.13,0.64,-1.79,2.43,-1.34,29.53,,31.29,26.14,34.1963,521,0.69,0.95,0.44,0.39,0.96
3512:123:2,4065,4637,4319,121.94,4.79,0.29,7.48,2.46,5.02,0.18,33.94,,34.08,33.8,34.6884,293,9.667e-,0,0,0,0
5306:475:1,0,0,0,,,,,,,,,,,,,0,,,,,
3014:353:4,0,0,0,,,,,,,,,,,,,0,,,,,
1317:110:1,5391,5686,5527,54.8,21.65,0.43,27.17,16.48,10.69,1.5,34.78,,34.99,34.56,34.6963,324,0,0,0,0,0
7500:496:3,140,1029,372,199.17,10.06,0.23,13.8,7.6,6.2,8.53,35.22,,35.27,35.18,35.3121,619,0,0,0,0,0
3613:489:4,0,0,0,,,,,,,,,,,,,0,,,,,
5015:489:2,3549,5818,4716,610.91,29.02,0.38,30.11,27.67,2.44,1.26,35.51,,35.76,35.19,34.7055,279,0,0,0,0,0
1400:248:3,0,0,0,,,,,,,,,,,,,0,,,,,
1003:144:2,0,0,0,,,,,,,,,,,,,0,,,,,
1700:476:3,1560,2640,2125,197.33,0.22,0.91,4.57,-1.79,6.36,-0.97,34.58,,34.9,34.24,34.9096,484,0.00157,0.00333,0,0,0
1101:360:2,0,0,0,,,,,,,,,,,,,0,,,,,
3710:238:1,0,0,0,,,,,,,,,,,,,0,,,,,
1109:219:2,0,114,59,32.51,28.95,0.27,30.98,27.11,3.87,28.55,31.6,,32.61,29.1,32.7094,828,0,0,0,0,0
3005:206:3,4431,5008,4648,87.6,28.48,0.25,30.77,26.25,4.52,1.35,35.33,,35.49,35.07,34.7162,357,0,0,0,0,0
7514:475:1,3730,4019,3853,41.42,7.96,0.43,15.67,3.7,11.97,1.47,32.35,,32.65,31.8,34.696,438,4.9E-4,0,0,0,0
5304:475:2,5086,5177,5122,20.48,17.57,0.33,22.38,13.7,8.68,0.22,35.5,,35.89,35.12,34.6691,380,0,0,0,0,0
3512:495:1,4615,4673,4634,13.29,1.11,0.26,4.12,-1.72,5.84,-0.2,33.92,,34.08,33.8,34.6847,215,4.0E-5,0,0,0,0
5606:499:2,0,541,207,120.54,-1.76,0.07,-0.43,-1.79,1.36,-0.06,32.06,,32.4,31.54,34.4431,342,0.6,0.33,0.65,0.71,0.69
3717:457:4,475,623,513,42.39,-1.79,0.01,-1.63,-1.79,0.16,-0.06,34.32,,34.5,34.24,34.7085,716,0.56,0.66,0.83,0.7,0.02
3511:374:1,4512,4614,4550,15.01,1.71,0.3,4.44,-1.1,5.54,-0.11,33.95,,34.03,33.85,34.6796,210,9.667e-,0,0,0,0
7406:495:4,197,369,300,44.07,4.47,0.87,15.02,-1.79,16.81,,29.5,,30.96,27.95,,182,0.1,0.11,0,0.01,0.27
7511:465:1,0,0,0,,,,,,,,,,,,,0,,,,,
3107:103:3,4647,5465,5120,148.99,27.55,0.26,29.95,24.84,5.11,1.42,34.57,,35.06,34.12,34.715,273,0,0,0,0,0
3317:495:3,0,27,13,9.05,15.9,0.48,20.66,12.53,8.13,15.19,35,,35.24,34.81,34.9984,101,0,0,0,0,0
5806:468:4,0,0,0,,,,,,,,,,,,,0,,,,,
3312:144:2,8,3740,1665,1441.75,17.63,0.34,21.02,14.85,6.17,2.51,35.75,,35.95,35.58,34.6496,557,0,0,0,0,0
1202:360:2,0,0,0,,,,,,,,,,,,,0,,,,,
3801:351:3,0,0,0,,,,,,,,,,,,,0,,,,,
5505:371:3,2999,4241,3957,262.49,1.72,0.43,5.13,-1.51,6.64,0.03,33.99,,34.18,33.93,34.6731,265,0.01,0,0.03,0.01,0
5509:216:4,4492,4979,4686,82.95,7.16,0.29,10.07,4.95,5.12,0.71,34.17,,34.22,34.11,34.7095,237,0,0,0,0,0
1109:381:4,1921,2197,2041,49.84,28.08,0.3,30.62,24.75,5.87,2.69,30.67,,32.45,27.11,34.8055,701,0,0,0,0,0
3715:476:3,0,0,0,,,,,,,,,,,,,0,,,,,
5706:390:1,1,1,1,0,-1.79,0,-1.78,-1.79,0.01,-1.38,34.49,,34.79,33.9,34.4578,0,1.5,1.5,1.5,1.5,1.5
3305:206:4,3639,5326,4596,418.01,21.2,0.21,25.3,18,7.3,0.96,35.51,,35.6,35.4,34.7139,203,0,0,0,0,0
5805:392:4,0,0,0,,,,,,,,,,,,,0,,,,,
7200:226:3,0,0,0,,,,,,,,,,,,,0,,,,,
3201:111:2,1142,2372,1695,307.26,18.2,0.34,22.52,15.27,7.25,3.33,35.29,,35.41,35.18,34.8833,180,0,0,0,0,0
7003:372:4,2569,5496,4368,560.13,27.31,0.27,28.93,24.91,4.02,2.39,35.85,,36.12,35.49,34.9086,374,0,0,0,0,0
3317:111:3,2672,4221,3383,339.74,19.89,0.42,23.91,17.04,6.87,1.86,35.79,,35.98,35.71,34.6866,321,0,0,0,0,0
1313:373:2,300,1902,1376,307.88,17.07,0.58,27.99,9.06,18.93,0.16,33.88,,34.34,33,34.0637,525,0,0,0,0,0
5015:496:4,1828,5706,5026,962.02,28.95,0.37,30.13,27.51,2.62,1.29,35.56,,35.79,35.19,34.7062,238,0,0,0,0,0
5012:468:3,4236,4770,4565,101.81,26.93,0.53,29.99,24.99,5,1.46,35.52,,35.76,35.27,34.694,311,0,0,0,0,0
5408:478:1,3831,4264,4081,90.62,8.82,0.35,11.9,6.3,5.6,0.89,33.96,,34.08,33.7,34.7117,226,0,0,0,0,0
3206:456:2,3674,4807,4145,218.76,23.36,0.29,27.46,20.13,7.33,1.56,35.43,,35.56,35.26,34.7224,184,0,0,0,0,0
1807:239:1,2630,3163,2925,147.72,-1.79,0,-1.78,-1.79,0.01,-0.78,31.19,,32.81,27.81,34.9295,0,0.93,0.98,0.89,0.87,0.96
5608:130:2,4406,4617,4522,59.01,1.2,0.46,5.31,-1.39,6.7,0.46,33.89,,33.98,33.71,34.6962,170,0.00688,0,0.01,0.00666,0
7209:458:2,0,0,0,,,,,,,,,,,,,0,,,,,
7515:235:1,3936,4506,4302,105.49,7.16,0.4,13.88,3.18,10.7,1.51,32.55,,32.8,31.49,34.6837,491,6.7E-4,0,0,0,0
3406:475:3,1256,2363,1969,270.64,4.79,0.36,8.43,2.49,5.94,1.7,33.86,,34.03,33.72,34.722,297,0,0,0,0,0
1411:237:4,0,0,0,,,,,,,,,,,,,0,,,,,
1303:381:1,0,0,0,,,,,,,,,,,,,0,,,,,
3316:350:1,2366,4360,3305,506.59,19.16,0.38,24.43,15.73,8.7,1.28,35.7,,35.86,35.59,34.7234,374,0,0,0,0,0
3417:132:1,0,0,0,,,,,,,,,,,,,0,,,,,
7205:363:4,4699,6127,5466,334.74,25.31,0.2,28.84,22.24,6.6,2.11,36.98,,37.16,36.85,34.854,197,0,0,0,0,0
5313:384:3,4921,5181,5027,52.25,15.12,0.35,19.63,11.63,8,1.26,34.41,,34.55,34.28,34.7122,262,0,0,0,0,0
5417:248:1,411,1060,665,207.1,12.63,0.49,17.62,9.14,8.48,5.83,34.59,,34.74,34.26,34.3562,797,0,0,0,0,0
5804:207:3,0,1,1,0,-1.79,0,-1.78,-1.79,0.01,,34.5,,34.77,34.06,,106,1.5,1.5,1.5,1.5,1.5
7401:111:4,2738,4691,3679,499,16.44,0.34,20.57,13.05,7.52,2.63,35.81,,35.89,35.7,34.9243,541,0,0,0,0,0
3312:100:2,0,0,0,,,,,,,,,,,,,0,,,,,
1704:134:1,308,363,333,11.13,0.78,0.54,7.11,-1.79,8.9,-0.88,34.68,,34.84,34.47,34.9043,421,0.04,0.12,0.01,0,0.02
7306:102:1,4584,5254,5003,128.8,23.88,0.22,28.88,19.53,9.35,2.24,36.62,,36.72,36.47,34.8884,252,0,0,0,0,0
1315:465:3,5619,5801,5714,46.45,18.71,0.6,27.24,12.16,15.08,1.55,34.46,,34.64,34.26,34.693,424,0,0,0,0,0
5417:394:3,5248,5536,5351,53.39,10.28,0.4,14.15,7.32,6.83,1.21,34.45,,34.71,34.34,34.7159,383,0,0,0,0,0
5617:468:2,2942,3608,3293,209.01,-1.13,0.15,2.22,-1.79,4.01,0.09,34.02,,34.2,33.76,34.6875,261,0.34,0.04,0.6,0.66,0.05
1012:362:4,326,4077,2526,1093.78,28.86,0.12,30.1,27.25,2.85,3.58,33.88,,34.29,33.36,34.5877,616,0,0,0,0,0
1014:238:4,3404,5093,4312,372.23,29.41,0.22,30.24,28.21,2.03,1.53,34.38,,34.77,34.09,34.6879,247,0,0,0,0,0
3708:373:4,0,0,0,,,,,,,,,,,,,0,,,,,
1407:486:4,0,0,0,,,,,,,,,,,,,0,,,,,
7109:374:4,0,0,0,,,,,,,,,,,,,0,,,,,
5109:468:3,3645,4184,3924,121.58,22.85,0.34,25.17,20.54,4.63,1.83,35.89,,36.2,35.58,34.6884,256,0,0,0,0,0
3408:216:4,2439,2964,2707,95.29,12.37,0.33,15.67,10,5.67,2,34.78,,34.99,34.57,34.7453,389,0,0,0,0,0
7113:374:4,4989,5295,5141,64.43,24.61,0.42,27.21,21.73,5.48,1.45,34.46,,34.72,34.18,34.6938,247,0,0,0,0,0
7510:495:1,0,0,0,,,,,,,,,,,,,0,,,,,
3510:392:2,4090,4586,4384,64.91,0.02,0.29,2.8,-1.79,4.59,-0.15,33.95,,34.08,33.81,34.6813,219,0.02,0,0.01,0.06,0
1817:488:3,1387,3509,2297,642.94,-1.79,0,-1.78,-1.79,0.01,-0.52,31.15,,31.74,30.65,34.951,0,1,1,1,1,1
1207:353:3,0,0,0,,,,,,,,,,,,,0,,,,,
5512:206:4,3797,4078,3933,51.68,7.98,0.42,11.7,6.23,5.47,1.4,34.33,,34.41,34.2,34.7207,319,0,0,0,0,0
1804:476:1,2846,3866,3433,308.99,-1.79,0,-1.78,-1.79,0.01,-0.6,31.95,,32.67,31.28,34.9552,0,0.94,0.98,0.93,0.9,0.97

100
files/table.txt Normal file
View File

@ -0,0 +1,100 @@
3214:370:3,0,0,0,,,,,,,,,,,,,0,,,,,
1514:228:3,1080,1320,1217,49.45,3.95,0.46,14.12,-1.79,15.91,2.3,32.66,,32.95,32.3,34.4143,572,0.1,0.21,0,0,0.2
3302:459:4,4485,4698,4627,48.74,20.55,0.28,24,17.58,6.42,1.21,35.47,,35.52,35.35,34.7433,635,0,0,0,0,0
5511:245:2,3156,3347,3278,32.85,6.24,0.49,9.43,4.32,5.11,1.29,34.17,,34.25,34.06,34.7168,283,0,0,0,0,0
1007:456:4,2277,2607,2441,75.86,28.6,0.3,30.92,27.34,3.58,2.06,34.67,,35.43,33.71,34.7468,697,0,0,0,0,0
5006:134:3,0,0,0,,,,,,,,,,,,,0,,,,,
1602:238:1,0,0,0,,,,,,,,,,,,,0,,,,,
3102:393:2,0,0,0,,,,,,,,,,,,,0,,,,,
3307:102:2,3521,4511,3866,182.68,20.17,0.3,24.72,16.23,8.49,1.12,35.77,,35.98,35.67,34.7112,170,0,0,0,0,0
5716:248:4,3280,3498,3417,51.34,-1.79,0,-1.78,-1.79,0.01,0.31,34,,34.31,33.79,34.7005,388,0.75,0.81,0.93,0.91,0.39
3706:111:4,0,0,0,,,,,,,,,,,,,0,,,,,
3100:228:4,4469,4750,4598,77.81,23.32,0.47,28.18,18.91,9.27,2.39,35.9,,36.37,34.76,34.8874,662,0,0,0,0,0
3802:142:2,0,0,0,,,,,,,,,,,,,0,,,,,
7217:394:2,5097,5464,5317,67.82,22.27,0.5,27.24,17.19,10.05,1.55,35.1,,35.34,34.93,34.6917,267,0,0,0,0,0
7115:458:3,4413,6028,5527,238.07,26.16,0.35,27.89,24.23,3.66,1.43,34.62,,34.96,34.44,34.6986,203,0,0,0,0,0
3115:468:1,2646,3449,3112,197.07,27.13,0.37,29.63,24.39,5.24,1.88,35.07,,35.28,34.82,34.698,253,0,0,0,0,0
1810:235:3,2370,3650,3427,342.62,-1.79,0,-1.78,-1.79,0.01,-0.71,30.63,,32.43,28.69,34.939,0,0.93,0.98,0.89,0.88,0.97
3815:351:3,0,0,0,,,,,,,,,,,,,0,,,,,
7512:458:3,0,0,0,,,,,,,,,,,,,0,,,,,
5605:218:1,570,5146,2644,1252.42,-0.53,0.3,2.35,-1.79,4.14,0.43,33.88,,34.04,33.61,34.6985,220,0.05,0.00333,0.13,0.08,0
1603:133:1,0,0,0,,,,,,,,,,,,,0,,,,,
1110:465:2,0,0,0,,,,,,,,,,,,,0,,,,,
5005:238:4,0,0,0,,,,,,,,,,,,,0,,,,,
3711:114:3,0,0,0,,,,,,,,,,,,,0,,,,,
1600:104:1,271,304,288,6.96,9.48,0.39,17.71,5.12,12.59,6.97,33.4,,34.06,32.13,35.1901,862,0.01,0.02,0,0,0.03
7111:468:1,3034,4116,3825,202.95,25.98,0.31,28.57,23.68,4.89,1.5,34.17,,34.44,33.81,34.6885,332,0,0,0,0,0
5313:134:4,4413,4800,4604,87.77,18.5,0.39,23.68,14.94,8.74,1.3,34.99,,35.26,34.76,34.7075,179,0,0,0,0,0
7810:363:1,1596,2076,1832,188.49,-1.79,0,-1.78,-1.79,0.01,-0.4,30.94,,31.19,30.55,34.9299,0,0.97,0.99,0.96,0.95,0.99
1717:352:1,97,180,138,22.07,-1.79,0,-1.76,-1.79,0.03,-1.04,29.35,,30.13,28.53,33.4049,0,0.92,0.97,0.84,0.87,0.97
3404:382:3,3887,4494,4191,162.59,4.78,0.36,7.19,2.47,4.72,0.43,33.9,,34.12,33.77,34.651,322,8.25,0,0,0,0
7515:476:4,0,0,0,,,,,,,,,,,,,0,,,,,
1709:123:3,0,0,0,,,,,,,,,,,,,0,,,,,
5205:114:1,0,0,0,,,,,,,,,,,,,0,,,,,
7006:112:1,0,0,0,,,,,,,,,,,,,0,,,,,
3513:487:1,4294,4692,4475,90.74,1.95,0.28,4.89,-0.32,5.21,-0.05,33.82,,33.91,33.72,34.6773,206,1.3E-4,0,0,0,0
3102:370:2,0,0,0,,,,,,,,,,,,,0,,,,,
5613:247:1,4237,4904,4573,173.63,-0.9,0.42,2.01,-1.79,3.8,0.24,33.92,,34.07,33.63,34.7061,165,0.2,0,0.21,0.51,0.06
3517:352:2,5068,5548,5372,62.83,6.65,0.34,9.58,4.95,4.63,0.96,34.14,,34.39,33.95,34.707,287,0,0,0,0,0
5201:103:2,2807,4255,3603,327.26,23.47,0.35,26.85,20.99,5.86,2.23,36.64,,36.79,36.46,34.8601,155,0,0,0,0,0
3710:457:2,0,0,0,,,,,,,,,,,,,0,,,,,
7406:350:4,0,131,21,29.77,6.73,0.33,18.93,-1.54,20.47,4.47,30.64,,31.26,29.89,30.8745,130,0.06,0.13,0,0.00487,0.13
1602:382:2,0,0,0,,,,,,,,,,,,,0,,,,,
3208:392:3,4032,4337,4168,62.46,20.3,0.31,24.85,16.45,8.4,1.36,35.78,,36.03,35.67,34.7223,166,0,0,0,0,0
1004:382:3,0,0,0,,,,,,,,,,,,,0,,,,,
1406:372:3,0,0,0,,,,,,,,,,,,,0,,,,,
3504:102:2,3980,4204,4075,52.37,3.06,0.28,5.55,1.2,4.35,0.2,33.95,,34.07,33.87,34.6437,348,2.667e-,0,0,0,0
1316:124:4,6183,6290,6262,15.09,21.37,0.5,27.11,16.29,10.82,,34.67,,34.85,34.54,,376,0,0,0,0,0
1603:487:3,0,121,68,30.23,3.98,0.6,10.49,-1.73,12.22,3.01,34.93,,35.2,34.75,34.6867,633,0.01,0.02,0.01,0.02,0.02
3717:497:2,1,1,1,0,-1.79,0,-1.78,-1.79,0.01,-1.45,34.31,,34.54,33.98,34.3521,661,1.5,1.5,1.5,1.5,1.5
3111:380:2,4117,5047,4532,218.35,26.05,0.43,29.54,23.3,6.24,1.18,34.79,,35.01,34.6,34.7135,255,0,0,0,0,0
7503:228:2,2978,3873,3569,175.58,8.44,0.66,14.13,4.87,9.26,2.64,34.71,,34.9,34.44,34.9337,629,0,0,0,0,0
3312:237:2,3,862,344,269.72,17.86,0.29,21.21,14.9,6.31,11.84,35.81,,36.04,35.56,35.1271,622,0,0,0,0,0
1604:100:4,0,0,0,,,,,,,,,,,,,0,,,,,
5805:207:1,1,1,1,0,-1.79,0,-1.78,-1.79,0.01,-1.3,34.64,,34.92,34.24,34.6741,0,1.5,1.5,1.5,1.5,1.5
3808:486:4,0,0,0,,,,,,,,,,,,,0,,,,,
1113:111:4,5468,6222,5905,126.52,28.77,0.26,30.48,26.93,3.55,,34.21,,34.43,34.01,,194,0,0,0,0,0
5504:459:4,3503,4373,3978,241.42,3.39,0.47,6.6,-0.19,6.79,0.32,33.99,,34.09,33.86,34.6805,311,6.833e-,0,0,0,0
3015:478:1,0,2301,880,817.09,29.26,0.34,30.5,27.3,3.2,4.84,34.63,,35.06,34.22,34.5298,392,0,0,0,0,0
7001:487:4,1974,4794,4251,503.36,27.47,0.34,29.33,24.24,5.09,2.3,35.12,,35.77,34.45,34.8818,548,0,0,0,0,0
1202:469:2,0,0,0,,,,,,,,,,,,,0,,,,,
3809:143:4,0,0,0,,,,,,,,,,,,,0,,,,,
1303:123:3,1226,1584,1385,89.6,21.98,0.31,28.4,16.09,12.31,13.63,39.09,,39.29,38.84,38.7163,317,0,0,0,0,0
5406:102:2,0,0,0,,,,,,,,,,,,,0,,,,,
5116:228:4,5100,5258,5181,27.52,28.82,0.29,30.09,27.09,3,1.09,35.3,,35.76,34.9,34.7102,207,0,0,0,0,0
7305:380:2,5152,5443,5393,34.25,21.41,0.25,27.56,17.45,10.11,2.27,36.15,,36.31,35.84,34.8778,374,0,0,0,0,0
3709:226:4,0,0,0,,,,,,,,,,,,,0,,,,,
1808:133:3,3273,3484,3392,55.93,-1.79,0,-1.78,-1.79,0.01,-0.7,30.72,,32.6,27.39,34.9412,0,0.93,0.98,0.89,0.88,0.96
7807:248:3,1171,1276,1199,25.78,-1.79,0,-1.78,-1.79,0.01,-0.19,30.52,,31.41,29.24,34.9273,0,0.98,0.99,0.96,0.97,0.99
7710:225:3,0,44,11,13.03,-1.79,0.02,-1.29,-1.79,0.5,-0.84,30.95,,37.68,27.2,31.0727,49,0.82,0.91,0.69,0.79,0.89
7312:371:3,0,0,0,,,,,,,,,,,,,0,,,,,
5212:122:1,3115,3815,3527,187.11,25.03,0.25,27.63,22.24,5.39,1.7,36.44,,36.68,36.14,34.6819,127,0,0,0,0,0
5410:465:2,3154,3727,3575,83.87,9.48,0.32,13.08,6.87,6.21,1.35,34.12,,34.22,34.02,34.7125,245,0,0,0,0,0
3610:372:3,0,0,0,,,,,,,,,,,,,0,,,,,
3010:103:2,0,64,10,9.18,29.1,0.35,31.21,27.1,4.11,29.11,29.87,,31.57,27.72,30.2693,183,0,0,0,0,0
3204:144:4,0,0,0,,,,,,,,,,,,,0,,,,,
3109:487:3,5178,5741,5503,75.15,24.94,0.38,27.5,22.65,4.85,1.17,34.71,,34.99,34.53,34.7109,218,0,0,0,0,0
5416:218:3,4236,4744,4580,91.81,14.1,0.54,19.23,10.43,8.8,0.89,34.84,,34.91,34.66,34.706,371,0,0,0,0,0
3217:218:1,3375,4010,3710,141.95,25.18,0.39,28.43,21.84,6.59,1.85,35.39,,35.59,35.19,34.6822,301,0,0,0,0,0
5706:110:2,0,37,19,9.76,-1.79,0,-1.78,-1.79,0.01,-1.62,34.16,,34.61,33.2,34.2117,210,0.88,0.89,0.89,0.87,0.88
7708:110:3,0,0,0,,,,,,,,,,,,,0,,,,,
5107:225:2,0,0,0,,,,,,,,,,,,,0,,,,,
7015:140:1,4730,5183,5003,78.85,27.81,0.64,29.86,24.94,4.92,1.43,34.81,,34.88,34.71,34.695,357,0,0,0,0,0
3009:477:1,4152,5179,4685,222.74,28.49,0.38,30.45,25.89,4.56,1.17,34.09,,34.38,33.62,34.7137,274,0,0,0,0,0
3414:478:4,1057,1673,1340,169.31,10.41,0.43,13.23,8.46,4.77,3.29,34.61,,34.8,34.36,34.429,393,0,0,0,0,0
7200:142:3,0,0,0,,,,,,,,,,,,,0,,,,,
5314:364:1,4516,5384,4890,166.04,16.27,0.32,21.4,13,8.4,1.2,34.74,,34.87,34.57,34.7034,246,0,0,0,0,0
7306:390:1,4322,5333,5132,137.96,21.54,0.37,27.23,16.05,11.18,2.28,35.67,,36.09,35.35,34.8922,495,0,0,0,0,0
5700:390:3,0,0,0,,,,,,,,,,,,,0,,,,,
3106:485:3,2042,3309,2699,250.96,25.62,0.28,28.86,22.75,6.11,1.86,34.85,,35.16,34.67,34.7201,203,0,0,0,0,0
5707:371:4,1,1,1,0,-1.79,0,-1.78,-1.79,0.01,-1.7,33.9,,34.25,33.5,33.4427,0,1.5,1.5,1.5,1.5,1.5
7602:123:2,1285,1531,1359,56.7,8.32,0.32,12.4,5.81,6.59,3.78,35.25,,35.33,35.14,34.9939,441,0,0,0,0,0
3712:390:1,0,0,0,,,,,,,,,,,,,0,,,,,
1412:372:4,0,0,0,,,,,,,,,,,,,0,,,,,
5700:217:2,0,103,43,29.45,-1.78,0.01,-1.55,-1.79,0.24,-1.72,34.11,,34.56,33.45,34.2757,187,1.5,1.5,1.5,1.5,1.5
1616:102:1,0,69,55,17.46,2.82,0.37,12.45,-1.79,14.24,0.41,32.36,,33,31.51,32.7001,908,0.34,0.53,0.03,0.08,0.7
7501:361:2,2571,2809,2668,54.47,11.33,0.26,15.48,9.1,6.38,3.08,35.34,,35.42,35.27,34.9611,501,0,0,0,0,0
5413:495:4,4357,4747,4546,92.09,8.74,0.44,12.57,6.41,6.16,1.28,34.33,,34.51,34.19,34.7173,281,0,0,0,0,0
1007:485:1,1563,2842,2458,320.42,28.53,0.29,31.05,26.28,4.77,2.07,34.54,,35.09,33.44,34.7277,907,0,0,0,0,0
7114:229:2,5187,5440,5317,54.71,26.63,0.34,28.64,24.51,4.13,1.47,34.36,,34.68,34.04,34.6959,248,0,0,0,0,0
7414:101:3,4582,4876,4718,53.09,14.79,0.75,21.34,9.31,12.03,1.54,33.32,,33.43,33.06,34.6921,430,0,0,0,0,0

44
log.txt Normal file
View File

@ -0,0 +1,44 @@
0 [main] INFO org.gcube.common.scope.impl.ScopeProviderScanner - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@685e95ae
633 [main] INFO org.gcube.common.scan.DefaultScanner - matched 13 resources from 62 urls in 313 ms
653 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/d4science.servicemap
682 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/vo1.servicemap
683 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/openbio.servicemap
685 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/cnr.servicemap
687 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/testing.servicemap
689 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/gcube.servicemap
690 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/farm.servicemap
692 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/isti.servicemap
694 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/gcubeapps.servicemap
696 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/ecosystem.servicemap
697 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/securevo.servicemap
699 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/devsec.servicemap
701 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/devnext.servicemap
879 [main] INFO org.gcube.common.clients.stubs.jaxws.StubFactory - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl
1420 [main] INFO org.gcube.resources.discovery.icclient.ICClient - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database') return $resource
1497 [main] INFO org.gcube.common.clients.stubs.jaxws.handlers.HandlerRegistry - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@1c7c0f04
1498 [main] INFO org.gcube.common.clients.stubs.jaxws.handlers.HandlerRegistry - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@16bc6a12
1499 [main] INFO org.gcube.common.clients.stubs.jaxws.handlers.HandlerRegistry - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@2f1c7c32
1500 [main] INFO org.gcube.common.clients.stubs.jaxws.handlers.HandlerRegistry - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@281de7b2
1624 [main] INFO org.gcube.resources.discovery.icclient.ICClient - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database') return $resource in 204 ms
0 [main] INFO org.gcube.common.scope.impl.ScopeProviderScanner - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@3c22d5b5
508 [main] INFO org.gcube.common.scan.DefaultScanner - matched 13 resources from 62 urls in 314 ms
529 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/d4science.servicemap
557 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/vo1.servicemap
559 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/openbio.servicemap
560 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/cnr.servicemap
562 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/testing.servicemap
564 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/gcube.servicemap
566 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/farm.servicemap
568 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/isti.servicemap
570 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/gcubeapps.servicemap
572 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/ecosystem.servicemap
574 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/securevo.servicemap
576 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/devsec.servicemap
578 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/devnext.servicemap
717 [main] INFO org.gcube.common.clients.stubs.jaxws.StubFactory - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl
892 [main] INFO org.gcube.resources.discovery.icclient.ICClient - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database') return $resource
926 [main] INFO org.gcube.common.clients.stubs.jaxws.handlers.HandlerRegistry - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@3d48392b
927 [main] INFO org.gcube.common.clients.stubs.jaxws.handlers.HandlerRegistry - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@657189ad
928 [main] INFO org.gcube.common.clients.stubs.jaxws.handlers.HandlerRegistry - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@45f2a7e9
929 [main] INFO org.gcube.common.clients.stubs.jaxws.handlers.HandlerRegistry - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@5eb85400
1022 [main] INFO org.gcube.resources.discovery.icclient.ICClient - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database') return $resource in 130 ms

147
pom.xml Normal file
View File

@ -0,0 +1,147 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <!-- Project coordinates declared together; the artifactId previously sat
       at the bottom of <project>, after <build>, which Maven accepts but
       makes the GAV hard to read. -->
  <groupId>org.gcube.dataanalysis</groupId>
  <artifactId>databases-resources-manager</artifactId>
  <version>1.0.0-SNAPSHOT</version>
  <parent>
    <groupId>org.gcube.tools</groupId>
    <artifactId>maven-parent</artifactId>
    <version>1.0.0</version>
  </parent>
  <dependencies>
    <dependency>
      <groupId>org.gcube.contentmanagement</groupId>
      <artifactId>storage-manager-core</artifactId>
      <version>[1.0.0-SNAPSHOT, 3.0.0-SNAPSHOT)</version>
      <!-- <scope>provided</scope> -->
    </dependency>
    <dependency>
      <groupId>org.gcube.core</groupId>
      <artifactId>common-scope-maps</artifactId>
      <version>[1.0.0-SNAPSHOT, 3.0.0-SNAPSHOT)</version>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.gcube.resources.discovery</groupId>
      <artifactId>ic-client</artifactId>
      <version>[1.0.0-SNAPSHOT, 3.0.0-SNAPSHOT)</version>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.gcube.contentmanagement</groupId>
      <artifactId>storage-manager-wrapper</artifactId>
      <version>[1.0.0-SNAPSHOT, 3.0.0-SNAPSHOT)</version>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>4.11</version>
    </dependency>
    <dependency>
      <groupId>org.gcube.core</groupId>
      <artifactId>common-scope</artifactId>
      <version>[1.0.0-SNAPSHOT, 3.0.0-SNAPSHOT)</version>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.gcube.resources</groupId>
      <artifactId>common-gcore-resources</artifactId>
      <version>[1.0.0-SNAPSHOT,2.0.0-SNAPSHOT)</version>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.gcube.core</groupId>
      <artifactId>common-encryption</artifactId>
      <version>[1.0.1-SNAPSHOT,3.0.0-SNAPSHOT)</version>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.gcube.dataanalysis</groupId>
      <artifactId>ecological-engine</artifactId>
      <version>[1.7.2-SNAPSHOT,1.8.0-SNAPSHOT)</version>
      <exclusions>
        <!-- The postgresql driver is declared explicitly below with a pinned
             version, so the transitive one is excluded. -->
        <exclusion>
          <artifactId>postgresql</artifactId>
          <groupId>postgresql</groupId>
        </exclusion>
      </exclusions>
    </dependency>
    <!-- <dependency> -->
    <!-- <groupId>org.gcube.dataanalysis</groupId> -->
    <!-- <artifactId>ecological-engine</artifactId> -->
    <!-- <version>1.7.3-SNAPSHOT</version> -->
    <!-- </dependency> -->
    <dependency>
      <groupId>com.mysql</groupId>
      <artifactId>mysql-connector</artifactId>
      <version>5.1.7</version>
    </dependency>
    <dependency>
      <groupId>postgresql</groupId>
      <artifactId>postgresql</artifactId>
      <version>9.1-901-1.jdbc4</version>
    </dependency>
    <dependency>
      <groupId>org.gcube.resources.discovery</groupId>
      <artifactId>discovery-client</artifactId>
      <version>[1.0.0-SNAPSHOT,2.0.0-SNAPSHOT)</version>
    </dependency>
    <dependency>
      <groupId>rapidminer-custom</groupId>
      <artifactId>swissql-api</artifactId>
      <version>1.0.0</version>
    </dependency>
    <!--<dependency> <groupId>javax.persistence</groupId> <artifactId>persistence-api</artifactId>
      <version>1.0.2</version> </dependency> -->
    <!-- <dependency> -->
    <!-- <groupId>org.slf4j</groupId> -->
    <!-- <artifactId>slf4j-log4j12</artifactId> -->
    <!-- <version>1.6.4</version> -->
    <!-- </dependency> -->
  </dependencies>
  <repositories>
    <repository>
      <id>dnet-deps</id>
      <name>dnet-deps</name>
      <!-- Trailing "//" normalized to a single slash. -->
      <url>http://maven.research-infrastructures.eu/nexus/content/repositories/dnet-deps/</url>
    </repository>
  </repositories>
  <build>
    <plugins>
      <plugin>
        <artifactId>maven-assembly-plugin</artifactId>
        <configuration>
          <descriptorRefs>
            <descriptorRef>jar-with-dependencies</descriptorRef>
          </descriptorRefs>
        </configuration>
        <executions>
          <execution>
            <phase>package</phase> <!-- bind to the packaging phase -->
            <goals>
              <goal>single</goal>
            </goals>
          </execution>
        </executions>
      </plugin>
    </plugins>
  </build>
</project>

View File

@ -0,0 +1,37 @@
package org.gcube.dataanalysis.databases.access;
import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
import java.util.List;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.dataanalysis.databases.resources.DBResource;
import org.gcube.resources.discovery.client.api.DiscoveryClient;
import org.gcube.resources.discovery.client.api.DiscoveryException;
import org.gcube.resources.discovery.client.api.InvalidResultException;
import org.gcube.resources.discovery.client.queries.impl.XQuery;
/**
 * Discovers database resources by querying the Information System (IS).
 * The discovery yields the matching resources as a list of DBResource objects.
 */
public class DatabasesDiscoverer extends ResourceDiscoverer {

	public DatabasesDiscoverer() {
	}

	/**
	 * Performs the discovery of database resources.
	 *
	 * @return the DBResource descriptors whose profile category is 'Database'
	 */
	public List<DBResource> discover() throws IllegalStateException, DiscoveryException, InvalidResultException {
		// Restrict the generic ServiceEndpoint query to the 'Database' category.
		XQuery databaseQuery = queryFor(ServiceEndpoint.class);
		databaseQuery.addCondition("$resource/Profile/Category/text() eq 'Database'");
		return clientFor(DBResource.class).submit(databaseQuery);
	}
}

View File

@ -0,0 +1,13 @@
package org.gcube.dataanalysis.databases.access;
import java.util.List;
import org.gcube.dataanalysis.databases.resources.DBResource;
/** Abstract base class for resource discoverers that query the IS.
 * Implementations return the matching resources as a list of DBResource objects. */
public abstract class ResourceDiscoverer {
/** Performs the discovery and returns the resources found. */
public abstract List<DBResource> discover();
}

View File

@ -0,0 +1,63 @@
package org.gcube.dataanalysis.databases.accessold;
import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
import java.util.ArrayList;
import java.util.List;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataanalysis.databases.resources.DBResource;
import org.gcube.resources.discovery.client.api.DiscoveryClient;
import org.gcube.resources.discovery.client.queries.impl.XQuery;
/**
* Class that, allowing to set a scope and to submit a query to the IS, recover
* a list of url for each DBResource object
*/
public class AddressesDiscoverer {
/** Method to set the scope */
public void setScope(String scope) {
ScopeProvider.instance.set(scope);
}
/** Method to recover the url's list */
public List<String> retrieveAddress(String Category) {
List<String> addresses = new ArrayList<String>();
XQuery query = queryFor(ServiceEndpoint.class);
query.addCondition("$resource/Profile/Category/text() eq '" + Category
+ "'");
DiscoveryClient<DBResource> submitop = clientFor(DBResource.class);
List<DBResource> access = submitop.submit(query);
// System.out.println("size resource: "+access.size());
int APsize = 0;
String address = "";
for (int i = 0; i < access.size(); i++) {
APsize = access.get(i).getAccessPoints().size();
for (int j = 0; j < APsize; j++) {
address = access.get(i).getAccessPoints().get(j).address();
addresses.add(address);
}
}
return addresses;
}
}

View File

@ -0,0 +1,38 @@
package org.gcube.dataanalysis.databases.accessold;
import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
import java.util.List;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataanalysis.databases.resources.DBResource;
import org.gcube.resources.discovery.client.api.DiscoveryClient;
import org.gcube.resources.discovery.client.queries.impl.XQuery;
/**
 * Discovers database resources in the IS within a given scope.
 * The discovery yields the matching resources as a list of DBResource objects.
 */
public class DatabasesDiscoverer extends ResourceDiscoverer {

	public DatabasesDiscoverer() {
	}

	/**
	 * Sets the scope and performs the discovery of database resources.
	 *
	 * @param scope the scope to query in
	 * @return the DBResource descriptors whose profile category is 'Database'
	 */
	public List<DBResource> discovery(String scope) {
		ScopeProvider.instance.set(scope);
		// Restrict the generic ServiceEndpoint query to the 'Database' category.
		XQuery databaseQuery = queryFor(ServiceEndpoint.class);
		databaseQuery.addCondition("$resource/Profile/Category/text() eq 'Database'");
		return clientFor(DBResource.class).submit(databaseQuery);
	}
}

View File

@ -0,0 +1,15 @@
package org.gcube.dataanalysis.databases.accessold;
import java.util.List;
import org.gcube.dataanalysis.databases.resources.DBResource;
/** Abstract base class for resource discoverers that query the IS within a given scope.
 * Implementations return the matching resources as a list of DBResource objects. */
public abstract class ResourceDiscoverer {
/** Sets the given scope, performs the discovery and returns the resources found. */
public abstract List<DBResource> discovery(String scope);
}

View File

@ -0,0 +1,36 @@
package org.gcube.dataanalysis.databases.converter;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import com.adventnet.swissqlapi.SwisSQLAPI;
import com.adventnet.swissqlapi.sql.exception.ConvertException;
import com.adventnet.swissqlapi.sql.parser.ParseException;
/**
 * Converts a SQL query into another dialect by means of the SwisSQL API.
 */
public class SqlDialectConverter {

	// SwisSQL converter wrapping the query passed at construction time.
	private SwisSQLAPI converter;

	// Constructor: wraps the query to be converted.
	public SqlDialectConverter(String query) {
		converter = new SwisSQLAPI(query);
	}

	/**
	 * Translates the wrapped query into the given target dialect.
	 *
	 * @param dialect the SwisSQL dialect identifier
	 * @return the converted query text
	 */
	public String convert(int dialect) throws ParseException, ConvertException {
		String queryConverted = converter.convert(dialect);
		AnalysisLogger.getLogger().debug(
				"In SqlDialectConverter-> query converted: " + queryConverted);
		return queryConverted;
	}
}

View File

@ -0,0 +1,203 @@
package org.gcube.dataanalysis.databases.lexer;
import java.util.ArrayList;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
/**
 * Filters queries that are not read-only compliant by means of a lexical
 * analysis: the query is normalized (punctuation stripped, quoted literals
 * removed) and then matched, word by word, against a blacklist of
 * data/schema-modifying SQL keywords.
 *
 * The keywords registered here are common to MySQL and PostgreSQL;
 * database-specific subclasses extend the list through getBlackList().
 */
public class LexicalAnalyzer {

	// Keywords' blacklist; subclasses extend it in place via getBlackList().
	private ArrayList<String> BlackList = new ArrayList<String>();

	public LexicalAnalyzer() {
		// Build the blacklist exactly once. The previous implementation
		// populated it inside analyze(), so every call on the same instance
		// appended another full copy of the base keywords and the list grew
		// without bound.
		BlackList.add("INSERT");
		// NOTE(review): the trailing space below becomes "[ ]+" in the derived
		// regex, so these two keywords only match when followed by another
		// word (e.g. "DELETE FROM t"); a bare "DELETE" query is not caught —
		// confirm this is intended.
		BlackList.add("DELETE ");
		BlackList.add("UPDATE ");
		BlackList.add("CREATE");
		BlackList.add("ALTER");
		BlackList.add("DROP");
		BlackList.add("GRANT");
		BlackList.add("REVOKE");
		BlackList.add("TRUNCATE");
		BlackList.add("DO");
		BlackList.add("START TRANSACTION");
		BlackList.add("COMMIT");
		BlackList.add("BEGIN");
		BlackList.add("ROLLBACK");
		BlackList.add("SET");
		BlackList.add("SAVEPOINT");
		BlackList.add("RELEASE SAVEPOINT");
		BlackList.add("LOCK");
		BlackList.add("DECLARE");
		BlackList.add("PREPARE");
		BlackList.add("FETCH");
		// BlackList.add("EXPLAIN");
		BlackList.add("ANALYZE");
		BlackList.add("EXECUTE");
		BlackList.add("SHOW");
		BlackList.add("RESET");
	}

	/**
	 * Performs the lexical analysis of the given query.
	 *
	 * @param query the SQL query to screen
	 * @return false when the query is read-only compliant (true is never
	 *         actually returned: a blacklisted keyword raises instead)
	 * @throws Exception if the query contains a blacklisted keyword
	 */
	public boolean analyze(String query) throws Exception {
		AnalysisLogger.getLogger().debug(
				"LexicalAnalyzer->blacklist size: " + BlackList.size());
		// parse the query using the regular expressions
		String queryParsed = LexicalAnalyzer.parseRegularExpressions(query);
		// check if the query contains a word defined in the blacklist
		boolean notAllowed = check(queryParsed);
		AnalysisLogger.getLogger().debug(
				"LexicalAnalyzer->query not allowed: " + notAllowed);
		if (notAllowed) {
			throw new Exception("Only read-only queries are allowed");
		}
		return notAllowed;
	}

	/**
	 * Checks whether the (already normalized) query contains a word of the
	 * blacklist, matched case-insensitively as a whole word.
	 */
	public boolean check(String query) {
		// Uppercase once, outside the loop (keywords are stored uppercase).
		String upperQuery = query.toUpperCase();
		for (String keyword : BlackList) {
			// Whole-word match; a run of blanks inside a multi-word keyword
			// matches one or more spaces in the query ("[ ]+").
			String regex = ".*\\b" + keyword.replaceAll(" +", "[ ]\\+")
					+ "\\b.*";
			if (upperQuery.matches(regex)) {
				return true;
			}
		}
		return false;
	}

	/** Exposes the shared blacklist so subclasses can extend it. */
	public ArrayList<String> getBlackList() {
		return BlackList;
	}

	// Normalizes a query: punctuation characters become spaces, runs of
	// spaces collapse to one, and text enclosed in '', "" or `` is removed
	// so that quoted string literals cannot trigger keyword matches.
	private static String parseRegularExpressions(String phrase) {
		// replacement of the punctuation characters
		// String todelete = "[\\]\\[!#$%&()*+,./:;<=>?@\\^_{|}~-]";
		String todelete = "[\\]\\[!#$%&()*+,./:;<=>?@\\^{|}~-]";
		phrase = phrase.replaceAll(todelete, " ");
		phrase = phrase.replaceAll("[ ]+", " ");
		AnalysisLogger.getLogger().debug(
				"LexicalAnalyzer-> : replacing query " + phrase);
		// elimination by means of a replacement of the word enclosed in '',
		// "",``
		String apex = "'.*'";
		phrase = phrase.replaceAll(apex, "");
		String apex2 = "\".*\"";
		phrase = phrase.replaceAll(apex2, "");
		String apex3 = "`.*`";
		phrase = phrase.replaceAll(apex3, "");
		AnalysisLogger.getLogger().debug(
				"LexicalAnalyzer-> : parsed string " + phrase);
		return phrase;
	}
}

View File

@ -0,0 +1,56 @@
package org.gcube.dataanalysis.databases.lexer;
import java.util.ArrayList;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
/**
 * Lexical analyzer that filters non read-only queries for the MySQL
 * database: it extends the common keyword blacklist with MySQL-specific
 * write and administrative statements.
 */
public class MySQLLexicalAnalyzer extends LexicalAnalyzer {

	public MySQLLexicalAnalyzer() {
		// Extend the superclass blacklist in place. (The previous code
		// allocated a fresh ArrayList and immediately discarded it by
		// overwriting the reference with super.getBlackList().)
		updateBlackList(getBlackList());
	}

	// Adds the MySQL-specific keywords to the shared blacklist.
	private void updateBlackList(ArrayList<String> blackList) {
		// Mysql's keywords
		blackList.add("RENAME");
		blackList.add("REPLACE");
		blackList.add("LOAD DATA INFILE");
		blackList.add("CALL");
		blackList.add("HANDLER");
		blackList.add("UNLOCK");
		blackList.add("DEALLOCATE PREPARE");
		blackList.add("OPEN");
		blackList.add("CLOSE");
		blackList.add("BACKUP");
		blackList.add("CHECK");
		blackList.add("CHECKSUM");
		blackList.add("OPTIMIZE");
		blackList.add("REPAIR");
		blackList.add("RESTORE");
		blackList.add("CACHE");
		blackList.add("FLUSH");
		blackList.add("KILL");
		blackList.add("LOAD INDEX INTO CACHE");
		blackList.add("PURGE BINARY LOGS");
		// "RESET" is already part of the base blacklist.
		AnalysisLogger.getLogger().debug(
				"MySQLLexicalAnalyzer->: blacklist updated");
	}
}

View File

@ -0,0 +1,67 @@
package org.gcube.dataanalysis.databases.lexer;
import java.util.ArrayList;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
/**
 * Lexical analyzer that filters non read-only queries for the Postgres
 * database: it extends the common keyword blacklist with Postgres-specific
 * write and administrative statements.
 */
public class PostgresLexicalAnalyzer extends LexicalAnalyzer {

	public PostgresLexicalAnalyzer() {
		// Extend the superclass blacklist in place. (The previous code
		// allocated a fresh ArrayList and immediately discarded it by
		// overwriting the reference with super.getBlackList().)
		updateBlackList(getBlackList());
	}

	// Adds the Postgres-specific keywords to the shared blacklist.
	private void updateBlackList(ArrayList<String> blackList) {
		blackList.add("COPY");
		blackList.add("COMMENT");
		blackList.add("SELECT INTO");
		blackList.add("UNLISTEN");
		blackList.add("VACUUM");
		blackList.add("VALUES");
		blackList.add("SECURITY LABEL");
		blackList.add("REASSIGN OWNED");
		blackList.add("ABORT");
		blackList.add("CHECKPOINT");
		blackList.add("CLOSE");
		blackList.add("CLUSTER");
		blackList.add("DEALLOCATE");
		blackList.add("DISCARD");
		blackList.add("END");
		blackList.add("LISTEN");
		blackList.add("LOAD");
		blackList.add("MOVE");
		blackList.add("NOTIFY");
		blackList.add("REFRESH MATERIALIZED VIEW");
		blackList.add("REINDEX");
		// blackList.add("RESET");
		// blackList.add("SET ROLE");
		// blackList.add("SET SESSION AUTHORIZATION");
		// blackList.add("SET TRANSACTION");
		// blackList.add("SET CONSTRAINTS");
		AnalysisLogger.getLogger().debug(
				"PostgresLexicalAnalyzer->: blacklist updated");
	}
}

View File

@ -0,0 +1,803 @@
package org.gcube.dataanalysis.databases.resources;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElementRef;
import javax.xml.bind.annotation.XmlElementWrapper;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlValue;
import org.gcube.common.encryption.StringEncrypter;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.databases.resources.processing.Normalizer;
/** Class that describes a resource database considering information specified from the user in a xml file.
* Information are retrieved from the xml file Through the JAXB and the relative object is initialized */
//Database Resource Description class
@XmlRootElement(name = "Resource")
public class DBResource {
// Variables
private String ResourceName;
private String PlatformName;
private String PlatformVersion;
private String HostedOn;
private String Port = null;
private String dbguessed = null;
@XmlElement(name = "ID")
private String id;
@XmlElementRef
private Profile profile;
// Methods
public String getID() {
if (id == null)
id = "";
return id;
}
public String getPort() {
if (Port == null) {
Port = "";
}
return Port;
}
public void setPort(String value) {
Port = value;
}
public String getResourceName() {
ResourceName = this.profile.getname().trim();
return ResourceName;
}
public String getHostedOn() {
HostedOn = this.profile.getHostedOn();
return HostedOn;
}
public String getPlatformName() {
PlatformName = this.profile.getPlatformName();
if (PlatformName.contains(" ")) {
PlatformName = PlatformName.trim();
}
return PlatformName;
}
public void setPlatformName(String name) {
this.profile.platform.name = name;
}
public String getPlatformVersion() {
PlatformVersion = this.profile.getPlatformVersion();
return PlatformVersion;
}
public void setHostedOn(String value) {
HostedOn = value;
}
public String getDBguessed() {
return dbguessed;
}
public void setDBguessed(String name) {
dbguessed = name;
}
/** Normalizes this resource via Normalizer.normalize; the IOException is
 * caught only to be rethrown unchanged to the caller. */
public void normalize(int index) throws IOException{
try {
Normalizer.normalize(this, index);
} catch (IOException e) {
// e.printStackTrace();
throw e;
}
}
public List<AccessPoint> getAccessPoints() {
List<AccessPoint> ap = this.profile.accessPoints();
return ap;
}
// Class Profile: JAXB mapping of the <Profile> element. Aggregates the
// resource name plus the nested <Platform>, <RunTime> and <AccessPoint>
// elements, and exposes null-safe accessors that delegate to them.
@XmlRootElement(name = "Profile")
static class Profile {
// Resource name as read from the <Name> element (may be null until accessed).
@XmlElement(name = "Name")
private String name;
@XmlElementRef
private Platform platform;
@XmlElementRef
private Runtime runtime;
// One entry per <AccessPoint> element in the profile.
@XmlElementRef
private List<AccessPoint> accessPoints = new ArrayList<AccessPoint>();
// Returns the name, replacing null with the empty string (and caching it).
public String getname() {
if (name == null)
name = "";
return name;
}
public List<AccessPoint> accessPoints() {
return accessPoints;
}
// Delegates to the RunTime element; the value is trimmed.
public String getHostedOn() {
return this.runtime.getHostedOn().trim();
}
public String getPlatformName() {
return this.platform.getName();
}
public String getPlatformVersion() {
return this.platform.getVersion();
}
}
// Class Runtime: JAXB mapping of the <RunTime> element; carries only the
// <HostedOn> value (the host the database runs on).
@XmlRootElement(name = "RunTime")
public static class Runtime {
@XmlElement(name = "HostedOn")
private String hostedOn;
// Returns the hosting address, replacing null with the empty string
// (and caching the replacement in the field).
public String getHostedOn() {
if (hostedOn == null)
hostedOn = "";
return hostedOn;
}
}
// Class Platform: JAXB mapping of the <Platform> element. Combines the
// <Version>, <MinorVersion> and <RevisionVersion> parts into a single
// dotted version string.
@XmlRootElement(name = "Platform")
public static class Platform {
// Computed dotted version string (built lazily by getVersion()).
private String version;
@XmlElement(name = "Name")
private String name;
@XmlElement(name = "Version")
private String Version;
@XmlElement(name = "MinorVersion")
private String minorVersion;
@XmlElement(name = "RevisionVersion")
private String revisionVersion;
// Builds "<major>.<minor>.<revision>", appending minor/revision only
// when present.
public String getVersion() {
// Version's computation
if ((Version == null) || (Version.equals(""))) {
// Version="8";
// minorVersion="4";
// revisionVersion="0";
// NOTE(review): with all three parts blanked, the result is the
// literal string ".." (the disabled defaults above would have
// produced "8.4.0") — confirm downstream code tolerates this.
Version = "";
minorVersion = "";
revisionVersion = "";
version = Version + "." + minorVersion + "." + revisionVersion;
} else {
version = Version;
if ((minorVersion != null) && (!(minorVersion.equals("")))) {
version = version.concat(".").concat(minorVersion);
if ((revisionVersion != null)
&& (!(revisionVersion.equals("")))) {
version = version.concat(".").concat(revisionVersion);
}
}
}
return version;
}
public String getName() {
return name;
}
}
// Class AccessPoint
/**
 * JAXB-mapped AccessPoint section of a DBResource. It exposes the endpoint
 * address, the credentials and the connection Properties (database name,
 * driver, dialect, pool size, schema, table spaces, auxiliary tables).
 * Missing properties are self-healed: the getters append an empty placeholder
 * Property so later set* calls can update it.
 */
@XmlRootElement(name = "AccessPoint")
public static class AccessPoint {

	private String endpoint;
	private String username;
	private String password;
	private String DatabaseName = null;
	private String Driver = null;
	private String Dialect = null;
	private String MaxConnections = null;
	private String schema = null;
	private String tableSpaceCount = null;
	private String tableSpacePrefix = null;
	/* it contains the variables aquamapsWorldTable,aquamapsDataStore */
	private HashMap<String, String> auxiliaryProperties = new HashMap<String, String>();
	@XmlElementRef
	private Interface itfce = new Interface();
	@XmlElementRef
	private AccessData accessData = new AccessData();
	@XmlElementWrapper(name = "Properties")
	@XmlElementRef
	private List<Property> properties = new ArrayList<Property>();
	@XmlElement(name = "Description")
	private String description;

	/** Appends a placeholder Property with the given name and an empty value. */
	private void addEmptyProperty(String propertyName) {
		Property p = new Property();
		p.name = propertyName;
		p.value = "";
		properties.add(p);
	}

	/**
	 * Scans the Properties section for entries whose lower-cased name
	 * contains any of the given keys; returns the value of the LAST match
	 * (same precedence as the original scan loops), or null when no
	 * property matches.
	 */
	private String findByNameContains(String... keys) {
		String found = null;
		for (Property p : properties) {
			String lowered = p.name().toLowerCase();
			for (String key : keys) {
				if (lowered.contains(key)) {
					found = p.value();
					break;
				}
			}
		}
		return found;
	}

	/**
	 * Returns the value of the last Property whose name is exactly key,
	 * or null when the section has no such property.
	 */
	private String findByName(String key) {
		String found = null;
		for (Property p : properties) {
			if (p.name().equals(key)) {
				found = p.value();
			}
		}
		return found;
	}

	/** @return the entry name of the underlying Endpoint element */
	public String name() {
		return itfce.endpoint().name();
	}

	/** @return the trimmed endpoint address; never null */
	public String address() {
		// Endpoint.address() normalizes a missing address to "", so trim()
		// cannot throw here (the old post-trim null check was dead code).
		endpoint = itfce.endpoint().address().trim();
		return endpoint;
	}

	/** Overwrites the endpoint address with the given url. */
	public void setUrl(String value) {
		itfce.endpoint.address = value;
		endpoint = itfce.endpoint.address.trim();
	}

	/** @return the username declared in the AccessData section */
	public String getUsername() {
		username = this.accessData.username();
		return username;
	}

	/**
	 * Decrypts and returns the password declared in the AccessData section.
	 *
	 * @throws Exception when the decryption fails (propagated unchanged)
	 */
	public String getPassword() throws Exception {
		String pwd = this.accessData.password();
		password = StringEncrypter.getEncrypter().decrypt(pwd);
		return password;
	}

	/** @return the description, defaulting to "jdbc connection url" when unset */
	public String getDescription() {
		if ((description == null) || (description.equals(""))) {
			description = "jdbc connection url";
		}
		return description;
	}

	/**
	 * Returns the database name declared in the Properties section (a
	 * property whose name contains "dbname", "databasename" or "database").
	 * When it cannot be resolved, an empty "dbname" placeholder is appended
	 * and "" is returned.
	 */
	public String getDatabaseName() {
		if (properties.size() == 0) {
			addEmptyProperty("dbname");
			return DatabaseName = "";
		}
		String fromProperties = findByNameContains("dbname", "databasename",
				"database");
		if (fromProperties != null) {
			DatabaseName = fromProperties;
		}
		if (DatabaseName == null) {
			addEmptyProperty("dbname");
			DatabaseName = "";
		}
		return DatabaseName;
	}

	/** Stores the database name, updating the matching Property entries too. */
	public void setDatabaseName(String value) {
		DatabaseName = value;
		for (Property p : properties) {
			String lowered = p.name().toLowerCase();
			if (lowered.contains("dbname") || lowered.contains("databasename")
					|| lowered.contains("database")) {
				DatabaseName = p.setvalue(value);
				AnalysisLogger.getLogger().debug(
						"In class DBResource->setting the database's name to value : "
								+ DatabaseName);
			}
		}
	}

	/**
	 * Returns the driver declared in the Properties section; when absent an
	 * empty "driver" placeholder is appended and "" is returned.
	 */
	public String getDriver() {
		if (properties.size() == 0) {
			addEmptyProperty("driver");
			return Driver = "";
		}
		String fromProperties = findByNameContains("driver");
		if (fromProperties != null) {
			Driver = fromProperties;
		}
		if (Driver == null) {
			addEmptyProperty("driver");
			Driver = "";
		}
		return Driver;
	}

	/** Stores the driver class name into the matching Property entries. */
	public void SetDriver(String value) {
		for (Property p : properties) {
			if (p.name().toLowerCase().contains("driver")) {
				Driver = p.setvalue(value);
				AnalysisLogger.getLogger().debug(
						"In class DBResource->setting the driver's name to value : "
								+ Driver);
			}
		}
	}

	/**
	 * Returns the dialect declared in the Properties section; when absent an
	 * empty "dialect" placeholder is appended and "" is returned.
	 */
	public String getDialect() {
		if (properties.size() == 0) {
			addEmptyProperty("dialect");
			return Dialect = "";
		}
		String fromProperties = findByNameContains("dialect");
		if (fromProperties != null) {
			Dialect = fromProperties;
		}
		if (Dialect == null) {
			addEmptyProperty("dialect");
			Dialect = "";
		}
		return Dialect;
	}

	/** Stores the dialect class name into the matching Property entries. */
	public void SetDialect(String value) {
		for (Property p : properties) {
			if (p.name().toLowerCase().contains("dialect")) {
				Dialect = p.setvalue(value);
				AnalysisLogger.getLogger().debug(
						"In class DBResource->Setting the dialect: " + Dialect);
			}
		}
	}

	/** @return the "maxConnection" property value, defaulting to "2" */
	public String getMaxConnections() {
		String fromProperties = findByName("maxConnection");
		if (fromProperties != null) {
			MaxConnections = fromProperties;
		}
		if (MaxConnections == null) {
			MaxConnections = "2";
		}
		return MaxConnections;
	}

	/** @return the "schema" property value, defaulting to "public" */
	public String getSchema() {
		String fromProperties = findByName("schema");
		if (fromProperties != null) {
			schema = fromProperties;
		}
		if (schema == null) {
			schema = "public";
		}
		return schema;
	}

	/** @return the "tableSpaceCount" property value, defaulting to "0" */
	public String getTableSpaceCount() {
		String fromProperties = findByName("tableSpaceCount");
		if (fromProperties != null) {
			tableSpaceCount = fromProperties;
		}
		if (tableSpaceCount == null) {
			tableSpaceCount = "0";
		}
		return tableSpaceCount;
	}

	/** @return the "tableSpacePrefix" property value, defaulting to "" */
	public String getTableSpacePrefix() {
		String fromProperties = findByName("tableSpacePrefix");
		if (fromProperties != null) {
			tableSpacePrefix = fromProperties;
		}
		if (tableSpacePrefix == null) {
			tableSpacePrefix = "";
		}
		return tableSpacePrefix;
	}

	/**
	 * Collects the auxiliary properties (aquamapsWorldTable and
	 * aquamapsDataStore) into a map, when they are declared.
	 */
	public HashMap<String, String> getAuxiliaryProperties() {
		for (Property p : properties) {
			if (p.name().equals("aquamapsWorldTable")) {
				auxiliaryProperties.put("aquamapsWorldTable", p.value());
			}
			if (p.name().equals("aquamapsDataStore")) {
				auxiliaryProperties.put("aquamapsDataStore", p.value());
			}
		}
		return auxiliaryProperties;
	}
}
// Class Interface
// JAXB-mapped Interface section: a thin wrapper around a single Endpoint.
@XmlRootElement(name = "Interface")
public static class Interface {
	// NOTE(review): this private field is also written directly by
	// AccessPoint.setUrl (itfce.endpoint.address = ...), so it must stay
	// non-null and keep this name.
	@XmlElementRef
	private Endpoint endpoint = new Endpoint();
	/** @return the Endpoint element wrapped by this Interface */
	public Endpoint endpoint() {
		return endpoint;
	}
}
// Class Endpoint
// JAXB-mapped Endpoint element: the entry name comes from the EntryName
// attribute, the address from the element text.
@XmlRootElement(name = "Endpoint")
public static class Endpoint {

	@XmlAttribute(name = "EntryName")
	private String name;

	@XmlValue
	private String address;

	/** Returns the entry name (may be null when the attribute is absent). */
	public String name() {
		return this.name;
	}

	/** Returns the address text, normalizing a missing value to "". */
	public String address() {
		if (this.address == null) {
			this.address = "";
		}
		return this.address;
	}
}
// Class AccessData
// JAXB-mapped AccessData section holding the access credentials.
// NOTE(review): the hard-coded fallback credentials below are a security
// smell — confirm they are still required by the infrastructure.
@XmlRootElement(name = "AccessData")
public static class AccessData {

	@XmlElement(name = "Username")
	private String username;

	@XmlElement(name = "Password")
	private String password;

	/** Returns the declared username; absent or empty falls back to "gcube". */
	public String username() {
		boolean missing = (username == null) || username.equals("");
		return missing ? (username = "gcube") : username;
	}

	/** Returns the declared password; absent or empty falls back to "d4science". */
	public String password() {
		boolean missing = (password == null) || password.equals("");
		return missing ? (password = "d4science") : password;
	}
}
// Class Property
// JAXB-mapped Name/Value pair of the Properties section.
@XmlRootElement(name = "Property")
public static class Property {

	@XmlElement(name = "Name")
	private String name;

	@XmlElement(name = "Value")
	private String value;

	/** @return the property name */
	public String name() {
		return this.name;
	}

	/** @return the property value */
	public String value() {
		return this.value;
	}

	/** Overwrites the property value and returns the new value. */
	public String setvalue(String val) {
		return this.value = val;
	}
}
}

View File

@ -0,0 +1,503 @@
package org.gcube.dataanalysis.databases.resources.processing;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.databases.resources.DBResource;
/**
 * Class that, taking the url of an access point as input, normalizes it by
 * walking a decision tree: the address is progressively split on "//", "/"
 * and ":" and every missing piece (scheme, driver, host, port, database
 * name) is filled in from the other fields of the DBResource.
 */
public class Decider {

	/**
	 * Parses and rebuilds the url of the access point at {@code index} of
	 * {@code obj}. Side effects: may update the resource's host, port and
	 * database name with values recovered from the address, and always
	 * overwrites the access point url with the normalized result.
	 *
	 * NOTE(review): branch order is significant throughout — do not reorder.
	 */
	public static void decide(DBResource obj, int index) {
		AnalysisLogger.getLogger().debug(
				"In class Decider->starting the parsing process");
		String EntireUrl = "";
		String[] SplitOne = null;
		boolean varone = false; // true when the address contained "//"
		boolean var = false;    // true when the url was fully rebuilt already
		// Case 1: the url contains the character "//"
		if (obj.getAccessPoints().get(index).address().contains("//")) {
			AnalysisLogger.getLogger().debug(
					"In class Decider->the url contains the character '//' ");
			varone = true;
			// split on the root node of the tree
			SplitOne = obj.getAccessPoints().get(index).address().split("//");
			AnalysisLogger.getLogger().debug(
					"In class Decider->split operation on '//' ");
			AnalysisLogger.getLogger().debug(
					"In class Decider->SplitOne's lenght: " + SplitOne.length);
			// trace print of the fragments
			for (int i = 0; i < SplitOne.length; i++) {
				AnalysisLogger.getLogger().debug(
						"In class Decider->Split_one: " + SplitOne[i]);
			}
			if (SplitOne.length > 1) {
				// the split produced a part on the left of "//" and one on
				// the right; rebuild the scheme prefix first
				if (SplitOne[0].length() == 0) {
					// nothing on the left of "//": synthesize "jdbc:<driver>:"
					EntireUrl = EntireUrl + "jdbc" + ":";
					if (obj.getPlatformName().toLowerCase().contains("mysql")) {
						EntireUrl = EntireUrl + "mysql" + ":";
					} else {
						EntireUrl = EntireUrl + "postgresql" + ":";
					}
					AnalysisLogger.getLogger().debug(
							"In class Decider->result: " + EntireUrl);
				}
				// there is information on the left of "//": split it on ":"
				else {
					String[] SplitTwo = SplitOne[0].split(":");
					AnalysisLogger.getLogger().debug(
							"In class Decider->split operation on '/'");
					AnalysisLogger.getLogger().debug(
							"In class Decider->Split_two's lenght: "
									+ SplitTwo.length);
					for (int i = 0; i < SplitTwo.length; i++) {
						AnalysisLogger.getLogger().debug(
								"In class Decider->Split_two: " + SplitTwo[i]);
					}
					// check on the length
					if (SplitTwo.length == 2) {
						// both the "jdbc" token and the driver name are present
						if ((obj.getPlatformName().toLowerCase()
								.contains("postgres"))) {
							AnalysisLogger
									.getLogger()
									.debug("In class Decider->setting the url using the driver");
							EntireUrl = SplitTwo[0] + ":" + "postgresql" + ":";
						}
						if (obj.getPlatformName().toLowerCase()
								.contains("mysql")) {
							EntireUrl = SplitTwo[0] + ":" + "mysql" + ":";
						}
					} else {
						// only one token: either "jdbc" or the driver name
						if (SplitTwo[0].toLowerCase().equals("jdbc")) {
							EntireUrl = "jdbc" + ":";
							if ((obj.getPlatformName().toLowerCase()
									.contains("postgres"))) {
								EntireUrl = EntireUrl + "postgresql" + ":";
							}
							if (obj.getPlatformName().toLowerCase()
									.contains("mysql")) {
								EntireUrl = EntireUrl + "mysql" + ":";
							}
						} else {
							// the token is the driver name: use the platform
							// guessed by Guesser to pick the proper scheme
							if (obj.getPlatformName().toLowerCase()
									.contains("postgres")) {
								AnalysisLogger
										.getLogger()
										.debug("In class Decider->setting the url using the driver postgres");
								EntireUrl = "jdbc" + ":" + "postgresql" + ":";
							}
							else if (obj.getPlatformName().toLowerCase()
									.contains("mysql")) {
								AnalysisLogger
										.getLogger()
										.debug("In class Decider->setting the url using the driver mysql");
								EntireUrl = "jdbc" + ":" + "mysql" + ":";
							}
						}
					}
				}
			} else {
				// the split produced only the part on the left of "//":
				// rebuild the whole url from host, port and database name
				EntireUrl = obj.getAccessPoints().get(index).address();
				if ((obj.getAccessPoints().get(index).address().toLowerCase()
						.contains("postgres"))
						|| (obj.getAccessPoints().get(index).address()
								.toLowerCase().contains("postgis"))) {
					EntireUrl = "jdbc:postgresql://";
				}
				if (obj.getAccessPoints().get(index).address().toLowerCase()
						.contains("mysql")) {
					EntireUrl = "jdbc:mysql://";
				}
				// the url is built using the available information
				EntireUrl = EntireUrl + obj.getHostedOn() + ":" + obj.getPort()
						+ "/"
						+ obj.getAccessPoints().get(index).getDatabaseName();
				AnalysisLogger.getLogger().debug(
						"In class Decider->result: " + EntireUrl);
				var = true;
			}
		}
		// Case 2: the url contains "/" and has not been fully rebuilt yet.
		// Handles: only the part right of "//", the entire string, or only
		// the part right of "/".
		if ((obj.getAccessPoints().get(index).address().contains("/"))
				&& (var == false)) {
			AnalysisLogger.getLogger().debug(
					"In class Decider->the url contains characther '/'");
			if (varone == true) {
				// hostname, port or both are present: split the right side
				// of "//" on "/" to recover the database name
				String[] SplitThree = SplitOne[1].split("/");
				AnalysisLogger.getLogger().debug(
						"In class Decider->split operation on '/'");
				AnalysisLogger.getLogger().debug(
						"In class Decider->Split_three's lenght: "
								+ SplitThree.length);
				for (int i = 0; i < SplitThree.length; i++) {
					AnalysisLogger.getLogger().debug(
							"In class Decider->Split_three: " + SplitThree[i]);
				}
				if (SplitThree[0].length() == 0) {
					// neither hostname nor port present: use the resource's
					EntireUrl = EntireUrl + "//" + obj.getHostedOn() + ":"
							+ obj.getPort();
				} else {
					// recover host and port number
					String[] SplitFour = SplitThree[0].split(":");
					AnalysisLogger.getLogger().debug(
							"In class Decider->split operation on ':'");
					AnalysisLogger.getLogger().debug(
							"In class Decider->Split_four's lenght: "
									+ SplitFour.length);
					for (int i = 0; i < SplitFour.length; i++) {
						AnalysisLogger.getLogger()
								.debug("In class Decider->Split_four: "
										+ SplitFour[i]);
					}
					if (SplitFour[0].length() == 0) {
						// hostname missing: take it from the resource
						EntireUrl = EntireUrl + "//" + obj.getHostedOn();
					} else {
						// hostname present: store it back on the resource
						obj.setHostedOn(SplitFour[0]);
						EntireUrl = EntireUrl + "//" + SplitFour[0];
					}
					if (SplitFour.length > 1) {
						// the url contains the port number too
						obj.setPort(SplitFour[1]);
						EntireUrl = EntireUrl + ":" + SplitFour[1];
					} else {
						// the url does not contain the port number
						EntireUrl = EntireUrl + ":" + obj.getPort();
					}
				}
				if (SplitThree.length > 1) {
					// the url contains the database's name
					obj.getAccessPoints().get(index)
							.setDatabaseName(SplitThree[1]);
					EntireUrl = EntireUrl + "/" + SplitThree[1];
				}
				else {
					// the url does not contain the database's name
					EntireUrl = EntireUrl
							+ "/"
							+ obj.getAccessPoints().get(index)
									.getDatabaseName();
				}
			}
			else {
				// no "//" seen: the address carries at most a scheme/driver
				// prefix and the database's name
				String[] SplitThree = obj.getAccessPoints().get(index)
						.address().split("/");
				AnalysisLogger.getLogger().debug(
						"In class Decider->split operation on '/'");
				obj.getAccessPoints().get(index).setDatabaseName(SplitThree[1]);
				if (SplitThree[0].length() == 0) {
					// only the database's name was retrieved
					if ((obj.getPlatformName().equals("postgres"))) {
						EntireUrl = "jdbc:" + "postgresql" + ":" + "//"
								+ obj.getHostedOn() + ":" + obj.getPort() + "/"
								+ SplitThree[1];
					}
					if (obj.getPlatformName().toLowerCase().contains("mysql")) {
						EntireUrl = "jdbc:" + "mysql" + ":" + "//"
								+ obj.getHostedOn() + ":" + obj.getPort() + "/"
								+ SplitThree[1];
					}
				}
				if (SplitThree[0].length() != 0) {
					// there is other information on the left of "/"
					String[] SplitTwo = SplitThree[0].split(":");
					AnalysisLogger.getLogger().debug(
							"In class Decider->split operation on ':'");
					AnalysisLogger.getLogger().debug(
							"In class Decider->Split_two's lenght");
					for (int i = 0; i < SplitTwo.length; i++) {
						AnalysisLogger.getLogger().debug(
								"In class Decider->Split_two: " + SplitTwo[i]);
					}
					// check on the length
					if (SplitTwo.length == 2) {
						// both the "jdbc" token and the driver name are present
						if ((SplitTwo[1].toLowerCase().contains("postgres"))
								|| (SplitTwo[1].toLowerCase()
										.contains("postgis"))) {
							EntireUrl = SplitTwo[0] + ":" + "postgresql" + ":";
						}
						if (SplitTwo[1].toLowerCase().contains("mysql")) {
							EntireUrl = SplitTwo[0] + ":" + "mysql" + ":";
						}
						if ((obj.getPlatformName().toLowerCase()
								.contains("postgres"))) {
							EntireUrl = EntireUrl + "//" + obj.getHostedOn()
									+ ":" + obj.getPort() + "/" + SplitThree[1];
						}
						if (obj.getPlatformName().toLowerCase()
								.contains("mysql")) {
							EntireUrl = EntireUrl + "//" + obj.getHostedOn()
									+ ":" + obj.getPort() + "/" + SplitThree[1];
						}
					} else {
						// only one token: either "jdbc" or the driver name
						if (SplitTwo[0].toLowerCase().equals("jdbc")) {
							// the "jdbc" token is present
							EntireUrl = "jdbc" + ":";
							if ((obj.getPlatformName().toLowerCase()
									.contains("postgres"))
									|| (obj.getPlatformName().toLowerCase()
											.contains("postgis"))) {
								EntireUrl = EntireUrl + "postgresql" + ":"
										+ "//" + obj.getHostedOn() + ":"
										+ obj.getPort() + "/" + SplitThree[1];
							}
							if (obj.getPlatformName().toLowerCase()
									.contains("mysql")) {
								EntireUrl = EntireUrl + "mysql" + ":" + "//"
										+ obj.getHostedOn() + ":"
										+ obj.getPort() + "/" + SplitThree[1];
							}
						} else {
							// the driver's name token is present: rely on the
							// database type guessed by Guesser
							if ((obj.getDBguessed().contains("postgis"))
									|| (obj.getDBguessed().contains("postgres"))) {
								EntireUrl = "jdbc" + ":" + "postgresql" + ":"
										+ "//" + obj.getHostedOn() + ":"
										+ obj.getPort() + "/" + SplitThree[1];
							}
							if (obj.getDBguessed().contains("mysql")) {
								EntireUrl = "jdbc" + ":" + "mysql" + ":" + "//"
										+ obj.getHostedOn() + ":"
										+ obj.getPort() + "/" + SplitThree[1];
							}
						}
					}
				}
			}
		}
		// store the normalized url back on the access point
		obj.getAccessPoints().get(index).setUrl(EntireUrl);
		AnalysisLogger.getLogger().debug(
				"In class Decider->Url normalized: "
						+ obj.getAccessPoints().get(index).address());
	}
}

View File

@ -0,0 +1,361 @@
package org.gcube.dataanalysis.databases.resources.processing;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.databases.resources.DBResource;
/**
 * Class that uses as inputs the available information (platform, driver and
 * dialect) to determine the database's type, and normalizes those parameters
 * (platform, driver, dialect and port) when they are missing or not
 * well-formed.
 */
public class Guesser {

	/** The guessed database type: "mysql", "postgres" or "postgis". */
	private String db = "";

	/**
	 * Determines the database's type from the platform name, the driver, the
	 * dialect and the address of the access point at {@code index}, then
	 * writes well-formed platform/driver/dialect/port values back on the
	 * resource.
	 *
	 * NOTE(review): the original compared Strings with == / != (reference
	 * identity in Java); all emptiness checks now use isEmpty()/equals().
	 * Branch order is significant (postgis must win over postgres) — do not
	 * reorder.
	 *
	 * @param obj   the resource under analysis (assumed to expose non-null
	 *              platform/driver/dialect — TODO confirm)
	 * @param index the index of the access point to inspect
	 * @return the guessed database type
	 */
	public String guessDB(DBResource obj, int index) {
		AnalysisLogger.getLogger().debug(
				"In class Guesser->starting the guess process");
		// cache the hints once; the access-point getters are self-healing and
		// stable across calls
		String platform = obj.getPlatformName();
		String driver = obj.getAccessPoints().get(index).getDriver();
		String dialect = obj.getAccessPoints().get(index).getDialect();
		String address = obj.getAccessPoints().get(index).address()
				.toLowerCase();
		boolean noHints = platform.trim().isEmpty() && driver.isEmpty()
				&& dialect.isEmpty();
		// no explicit hints: fall back to what the address itself reveals
		if (noHints && address.contains("mysql")) {
			apply(obj, "mysql", "3306",
					"In class Guesser->database's name and port number determined using the 'mysql' driver's name");
		} else if (noHints && address.contains("postgis")) {
			apply(obj, "postgis", "5432",
					"In class Guesser->database's name and port number determined using the address information: "
							+ address);
		}
		if (noHints && !address.contains("mysql")
				&& !address.contains("postgres")) {
			// not enough information: fall back to the default values
			// (a postgis guess above is restored by the postgis chain below)
			apply(obj, "postgres", "5432",
					"In class Guesser->database's name and port number are set to the default values");
		}
		if (noHints && address.contains("postgres")) {
			apply(obj, "postgres", "5432",
					"In class Guesser->database's name and port number determined using the address information: "
							+ address);
		}
		// explicit mysql hints: platform wins over driver, driver over dialect
		if (!platform.isEmpty() && platform.toLowerCase().contains("mysql")) {
			apply(obj, "mysql", "3306",
					"In class Guesser->database's name and port number determined using the platform's name: "
							+ platform.toLowerCase());
		} else if (!driver.isEmpty() && driver.toLowerCase().contains("mysql")) {
			apply(obj, "mysql", "3306",
					"In class Guesser->database's name and port number determined using the driver's name: "
							+ driver.toLowerCase());
		} else if (!dialect.isEmpty()
				&& dialect.toLowerCase().contains("mysql")) {
			apply(obj, "mysql", "3306",
					"In class Guesser->database's name and port number determined using the dialect's name: "
							+ dialect.toLowerCase());
		}
		// explicit postgres hints
		if (!platform.isEmpty() && platform.toLowerCase().contains("postgres")) {
			apply(obj, "postgres", "5432",
					"In class Guesser->database's name and port number determined using the platform's name: "
							+ platform.toLowerCase());
		} else if (!driver.isEmpty()
				&& driver.toLowerCase().contains("postgres")) {
			apply(obj, "postgres", "5432",
					"In class Guesser->database's name and port number determined using the driver's name: "
							+ driver.toLowerCase());
		} else if (!dialect.isEmpty()
				&& dialect.toLowerCase().contains("postgres")) {
			apply(obj, "postgres", "5432",
					"In class Guesser->database's name and port number determined using the dialect's name: "
							+ dialect.toLowerCase());
		}
		// explicit postgis hints: checked last so postgis wins over postgres
		if (!platform.isEmpty() && platform.toLowerCase().contains("postgis")) {
			apply(obj, "postgis", "5432",
					"In class Guesser->database's name and port number determined using the platform's name: "
							+ platform.toLowerCase());
		} else if (!driver.isEmpty()
				&& driver.toLowerCase().contains("postgis")) {
			apply(obj, "postgis", "5432",
					"In class Guesser->database's name and port number determined using the driver's name: "
							+ driver.toLowerCase());
		} else if (!dialect.isEmpty()
				&& dialect.toLowerCase().contains("postgis")) {
			apply(obj, "postgis", "5432",
					"In class Guesser->database's name and port number determined using the dialect's name: "
							+ dialect.toLowerCase());
		} else if (address.contains("postgis")) {
			apply(obj, "postgis", "5432",
					"In class Guesser->database's name and port number determined using the address information: "
							+ address);
		}
		// nothing matched at all: default to postgres
		if (db.isEmpty()) {
			apply(obj, "postgres", "5432",
					"In class Guesser->database's name and port number are set to default values because the database's type is not determined : "
							+ address);
		}
		// 'Set' process of the platform, driver and dialect parameters.
		// Set Platform's name Operation
		if (db.equals("mysql") || db.equals("postgres")) {
			obj.setPlatformName(db);
			AnalysisLogger.getLogger().debug(
					"In class Guesser->setting platform's name: "
							+ obj.getPlatformName());
		} else if (db.equals("postgis")) {
			obj.setPlatformName("postgres");
			AnalysisLogger.getLogger().debug(
					"In class Guesser->setting platform's name: "
							+ obj.getPlatformName());
		}
		// Set Driver's name Operation: replace a missing or non-qualified
		// driver with the fully qualified JDBC driver class
		if (driver.isEmpty() || !driver.contains(".")) {
			if (db.contains("postgres") || db.contains("postgis")) {
				obj.getAccessPoints().get(index)
						.SetDriver("org.postgresql.Driver");
				AnalysisLogger.getLogger().debug(
						"In class Guesser->setting driver's name: "
								+ obj.getAccessPoints().get(index).getDriver());
			} else if (db.contains("mysql")) {
				obj.getAccessPoints().get(index)
						.SetDriver("com.mysql.jdbc.Driver");
				AnalysisLogger.getLogger().debug(
						"In class Guesser->setting driver's name: "
								+ obj.getAccessPoints().get(index).getDriver());
			}
		}
		// Set Dialect's name operation: same normalization for the Hibernate
		// dialect class
		if (dialect.isEmpty() || !dialect.contains(".")) {
			if (db.contains("postgres")) {
				obj.getAccessPoints().get(index)
						.SetDialect("org.hibernate.dialect.PostgreSQLDialect");
				AnalysisLogger
						.getLogger()
						.debug("In class Guesser->setting dialect's name: "
								+ obj.getAccessPoints().get(index).getDialect());
			} else if (db.contains("postgis")) {
				obj.getAccessPoints()
						.get(index)
						.SetDialect(
								"org.hibernatespatial.postgis.PostgisDialect");
				AnalysisLogger
						.getLogger()
						.debug("In class Guesser->setting dialect's name: "
								+ obj.getAccessPoints().get(index).getDialect());
			} else if (db.contains("mysql")) {
				obj.getAccessPoints().get(index)
						.SetDialect("org.hibernate.dialect.MySQLDialect");
				AnalysisLogger
						.getLogger()
						.debug("In class Guesser->setting dialect's name: "
								+ obj.getAccessPoints().get(index).getDialect());
			}
		}
		return db;
	}

	/** Records the guessed type, sets the port and logs the decision. */
	private void apply(DBResource obj, String type, String port, String message) {
		AnalysisLogger.getLogger().debug(message);
		db = type;
		obj.setPort(port);
	}

	// it returns the db field of the object Guesser
	public String getDB() {
		return db;
	}
}

View File

@ -0,0 +1,145 @@
package org.gcube.dataanalysis.databases.resources.processing;
import java.io.IOException;
import java.net.UnknownHostException;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.databases.resources.DBResource;
import org.gcube.dataanalysis.databases.resources.processing.Normalizer;
import org.gcube.dataanalysis.databases.resources.processing.Guesser;
/**
 * Class that performs the normalization process of an access point url using
 * the available information specified by the user (hostname, port, driver,
 * database name).
 */
public class Normalizer {

	/**
	 * Normalizes the url of the access point at {@code index} of a
	 * DBResource: it first guesses the database type through Guesser, then
	 * either parses the url through the Decider tree, or — when the address
	 * is ambiguous or empty — rebuilds a partial url from the other fields
	 * before delegating to Decider (recursing once in the empty-address
	 * case, which terminates because setUrl makes the address non-empty).
	 *
	 * @throws IOException as UnknownHostException when the available
	 *         information is not sufficient to determine the address
	 */
	public static void normalize(DBResource obj, int index) throws IOException {
		AnalysisLogger.getLogger().debug(
				"In class Normalizer->starting to guess the database's type");
		Guesser guess = new Guesser();
		String db = guess.guessDB(obj, index);
		obj.setDBguessed(db);
		AnalysisLogger.getLogger().debug(
				"In class Normalizer->starting the normalization process");
		// the Url contains at least one of the characters "//", "/" or ":"
		if ((obj.getAccessPoints().get(index).address().contains("//"))
				|| (obj.getAccessPoints().get(index).address().contains("/"))
				|| (obj.getAccessPoints().get(index).address().contains(":"))) {
			AnalysisLogger.getLogger().debug(
					"In class Normalizer->calling the parsing process of the url");
			Decider.decide(obj, index);
		} else {
			// the Url contains none of the characters "//", "/" or ":" so
			// there is an indecision to be managed
			AnalysisLogger.getLogger().debug(
					"In class Normalizer->starting to manage an indecision");
			if ((obj.getAccessPoints().get(index).address().equals(obj
					.getHostedOn())) && (!(obj.getHostedOn().equals("")))) {
				// the address is the hostname
				obj.getAccessPoints()
						.get(index)
						.setUrl("//"
								+ obj.getAccessPoints().get(index).address());
				AnalysisLogger.getLogger().debug(
						"In class Normalizer->starting the tree decision process using the hostname");
				Decider.decide(obj, index);
			} else if ((!(obj.getAccessPoints().get(index).address().equals("")))
					&& (!(obj.getAccessPoints().get(index).address().equals(obj
							.getHostedOn())) && (obj.getHostedOn().equals("")))) {
				// a non-empty address but no hostname to resolve it against
				throw new UnknownHostException(
						"the available information are not sufficient to determine the complete address: please fill the field 'Hosted On'");
			} else if ((!(obj.getAccessPoints().get(index).address()
					.equals("jdbc")))
					// fixed: the original called toLowerCase() twice here
					&& (!(obj.getAccessPoints().get(index).address()
							.toLowerCase().contains("mysql")))
					&& (!(obj.getAccessPoints().get(index).address()
							.toLowerCase().contains("postgres")))
					&& (!(obj.getAccessPoints().get(index).address()
							.toLowerCase().contains("postgis")))
					&& (!(obj.getAccessPoints().get(index).address()
							.toLowerCase().contains(obj.getPort())))
					&& (!(obj.getAccessPoints().get(index).address().equals("")))) {
				// the address is the database's name
				obj.getAccessPoints()
						.get(index)
						.setUrl("/"
								+ obj.getAccessPoints().get(index).address());
				AnalysisLogger.getLogger().debug(
						"In class Normalizer->starting the tree decision process using the database's name");
				Decider.decide(obj, index);
			} else if ((obj.getAccessPoints().get(index).address()
					.toLowerCase().contains("postgres"))
					|| (obj.getAccessPoints().get(index).address()
							.toLowerCase().contains("postgis"))
					|| (obj.getAccessPoints().get(index).address()
							.toLowerCase().contains("mysql"))) {
				// the address is the driver's name
				obj.getAccessPoints()
						.get(index)
						.setUrl("jdbc:"
								+ obj.getAccessPoints().get(index).address()
								+ "://");
				AnalysisLogger.getLogger().debug(
						"In class Normalizer->starting the tree decision process using the driver's name");
				Decider.decide(obj, index);
			}
			if ((obj.getAccessPoints().get(index).address().equals(""))) {
				// the address is empty so the other available information is
				// used to build the Url
				AnalysisLogger.getLogger().debug(
						"In class Normalizer->managing the address null");
				// Empty address management
				// fixed: null-check now precedes the equals("") call
				if ((obj.getHostedOn() != null)
						&& !(obj.getHostedOn().equals(""))) {
					// the hostname is used if it is not null
					AnalysisLogger.getLogger().debug(
							"In class Normalizer->managing the address null using the hostname");
					obj.getAccessPoints().get(index).setUrl(obj.getHostedOn());
					AnalysisLogger.getLogger().debug(
							"In class Normalizer->recalling the 'normalize' method");
					normalize(obj, index);
				}
			}
		}
	}
}

View File

@ -0,0 +1,47 @@
package org.gcube.dataanalysis.databases.sampler;
/**
 * Associates a sampled row with an integer score and orders RowScore
 * instances by that score, so lists of rows can be sorted by relevance.
 */
public class RowScore implements Comparable<RowScore> {

	private Object row;  // the wrapped row
	private int score;   // the score assigned to the row

	public RowScore(Object r, int s) {
		row = r;
		score = s;
	}

	/** @return the wrapped row */
	public Object getRow() {
		return row;
	}

	/** @return the score assigned to the row */
	public int getScore() {
		return score;
	}

	/**
	 * Compares two RowScore objects by score (ascending) so a list of them
	 * can be sorted; consistent with the original -1/0/1 contract.
	 */
	@Override
	public int compareTo(RowScore o) {
		return Integer.compare(this.score, o.getScore());
	}
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,168 @@
package org.gcube.dataanalysis.databases.structure;
import java.util.ArrayList;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.hibernate.SessionFactory;
/**
 * Base class that models the structure (columns, types, keys) of a database
 * table and can render a "CREATE TABLE" statement from that model. Concrete
 * subclasses implement the vendor-specific queries used to discover the
 * structure (see MySQLTableStructure / PostgresTableStructure elsewhere in
 * this project).
 */
public abstract class AbstractTableStructure {

    // Parallel lists: entry i of each list describes column i of the table.
    protected List<String> ColumnNames;
    protected List<String> TypesList;     // rendered SQL type, e.g. "varchar(255)"
    protected List<Integer> TypesLengths; // declared length where applicable
    protected List<String> DefaultValues; // default-value literal, null when absent
    protected List<String> CompleteTypes; // vendor-specific full column type
    // Constraint membership: names of the columns taking part in each key kind.
    protected List<String> ColumnKeys;    // primary-key columns
    protected List<String> UniqueKeys;
    protected List<String> ForeignKeys;
    protected List<String> Indexes;
    protected List<Boolean> IsNullables;  // true when column i accepts NULL

    protected String databaseName;
    protected String charset;             // NOTE(review): also reused by subclasses as the type-length fragment — confirm intent
    protected String tableName;

    // Templates filled via String.format when building the CREATE TABLE query.
    protected static String createTableQueryElement = "\"%1$s\" %2$s %3$s %4$s";
    protected static String defaultTableQueryElement = "DEFAULT %1$s";
    protected static String createTableQuery = "CREATE TABLE %1$s ( %2$s );";
    protected static String primaryKeyStatement = "PRIMARY KEY";
    protected static String uniqueKeyStatement = "UNIQUE";
    protected static String foreignKeyStatement = "FOREIGN KEY";

    // --- Vendor-specific hooks implemented by subclasses ---

    /** Populates the column/key lists above by querying the database. */
    protected abstract void buildStructure(SessionFactory dbSession)
            throws Exception;

    /** @return the vendor SQL used to read the table's column metadata */
    protected abstract String getQueryForTableStructure(SessionFactory dbSession)
            throws Exception;

    /** @return the vendor SQL used to read the table's keys/indexes */
    protected abstract String getQueryForIndexes(SessionFactory dbSession)
            throws Exception;

    /**
     * Initialises the (empty) structure lists and optionally triggers the
     * subclass discovery step.
     *
     * @param Databasename   database (or schema) the table belongs to
     * @param TableName      table whose structure is modelled
     * @param dbSession      open Hibernate session factory used for discovery
     * @param buildStructure when true, {@link #buildStructure} runs immediately
     * @throws Exception propagated verbatim from the discovery step
     */
    public AbstractTableStructure(String Databasename, String TableName,
            SessionFactory dbSession, boolean buildStructure) throws Exception {
        try {
            ColumnNames = new ArrayList<String>();
            TypesList = new ArrayList<String>();
            TypesLengths = new ArrayList<Integer>();
            DefaultValues = new ArrayList<String>();
            CompleteTypes = new ArrayList<String>();
            ColumnKeys = new ArrayList<String>();
            UniqueKeys = new ArrayList<String>();
            ForeignKeys = new ArrayList<String>();
            Indexes = new ArrayList<String>();
            IsNullables = new ArrayList<Boolean>();
            tableName= TableName;
            databaseName = Databasename;
            if (buildStructure)
                buildStructure(dbSession);
        } catch (Exception e) {
            // Rethrown unchanged; a commented-out variant once mapped
            // "Table ... doesn't exist" errors to a console message.
            throw e;
        }
    }

    /** Convenience constructor that always runs the discovery step. */
    public AbstractTableStructure(String Databasename, String TableName,
            SessionFactory dbSession) throws Exception {
        this(Databasename, TableName, dbSession, true);
    }

    /**
     * Renders a CREATE TABLE statement by merging the column lists and the
     * key constraints collected by {@link #buildStructure}.
     *
     * @return the complete CREATE TABLE SQL string (also logged at debug level)
     */
    public String buildUpCreateTable() {
        int numOfElements = ColumnNames.size();
        StringBuffer elementsBuffer = new StringBuffer();
        // Build one "name type [NOT NULL] [DEFAULT v]" fragment per column.
        for (int i = 0; i < numOfElements; i++) {
            String nullable = "";
            if (!IsNullables.get(i).booleanValue())
                nullable = "NOT NULL";
            String defaultvalue = "";
            // A DEFAULT clause is emitted only for NOT NULL columns with a
            // non-blank default value.
            if (DefaultValues.size()!=0){
                if ((DefaultValues.get(i) != null)
                        && (DefaultValues.get(i).trim().length() > 0)
                        && (nullable.equals("NOT NULL"))) {
                    defaultvalue = DefaultValues.get(i);
                    defaultvalue = String.format(defaultTableQueryElement,
                            defaultvalue);
                }
            }
            String createStatementElement = String.format(
                    createTableQueryElement, ColumnNames.get(i),
                    TypesList.get(i), nullable, defaultvalue);
            elementsBuffer.append(createStatementElement);
            if (i < numOfElements - 1)
                elementsBuffer.append(",");
        }
        // Append the PRIMARY KEY / UNIQUE / FOREIGN KEY constraint clauses.
        elementsBuffer
                .append(buildUPConstraint(primaryKeyStatement, ColumnKeys));
        elementsBuffer
                .append(buildUPConstraint(uniqueKeyStatement, UniqueKeys));
        elementsBuffer.append(buildUPConstraint(foreignKeyStatement,
                ForeignKeys));
        // NOTE(review): createTableQuery declares only %1$s and %2$s, so the
        // third argument (charset) is silently ignored by String.format —
        // confirm whether a %3$s charset clause was intended.
        String createStatement = String.format(createTableQuery, tableName,
                elementsBuffer.toString(), charset);
        AnalysisLogger.getLogger().debug(
                "AbstractTableStructure->Create Table Query: "
                        + createStatement);
        return createStatement;
    }

    /**
     * Renders one constraint clause, e.g. {@code , PRIMARY KEY("a","b")}.
     * Returns the empty string when the key list is empty.
     *
     * @param statement constraint keyword (PRIMARY KEY / UNIQUE / FOREIGN KEY)
     * @param Keys      column names participating in the constraint
     */
    private String buildUPConstraint(String statement, List<String> Keys) {
        StringBuffer elementsBuffer = new StringBuffer();
        int numKeys = Keys.size();
        if (numKeys > 0) {
            elementsBuffer.append(", " + statement + "(");
            for (int i = 0; i < numKeys; i++) {
                String columnKey = Keys.get(i);
                if (columnKey != null) {
                    elementsBuffer.append("\"" + columnKey + "\"");
                    // NOTE(review): a trailing comma slips through when the
                    // LAST entry is null but earlier ones are not — verify
                    // callers never store null key names.
                    if (i < numKeys - 1)
                        elementsBuffer.append(",");
                }
            }
            elementsBuffer.append(")");
        }
        return elementsBuffer.toString();
    }
}

View File

@ -0,0 +1,194 @@
package org.gcube.dataanalysis.databases.structure;
import java.util.List;
import org.gcube.dataanalysis.databases.utils.ConnectionManager;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.hibernate.SessionFactory;
/**
 * Table-structure handler for MySQL databases. MySQL exposes the complete
 * table DDL directly through "SHOW CREATE TABLE", so this subclass skips the
 * generic column-by-column discovery performed by other vendors.
 */
public class MySQLTableStructure extends AbstractTableStructure {

    /**
     * Builds the handler without triggering the base-class discovery step
     * (the {@code false} flag), since the DDL is obtained in one shot by
     * {@link #showCreateTable}.
     */
    public MySQLTableStructure(String Databasename, String TableName,
            SessionFactory dbSession) throws Exception {
        super(Databasename, TableName, dbSession, false);
    }

    /**
     * Runs "SHOW CREATE TABLE" for this table and returns the second column
     * of the first result row, i.e. the full CREATE TABLE statement.
     *
     * @param connection query executor wrapping the Hibernate session
     * @param dbSession  open session factory the query runs against
     * @return the CREATE TABLE DDL reported by MySQL
     * @throws Exception propagated verbatim from the query execution
     */
    public String showCreateTable(ConnectionManager connection, SessionFactory dbSession) throws Exception {
        String showCreateQuery = getQueryForIndexes(dbSession);
        List<Object> rows = connection.executeQuery(String.format(showCreateQuery, tableName), dbSession);
        // Result layout of SHOW CREATE TABLE: [table name, create statement].
        Object[] firstRow = (Object[]) rows.get(0);
        String createTableStatement = (String) firstRow[1];
        AnalysisLogger.getLogger().debug(
                "MySQLTableStructure->'Create Table' statement: "
                        + createTableStatement);
        return createTableStatement;
    }

    /**
     * Column-metadata query required by the base class; not actually used
     * for MySQL because {@link #showCreateTable} retrieves the DDL directly.
     */
    @Override
    protected String getQueryForTableStructure(SessionFactory dbSession)
            throws Exception {
        return "SELECT table_schema,table_name,column_name,column_default,is_nullable,data_type,character_maximum_length,character_set_name,column_type,column_key FROM information_schema.COLUMNS WHERE table_name ='%1$s' and table_schema='%2$s';";
    }

    /** @return the SHOW CREATE TABLE template; %1$s is the table name */
    @Override
    protected String getQueryForIndexes(SessionFactory dbSession)
            throws Exception {
        return "SHOW CREATE TABLE `%1$s`;";
    }

    /**
     * Intentionally a no-op for MySQL: the "SHOW CREATE TABLE" statement
     * issued by {@link #showCreateTable} already yields the whole table
     * definition, so no column/key lists need to be populated here.
     */
    @Override
    protected void buildStructure(SessionFactory dbSession) throws Exception {
    }
}

View File

@ -0,0 +1,298 @@
package org.gcube.dataanalysis.databases.structure;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.hibernate.SessionFactory;
import org.gcube.dataanalysis.databases.utils.DatabaseFactory;
/**
 * Table-structure handler for Postgres databases. Unlike MySQL, Postgres has
 * no single "show create" statement, so the structure is rebuilt column by
 * column from information_schema and the CREATE TABLE statement is rendered
 * by the base class.
 */
public class PostgresTableStructure extends AbstractTableStructure {

    /**
     * @param Schemaname schema the table belongs to (passed to the base
     *                   class as the database name)
     * @param TableName  table whose structure is modelled
     * @param dbSession  open Hibernate session factory used for discovery
     */
    public PostgresTableStructure(String Schemaname, String TableName,
            SessionFactory dbSession) throws Exception {
        super(Schemaname, TableName, dbSession);
    }

    /**
     * Populates the column lists by querying information_schema.COLUMNS,
     * then delegates key discovery to {@link #parseIndexes}.
     * Result-row layout (see getQueryForTableStructure):
     * [0]=column_name [1]=column_default [2]=is_nullable [3]=data_type
     * [4]=character_maximum_length [5]=udt_name
     *
     * @throws Exception when the query yields no results (wrong database or
     *                   schema name — the query is case sensitive) or a row
     *                   cannot be parsed
     */
    @Override
    protected void buildStructure(SessionFactory dbSession) throws Exception {
        String queryForStructure = getQueryForTableStructure(dbSession);
        String queryStructure = String.format(queryForStructure, tableName,
                databaseName);
        List<Object> resultSet = DatabaseFactory.executeSQLQuery(
                queryStructure, dbSession);
        // Postgres does not signal this case itself: a null result set means
        // the (case-sensitive) table/schema name did not match anything.
        if (resultSet == null) {
            AnalysisLogger
                    .getLogger()
                    .debug("PostgresTableStructure->Error: Results not available. Check that the database and schema names are correct"
                            + queryStructure);
            throw new Exception(
                    "Results not available. Check that the database and schema names are correct");
        }
        AnalysisLogger.getLogger().debug(
                "PostgresTableStructure->Building Structure with query: "
                        + queryStructure);
        int resultsNumber = resultSet.size();
        for (int i = 0; i < resultsNumber; i++) {
            try {
                Object result = resultSet.get(i);
                Object[] resultArray = (Object[]) result;
                // Column name, lower-cased; "class" is a reserved word for
                // downstream consumers and gets renamed.
                String columnname = ((String) resultArray[0]).toLowerCase();
                if (columnname.equalsIgnoreCase("class"))
                    columnname = "classcolumn";
                ColumnNames.add(columnname);
                // Nullability flag ("YES"/"NO").
                String yesno = (String) resultArray[2];
                if (yesno.equalsIgnoreCase("YES"))
                    IsNullables.add(true);
                else
                    IsNullables.add(false);
                // Data type, with two rewrites: integer + nextval default ->
                // "serial"; USER-DEFINED -> the underlying udt_name.
                String type = (String) resultArray[3];
                if ((resultArray[1] != null)
                        && (resultArray[3].toString().equals("integer"))) {
                    if ((resultArray[1]).toString().startsWith("nextval('")) {
                        type = "serial";
                    }
                    // Append "(length)" when a maximum length is declared.
                    if (resultArray[4] != null) {
                        String tot = "";
                        charset = resultArray[4].toString();
                        tot = type + "(" + charset + ")";
                        TypesList.add(tot);
                    }
                    else {
                        TypesList.add(type);
                    }
                } else if (type.equals("USER-DEFINED")) {
                    // Replace the generic marker with the concrete UDT name.
                    type = (String) resultArray[5];
                    if (resultArray[4] != null) {
                        String tot = "";
                        charset = resultArray[4].toString();
                        tot = type + "(" + charset + ")";
                        TypesList.add(tot);
                    } else {
                        TypesList.add(type);
                    }
                } else {
                    if (resultArray[4] != null) {
                        String tot = "";
                        charset = resultArray[4].toString();
                        tot = type + "(" + charset + ")";
                        TypesList.add(tot);
                    }
                    else {
                        TypesList.add(type);
                    }
                }
                // Default value: sequence-backed defaults are treated as
                // absent (the type was already rewritten to "serial").
                if ((resultArray[1] == null)
                        || ((resultArray[1]).toString().startsWith("nextval('"))) {
                    DefaultValues.add(null);
                } else {
                    DefaultValues.add((String) (resultArray[1]));
                }
                // (A large commented-out earlier variant of this type/default
                // handling was removed for readability.)
            } catch (Exception e) {
                throw e;
            }
        }
        parseIndexes(dbSession);
    }

    /**
     * Recovers the table's key constraints from information_schema and fills
     * the base-class key lists. Result-row layout (see getQueryForIndexes):
     * [1]=constraint_type, [3]=column_name.
     */
    private void parseIndexes(SessionFactory dbSession) throws Exception {
        String queryForIndexes = getQueryForIndexes(dbSession);
        String queryStructure = String.format(queryForIndexes, tableName);
        List<Object> resultSet = DatabaseFactory.executeSQLQuery(
                queryStructure, dbSession);
        AnalysisLogger.getLogger().debug(
                "PostgresTableStructure->Building Structure with query adding keys: "
                        + queryStructure);
        if (resultSet != null) {
            int resultsNumber = resultSet.size();
            for (int i = 0; i < resultsNumber; i++) {
                Object result = resultSet.get(i);
                Object[] resultArray = (Object[]) result;
                String columnKey = (String) resultArray[1];
                if (columnKey.equals("PRIMARY KEY"))
                    ColumnKeys.add((String) resultArray[3]);
                // NOTE(review): Postgres reports constraint_type 'UNIQUE',
                // not 'UNIQUE KEY', so this branch may never match — confirm
                // against live data.
                else if (columnKey.equals("UNIQUE KEY"))
                    UniqueKeys.add((String) resultArray[3]);
                // NOTE(review): foreign-key columns are added to UniqueKeys
                // rather than ForeignKeys; this mirrors an older MySQL code
                // path but looks intentional-yet-suspect — confirm.
                else if (columnKey.equals("FOREIGN KEY"))
                    UniqueKeys.add((String) resultArray[3]);
            }
        }
    }

    /**
     * @return information_schema query for column metadata; %1$s = table
     *         name, %2$s = schema name
     */
    @Override
    protected String getQueryForTableStructure(SessionFactory dbSession)
            throws Exception {
        String queryForStructure = "SELECT column_name,column_default,is_nullable,data_type,character_maximum_length,udt_name FROM information_schema.COLUMNS WHERE table_name ='%1$s' and table_schema='%2$s';";
        return queryForStructure;
    }

    /**
     * @return constraint query joining table_constraints with
     *         key_column_usage / referential_constraints /
     *         constraint_column_usage; %1$s = table name. CHECK constraints
     *         are excluded.
     */
    @Override
    protected String getQueryForIndexes(SessionFactory dbSession)
            throws Exception {
        String queryForIndexes = "SELECT tc.constraint_name,"
                + "tc.constraint_type,tc.table_name,kcu.column_name,tc.is_deferrable,tc.initially_deferred,rc.match_option AS match_type,rc.update_rule AS on_update,"
                + "rc.delete_rule AS on_delete,ccu.table_name AS references_table,ccu.column_name AS references_field FROM information_schema.table_constraints tc "
                + "LEFT JOIN information_schema.key_column_usage kcu ON tc.constraint_catalog = kcu.constraint_catalog AND tc.constraint_schema = kcu.constraint_schema AND tc.constraint_name = kcu.constraint_name "
                + "LEFT JOIN information_schema.referential_constraints rc ON tc.constraint_catalog = rc.constraint_catalog AND tc.constraint_schema = rc.constraint_schema AND tc.constraint_name = rc.constraint_name "
                + "LEFT JOIN information_schema.constraint_column_usage ccu ON rc.unique_constraint_catalog = ccu.constraint_catalog AND rc.unique_constraint_schema = ccu.constraint_schema AND rc.unique_constraint_name = ccu.constraint_name "
                + "where tc.table_name='%1$s' and tc.constraint_type<>'CHECK'";
        return queryForIndexes;
    }
}

View File

@ -0,0 +1,62 @@
package org.gcube.dataanalysis.databases.utils;
import java.util.LinkedHashMap;
//import java.util.List;
import java.util.Map;
//import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
//import org.hibernate.Query;
//import org.hibernate.Session;
//import org.hibernate.SessionFactory;
import org.hibernate.transform.BasicTransformerAdapter;
/**
 * Hibernate result transformer that turns each result tuple into a
 * {@link LinkedHashMap} keyed by column alias, preserving the column order
 * of the query (Hibernate's built-in alias-to-map transformer uses an
 * unordered map). Used to retrieve both column names and values.
 */
public class AliasToEntityOrderedMapResultTransformer extends
        BasicTransformerAdapter {

    /** Stateless singleton; the class is not externally instantiable. */
    public static final AliasToEntityOrderedMapResultTransformer INSTANCE = new AliasToEntityOrderedMapResultTransformer();

    /**
     * Disallow instantiation of AliasToEntityOrderedMapResultTransformer;
     * use {@link #INSTANCE}.
     */
    private AliasToEntityOrderedMapResultTransformer() {
        super();
    }

    /**
     * {@inheritDoc}
     *
     * Maps alias -&gt; value for every aliased column in tuple order;
     * columns whose alias is null are skipped.
     */
    public Object transformTuple(Object[] tuple, String[] aliases) {
        // LinkedHashMap keeps the table-column order; the parameterized type
        // replaces the original raw Map/LinkedHashMap (unchecked warnings).
        Map<String, Object> result = new LinkedHashMap<String, Object>(tuple.length);
        for (int i = 0; i < tuple.length; i++) {
            String alias = aliases[i];
            if (alias != null) {
                result.put(alias, tuple[i]);
            }
        }
        return result;
    }

    /**
     * {@inheritDoc}
     *
     * Always false: the transformed value is the whole map, never a single
     * tuple element.
     */
    public boolean isTransformedValueATupleElement(String[] aliases,
            int tupleLength) {
        return false;
    }

    /**
     * Serialization hook for ensuring singleton uniqueness across
     * deserialization.
     *
     * @return The singleton instance : {@link #INSTANCE}
     */
    private Object readResolve() {
        return INSTANCE;
    }
}

View File

@ -0,0 +1,250 @@
package org.gcube.dataanalysis.databases.utils;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.contentmanagement.lexicalmatcher.utils.FileTools;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
import org.hibernate.Query;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.cfg.Configuration;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.net.MalformedURLException;
import java.util.List;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import javax.xml.parsers.DocumentBuilderFactory;
/**
 * Class that allows to manage a user-selected database: it builds the
 * database configuration, opens the connection (directly or from a
 * configuration file) and executes queries, wrapping them so that ordered
 * column/value maps come back.
 */
public class ConnectionManager {

    // Constructor: stateless; an earlier commented-out variant kept an
    // ecoengine DatabaseFactory instance here.
    public ConnectionManager() {
    }

    /**
     * Opens a database connection from an in-memory configuration (no
     * configuration file involved).
     *
     * @param config algorithm configuration carrying the connection params
     * @return the Hibernate session factory for the connection
     */
    public SessionFactory initDBConnection(AlgorithmConfiguration config) {
        SessionFactory dbconnection = DatabaseUtils.initDBSession(config);
        return dbconnection;
    }

    /**
     * Opens a database connection from a Hibernate XML configuration file.
     *
     * @param configurationFile path of the hibernate configuration document
     * @return the session factory built from the parsed configuration
     * @throws Exception on read, parse or session-factory build failure
     */
    public SessionFactory initDBConnection(String configurationFile)
            throws Exception {
        String xml = FileTools.readXMLDoc(configurationFile);
        SessionFactory DBSessionFactory = null;
        Configuration cfg = new Configuration();
        cfg = cfg.configure(DocumentBuilderFactory.newInstance()
                .newDocumentBuilder()
                .parse(new ByteArrayInputStream(xml.getBytes())));
        DBSessionFactory = cfg.buildSessionFactory();
        return DBSessionFactory;
    }

    /**
     * Builds an AlgorithmConfiguration from the given connection settings;
     * empty strings are treated as "not provided" and skipped.
     *
     * @throws IOException (as MalformedURLException) when DatabaseName is empty
     */
    public AlgorithmConfiguration setconfiguration(String ConfigPath,
            String DatabaseUserName, String DatabasePassword,
            String DatabaseDriver, String DatabaseDialect, String DatabaseURL,
            String DatabaseName) throws IOException {
        AlgorithmConfiguration config = new AlgorithmConfiguration();
        if (DatabaseName.equals("")) {
            throw new MalformedURLException(
                    "Invalid Url: the database's name is not present");
        }
        if (!ConfigPath.equals(""))
            config.setConfigPath(ConfigPath);
        if (!DatabaseUserName.equals("")) {
            config.setParam("DatabaseUserName", DatabaseUserName);
        }
        if (!DatabasePassword.equals(""))
            config.setParam("DatabasePassword", DatabasePassword);
        if (!DatabaseDriver.equals(""))
            config.setParam("DatabaseDriver", DatabaseDriver);
        if (!DatabaseDialect.equals(""))
            config.setParam("DatabaseDialect", DatabaseDialect);
        if (!DatabaseURL.equals(""))
            config.setParam("DatabaseURL", DatabaseURL);
        // NOTE(review): DatabaseName is validated above but never stored in
        // the config — confirm whether a setParam("DatabaseName", ...) was
        // intended.
        return config;
    }

    /**
     * Executes a SQL query and returns its rows as ordered alias-to-value
     * maps. Ordinary queries are wrapped in
     * "select * from ( ... ) as query" so the ordered transformer can apply;
     * EXPLAIN and "show create table" statements are passed through
     * unwrapped because the wrapper breaks them.
     *
     * @param query            SQL text (a trailing ';' is stripped before wrapping)
     * @param DBSessionFactory open session factory to run the query on
     * @return the non-empty result list
     * @throws Exception when the result is empty, when Hibernate cannot map
     *                   a user-defined type, on SQL grammar errors (cause
     *                   message re-thrown), or any other execution failure
     */
    public List<Object> executeQuery(String query,
            SessionFactory DBSessionFactory) throws Exception {
        List<Object> obj = null;
        Session ss = null;
        try {
            ss = DBSessionFactory.getCurrentSession();
            ss.beginTransaction();
            Query qr = null;
            // Detect "show create table" with any spacing: spaces in the
            // keyword become "[ ]+" in the regex.
            String keyword = "show create table";
            String regex = ".*\\b" + keyword.replaceAll(" +", "[ ]\\+")
                    + "\\b.*";
            if ((!(query.toLowerCase().contains("explain")))
                    && (!(query.toLowerCase().matches(regex)))) {
                // Wrap the query so it operates properly with the ordered
                // transformer; first strip a trailing ';'.
                query = query.trim();
                if (query.endsWith(";")) {
                    // NOTE(review): indexOf finds the FIRST ';', so a query
                    // containing an embedded ';' would be truncated there —
                    // confirm inputs are single statements.
                    int endIndex = query.indexOf(";");
                    query = query.substring(0, endIndex);
                }
                query = "select * from (" + query + ") as query";
            }
            AnalysisLogger.getLogger().debug(
                    "In ConnectionManager-> executing query: " + query);
            qr = ss.createSQLQuery(query);
            // Ordered transformer keeps column order in each row map.
            qr.setResultTransformer(AliasToEntityOrderedMapResultTransformer.INSTANCE);
            List<Object> result = qr.list();
            AnalysisLogger.getLogger().debug(
                    "In ConnectionManager-> result's size: " + result.size());
            ss.getTransaction().commit();
            // Empty results are treated as an error by this API.
            if (result != null && result.size() != 0) {
                obj = result;
            } else {
                AnalysisLogger.getLogger().debug(
                        "ConnectionManager->Error: Result not available");
                throw new Exception("Result not available");
            }
        } catch (Exception e) {
            // Exception mapping is done by class-name matching so no direct
            // compile-time dependency on the Hibernate exception types exists.
            if (e.getClass().toString()
                    .contains("org.hibernate.MappingException")) {
                AnalysisLogger
                        .getLogger()
                        .debug("In ConnectionManager-> ERROR The query could not be executed: Error in retrieving a user defined type. Try to use a store procedure to convert the type");
                throw new Exception(
                        "The query could not be executed: Error in retrieving a user defined type. Try to use a store procedure to convert the type");
            }
            if (e.getClass().toString()
                    .contains("org.hibernate.exception.SQLGrammarException")) {
                // Surface the driver's own message for grammar errors.
                AnalysisLogger.getLogger().debug(
                        "In ConnectionManager-> "
                                + e.getCause().getLocalizedMessage());
                throw new Exception(e.getCause().getMessage());
            }
            else {
                throw e;
            }
        }
        return obj;
    }

    // (A commented-out createConnection(AlgorithmConfiguration) helper,
    // duplicating initDBConnection, was removed for readability.)
}

View File

@ -0,0 +1,189 @@
package org.gcube.dataanalysis.databases.utils;
import java.io.ByteArrayInputStream;
import java.util.List;
import javax.xml.parsers.DocumentBuilderFactory;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.contentmanagement.lexicalmatcher.utils.FileTools;
import org.hibernate.MappingException;
import org.hibernate.Query;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.cfg.Configuration;
/** Class that allows to connect to a database and to execute a query. */
public class DatabaseFactory {

    /**
     * Establishes a connection with the database described by a Hibernate
     * XML configuration file.
     *
     * @param configurationFile path of the hibernate configuration document
     * @return the session factory built from the parsed configuration
     * @throws Exception on read, parse or session-factory build failure
     */
    public static SessionFactory initDBConnection(String configurationFile)
            throws Exception {
        String xml = FileTools.readXMLDoc(configurationFile);
        SessionFactory DBSessionFactory = null;
        Configuration cfg = new Configuration();
        cfg = cfg.configure(DocumentBuilderFactory.newInstance()
                .newDocumentBuilder()
                .parse(new ByteArrayInputStream(xml.getBytes())));
        DBSessionFactory = cfg.buildSessionFactory();
        return DBSessionFactory;
    }

    /**
     * Executes a native SQL query; thin wrapper over
     * {@link #executeHQLQuery} with {@code useSQL = true}.
     *
     * @return the result rows, or null when the query returned no rows
     */
    public static List<Object> executeSQLQuery(String query,
            SessionFactory DBSessionFactory) throws Exception {
        try {
            return executeHQLQuery(query, DBSessionFactory, true);
        } catch (Exception e) {
            throw e;
        }
    }

    /**
     * Executes a query as native SQL or HQL.
     *
     * @param query            the query text
     * @param DBSessionFactory open session factory to run the query on
     * @param useSQL           true for a native SQL query, false for HQL
     * @return the result rows, or null when the query returned no rows
     * @throws Exception on failure; SQL grammar errors are re-thrown with
     *                   the driver cause's message
     */
    public static List<Object> executeHQLQuery(String query,
            SessionFactory DBSessionFactory, boolean useSQL) throws Exception,
            MappingException {
        Session ss = null;
        List<Object> obj = null;
        try {
            ss = DBSessionFactory.getCurrentSession();
            ss.beginTransaction();
            Query qr = null;
            if (useSQL)
                qr = ss.createSQLQuery(query);
            else
                qr = ss.createQuery(query);
            List<Object> result = null;
            AnalysisLogger.getLogger().debug(
                    "In DatabaseFactory->" + qr.getQueryString());
            try {
                result = qr.list();
                ss.getTransaction().commit();
                if (result == null)
                    System.out
                            .println("Hibernate doesn't return a valid object when org.gcube.contentmanagement.lexicalmatcher retrieve UserState Object");
                // Empty result lists are normalized to null.
                if (result != null && result.size() != 0) {
                    obj = result;
                }
                // NOTE(review): rollback(ss) after a successful commit —
                // rollback() here mainly serves to close the session (see
                // below); a rollback on a committed transaction looks like a
                // no-op but confirm against the Hibernate version in use.
                rollback(ss);
                return obj;
            } catch (Exception e) {
                // Exception mapping by class-name matching (no direct
                // dependency on the Hibernate exception type).
                if (e.getClass()
                        .toString()
                        .contains("org.hibernate.exception.SQLGrammarException")) {
                    AnalysisLogger.getLogger().debug(
                            "In DatabaseFactory-> "
                                    + e.getCause().getLocalizedMessage());
                    throw new Exception(e.getCause().getMessage());
                }
                // (A commented-out branch once mapped MySQL syntax-error
                // exceptions the same way.)
                else {
                    throw e;
                }
            }
        } catch (Exception e) {
            throw e;
        }
    }

    /**
     * Best-effort rollback of the session's current transaction, always
     * followed by closing the session. All failures are swallowed: this is
     * a cleanup path.
     *
     * @param ss the session to roll back and close (may be null)
     */
    public static void rollback(Session ss) {
        try {
            if (ss != null && ss.getTransaction() != null)
                ss.getTransaction().rollback();
        } catch (Exception ex) {
            // deliberately ignored: cleanup must not mask the original error
        } finally {
            try {
                ss.close();
            } catch (Exception ee) {
                // deliberately ignored
            }
        }
    }

    // (Commented-out executeSQLUpdate/executeHQLUpdate helpers — update
    // statements with commit and rollback-on-error — were removed for
    // readability; restore from VCS history if update support is needed.)
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,247 @@
package org.gcube.dataanalysis.databases.utils;
import java.io.File;
import java.io.FileInputStream;
import java.math.BigInteger;
import java.util.Iterator;
import java.util.List;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import org.dom4j.Document;
import org.dom4j.Node;
import org.dom4j.io.SAXReader;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.hibernate.SessionFactory;
/**
 * Class that performs some operations on a database: recovering the schema
 * name from a hibernate configuration file and estimating a table's row count.
 */
public class DatabaseOperations {

	// Database's type. Set as a side effect of getDBSchema(): POSTGRES when a
	// "connection.schemaname" property is present in the config, MYSQL otherwise.
	private String DBType = "";

	// Probe queries: the limited select is wrapped so count(*) is valid on both engines.
	private static final String QueryPostgres = "select count(*) from (select * from \"%1$s\" limit 1) as a";
	private static final String QueryMysql = "select count(*) from (select * from `%1$s` limit 1) as a";
	// "explain" queries used to read the planner's estimated row count.
	private static final String explainQueryPostgres = "explain select * from \"%1$s\"";
	private static final String explainQueryMysql = "explain select * from `%1$s`";

	private static final String MYSQL = "MySQL";
	private static final String POSTGRES = "Postgres";

	/**
	 * Recovers the schema's (or database's) name from a hibernate configuration
	 * file. As a side effect sets the database type: POSTGRES if a
	 * "connection.schemaname" property is found, MYSQL otherwise (readable
	 * afterwards through {@link #getDBType()}).
	 *
	 * @param configurationFile path to the hibernate XML configuration file
	 * @return the schema name (Postgres) or the database name (MySQL)
	 * @throws Exception if the file cannot be opened, read or parsed
	 */
	public String getDBSchema(String configurationFile) throws Exception {
		File fl = new File(configurationFile);
		FileInputStream stream = new FileInputStream(fl);
		String dbschema = "";
		try {
			SAXReader saxReader = new SAXReader();
			Document document = saxReader.read(stream);
			List<Node> nodes = document
					.selectNodes("//hibernate-configuration/session-factory/property");
			Iterator<Node> nodesIterator = nodes.iterator();
			while (nodesIterator.hasNext()) {
				Node currentnode = nodesIterator.next();
				String element = currentnode.valueOf("@name");
				if (element.equals("connection.url")) {
					// the database name is the last path segment of the JDBC url,
					// stripped of any "?param=..." suffix
					String url = currentnode.getText();
					dbschema = url.substring(url.lastIndexOf("/") + 1);
					if (dbschema.indexOf('?') > 0)
						dbschema = dbschema.substring(0, dbschema.indexOf('?'));
					AnalysisLogger.getLogger().debug(
							"DatabaseOperations->recovering the database's name: "
									+ dbschema);
				}
				if (element.equals("connection.schemaname")) {
					String url = currentnode.getText();
					dbschema = url.substring(url.lastIndexOf("/") + 1);
					if (dbschema.indexOf('?') > 0)
						dbschema = dbschema.substring(0, dbschema.indexOf('?'));
					AnalysisLogger.getLogger().debug(
							"DatabaseOperations->recovering the schema's name: "
									+ dbschema);
					DBType = POSTGRES;
				}
				// default to MySQL on the first property seen; a later
				// "connection.schemaname" property overrides this with POSTGRES
				if (DBType.equals("")) {
					DBType = MYSQL;
				}
			}
		} finally {
			// close the stream even when parsing fails
			// (the original implementation leaked it on exception)
			stream.close();
		}
		return dbschema;
	}

	/**
	 * Returns the database's type (POSTGRES or MYSQL), valid after a call to
	 * {@link #getDBSchema(String)}.
	 */
	public String getDBType() {
		return DBType;
	}

	/**
	 * Calculates the estimated number of rows of a table. It first probes the
	 * table with a limited count; when at least one row exists it reads the
	 * planner's estimate from an "explain" query.
	 *
	 * @param connection manager used to run the queries
	 * @param dbType either POSTGRES or MYSQL
	 * @param tablename name of the table whose size is estimated
	 * @param session hibernate session factory of the target database
	 * @return the estimated number of rows (0 when the table is empty or no
	 *         result is returned)
	 * @throws Exception if a query fails
	 */
	@SuppressWarnings("unchecked")
	public long calculateElements(ConnectionManager connection,
			String dbType, String tablename, SessionFactory session)
			throws Exception {
		long count = 0;
		String countingQuery = null;
		if (dbType.equals(POSTGRES)) {
			countingQuery = String.format(QueryPostgres, tablename);
		}
		if (dbType.equals(MYSQL)) {
			countingQuery = String.format(QueryMysql, tablename);
		}
		AnalysisLogger.getLogger().debug(
				"DatabaseOperations->calculating rows' number with the query: "
						+ countingQuery);
		List<Object> result = connection.executeQuery(countingQuery, session);
		// guard against an empty result list too
		// (the original indexed result.get(0) after a null check only)
		if ((result != null) && (result.size() > 0)) {
			Object element = result.get(0);
			// query rows come back as column-name -> value maps; the single
			// column of the count query is its first value
			ArrayList<Object> listvalues = new ArrayList<Object>(
					((LinkedHashMap<String, Object>) element).values());
			long numElem = Long.valueOf(listvalues.get(0).toString()).longValue();
			if (numElem > 0) {
				AnalysisLogger
						.getLogger()
						.debug("DatabaseOperations->the database has at least a row.Calculating rows' number through an estimation");
				String explain = null;
				if (dbType.equals(POSTGRES)) {
					explain = String.format(explainQueryPostgres, tablename);
				}
				if (dbType.equals(MYSQL)) {
					explain = String.format(explainQueryMysql, tablename);
				}
				// call query with explain function
				AnalysisLogger.getLogger().debug(
						"DatabaseOperations->calculating rows' number with the query: "
								+ explain);
				List<Object> resultinfo = connection.executeQuery(explain, session);
				// recovery result
				if (dbType.equals(MYSQL)) {
					// MySQL explain row: the 9th column holds the row estimate
					Object elem = resultinfo.get(0);
					ArrayList<Object> values = new ArrayList<Object>(
							((LinkedHashMap<String, Object>) elem).values());
					BigInteger value = (BigInteger) values.get(8);
					count = value.longValue();
				}
				if (dbType.equals(POSTGRES)) {
					// Postgres explain output: parse the number out of the
					// "rows=<n> width=..." fragment of the plan's first line
					String var = resultinfo.get(0).toString();
					int beginindex = var.indexOf("rows");
					int lastindex = var.indexOf("width");
					var = var.substring(beginindex + 5, lastindex - 1);
					count = Long.valueOf(var).longValue();
				}
			}
		}
		AnalysisLogger.getLogger().debug(
				"DatabaseOperations->rows' number calculated: " + count);
		return count;
	}
}

View File

@ -0,0 +1,143 @@
//package org.gcube.dataanalysis.databases.utilsold;
//
////import java.awt.List;
//
//import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
//import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
//import org.hibernate.Query;
//import org.hibernate.Session;
//import org.hibernate.SessionFactory;
//
//import java.io.IOException;
//import java.net.MalformedURLException;
//import java.util.List;
//
///**
// * Class that allows to manage a database selected from a user. It performs to
// * set the database configuration, to connect to the database and finally to
// * execute a query.
// */
//public class ConnectionManager {
//
// private org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory df;
//
//
// public ConnectionManager() {
//
//// org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory df = new org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory();
// df = new org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory();
//
//
// }
//
// /** Method that allows to set the configuration */
// public AlgorithmConfiguration setconfiguration(String ConfigPath,
// String DatabaseUserName, String DatabasePassword,
// String DatabaseDriver, String DatabaseDialect, String DatabaseURL,
// String DatabaseName) throws IOException {
//
// AlgorithmConfiguration config = new AlgorithmConfiguration();
//
// if (DatabaseName.equals("")) {
//
// throw new MalformedURLException(
// "Invalid Url: the database's name is not present");
// // return null;
// }
//
// if (!ConfigPath.equals(""))
// config.setConfigPath(ConfigPath);
//
// if (!DatabaseUserName.equals("")) {
// config.setParam("DatabaseUserName", DatabaseUserName);
// }
//
// if (!DatabasePassword.equals(""))
// config.setParam("DatabasePassword", DatabasePassword);
//
// if (!DatabaseDriver.equals(""))
// config.setParam("DatabaseDriver", DatabaseDriver);
//
// if (!DatabaseDialect.equals(""))
// config.setParam("DatabaseDialect", DatabaseDialect);
//
// if (!DatabaseURL.equals(""))
// config.setParam("DatabaseURL", DatabaseURL);
//
// return config;
//
// }
//
// /** Method that creates the connection */
// public SessionFactory createConnection(AlgorithmConfiguration config) {
//
// SessionFactory dbconnection = DatabaseUtils.initDBSession(config);
//
// return dbconnection;
//
// }
//
//
//
// // public List <Object> executeQuery(String query, SessionFactory
// // DBSessionFactory){
// //
// // List <Object> obj = null;
// //
// //
// //
// // return obj;
// // }
// //
//
// /** Method that execute a query */
// public List<Object> executeQuery(String query,
// SessionFactory DBSessionFactory) throws Exception {
//
// List<Object> obj = null;
// Session ss = null;
//
// try {
// ss = DBSessionFactory.getCurrentSession();
//
// ss.beginTransaction();
//
// Query qr = null;
//
// // Wrapper for a query. It allows the query to operate in a proper
// // way
// query = "select * from (" + query + ") as query";
//
// qr = ss.createSQLQuery(query);
//
// List<Object> result = qr.list();
//
// ss.getTransaction().commit();
//
// /*
// * if (result == null) System.out.println(
// * "Hibernate doesn't return a valid object when org.gcube.contentmanagement.lexicalmatcher retrieve UserState Object"
// * );
// *
// * if (result != null && result.size() == 0)
// * System.out.println(String.format("found nothing in database"));
// */
// if (result != null && result.size() != 0) {
// obj = result;
// }
//
// } catch (Exception e) {
//
// // System.out.println(String.format("Error while executing query: %1$s %2$s",
// // query, e.getMessage()));
// // e.printStackTrace();
// // System.out.println(String.format("Error while executing query: %1$s %2$s",
// // query, e.getMessage()));
// throw e;
// }
//
// return obj;
//
// }
//
//}

View File

@ -0,0 +1,160 @@
//package org.gcube.dataanalysis.databases.utilsold;
//
//import java.io.ByteArrayInputStream;
//import java.util.List;
//
//import javax.xml.parsers.DocumentBuilderFactory;
//
//import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
//import org.gcube.contentmanagement.lexicalmatcher.utils.FileTools;
//import org.hibernate.Query;
//import org.hibernate.Session;
//import org.hibernate.SessionFactory;
//import org.hibernate.cfg.Configuration;
//
//
///** Class that allows to connect to a database and to execute a query */
//public class DatabaseFactory {
//
//
// //Method that establish a connection with the database
// public static SessionFactory initDBConnection(String configurationFile) throws Exception {
// String xml = FileTools.readXMLDoc(configurationFile);
// SessionFactory DBSessionFactory = null;
// Configuration cfg = new Configuration();
// cfg = cfg.configure(DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(new ByteArrayInputStream(xml.getBytes())));
// DBSessionFactory = cfg.buildSessionFactory();
// return DBSessionFactory;
// }
//
//
//
// //Method that execute the query
// public static List<Object> executeSQLQuery(String query, SessionFactory DBSessionFactory) throws Exception {
//// System.out.println("QUERY: "+query);
// try {
// return executeHQLQuery(query, DBSessionFactory, true);
//
// } catch (Exception e) {
// // TODO: handle exception
// throw e;
// }
//
// }
//
// public static List<Object> executeHQLQuery(String query, SessionFactory DBSessionFactory, boolean useSQL) throws Exception{
// Session ss = null;
// List<Object> obj = null;
//
//
// try {
//
// ss = DBSessionFactory.getCurrentSession();
//
// ss.beginTransaction();
//
// Query qr = null;
//
// if (useSQL)
// qr = ss.createSQLQuery(query);
// else
// qr = ss.createQuery(query);
//
// List<Object> result = null;
//
// AnalysisLogger.getLogger().debug("DatabaseFactory->"+qr.getQueryString());
// try {
// result = qr.list();
// ss.getTransaction().commit();
//
// if (result == null)
// System.out.println("Hibernate doesn't return a valid object when org.gcube.contentmanagement.lexicalmatcher retrieve UserState Object");
//
//// if (result != null && result.size() == 0)
//// System.out.println(String.format("found nothing in database for query: "+query));
//
// if (result != null && result.size() != 0) {
// obj = result;
// }
//
// rollback(ss);
//
// return obj;
//
// } catch (Exception e) {
// // TODO: handle exception
// throw e;
// }
//
//
//
//
//
//
//
//
// } catch (Exception e) {
// // TODO: handle exception
//
// throw e;
// }
//
//
//
//
//
//
// }
//
//
// public static void rollback(Session ss) {
//
// try {
// if (ss != null && ss.getTransaction() != null)
// ss.getTransaction().rollback();
// } catch (Exception ex) {
//
// } finally {
// try {
// ss.close();
// } catch (Exception ee) {
// }
// }
// }
//
//
//
//// public static void executeSQLUpdate(String query, SessionFactory DBSessionFactory) throws Exception {
//// executeHQLUpdate(query, DBSessionFactory, true);
//// }
//
//
//// public static void executeHQLUpdate(String query, SessionFactory DBSessionFactory, boolean useSQL) throws Exception{
////// System.out.println("executing query: " + query);
//// Session ss = null;
////
//// try {
////
//// ss = DBSessionFactory.getCurrentSession();
////// System.out.println("executing query");
//// ss.beginTransaction();
//// Query qr = null;
////
//// if (useSQL)
//// qr = ss.createSQLQuery(query);
//// else
//// qr = ss.createQuery(query);
////
//// qr.executeUpdate();
//// ss.getTransaction().commit();
////
//// } catch (Exception e) {
//// AnalysisLogger.getLogger().debug(query);
//// rollback(ss);
////// e.printStackTrace();
//// throw e;
//// }
//// }
//
//
//}

View File

@ -0,0 +1,207 @@
//package org.gcube.dataanalysis.databases.utilsold;
//
//import java.math.BigInteger;
//import java.util.ArrayList;
//import java.util.List;
//
//import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
////import org.gcube.databasemanagement.DBAdapter;
//import org.gcube.dataanalysis.databases.structure.MySQLTableStructure;
//import org.gcube.dataanalysis.databases.structure.AbstractTableStructure;
//import org.gcube.dataanalysis.databases.structure.PostgresTableStructure;
////import org.gcube.contentmanagement.databases.structure.MySQLTableStructure;
//import org.hibernate.SessionFactory;
//
//
///** Class that allows manage a database offering several functionalities */
//public class DatabaseManagement {
//
// // AbstractTableStructure crossTableStructure;
// private List<String> tablesname = new ArrayList<String>();
// private String configPath;
// private String sourceSchemaName;
// private SessionFactory sourceDBSession;
// private String DBType;
// private AbstractTableStructure crossTableStructure;
// // private DBAdapter typesMap;
// private DatabaseOperations op = new DatabaseOperations();
//// private String destinationDBType;
//// private String sourceDBType;
// MySQLTableStructure mysqlobj;
//
// private static final String MYSQL = "MySQL";
// private static final String POSTGRES = "Postgres";
// private static final String selectTablesQuery = "SELECT distinct table_name FROM information_schema.COLUMNS where table_schema='%1$s';";
// private static final String listSchemaNameQuery="select schema_name from information_schema.schemata where schema_name <> 'information_schema' and schema_name !~ E'^pg_'";
//
// public DatabaseManagement(String cfgDir, String SourceFile)
// throws Exception {
//
// configPath = cfgDir;
// if (!configPath.endsWith("/"))
// configPath += "/";
//
// sourceSchemaName = op.getDBSchema(configPath + SourceFile);
//
// sourceDBSession = DatabaseFactory.initDBConnection(configPath
// + SourceFile);
//
//
//// destinationDBType = POSTGRES;
//// sourceDBType = MYSQL;
////
//// // typesMap = new DBAdapter(configPath + "/" + sourceDBType + "2"
//// // + destinationDBType + ".properties");
//
// }
//
// // Get the table's names
// public List<String> getTables() throws Exception {
//
// String query = String.format(selectTablesQuery, sourceSchemaName);
//
// List<Object> resultSet = DatabaseFactory.executeSQLQuery(query,
// sourceDBSession);
//
// for (Object result : resultSet) {
// tablesname.add((String) result);
// }
//
// // Get the Database's type
// DBType = op.getDBType();
//
// return tablesname;
//
// }
//
//
// //Get the schema's name for the database Postgres
// public List<String> getSchemas() throws Exception{
//
// // Get the Database's type
// DBType = op.getDBType();
//
// List<String> list= new ArrayList<String>();
//
//
//
// if (DBType.equals(POSTGRES)) {
//
//
// List<Object> resultSet = DatabaseFactory.executeSQLQuery(listSchemaNameQuery,
// sourceDBSession);
//
// for (Object result : resultSet) {
// list.add((String) result);
// }
// }
//
// if (DBType.equals(MYSQL)){
//
// list=null;
//
//
// }
//
//
//
// return list;
//
// }
//
// // Get the "Create Table" statement
// public String getCreateTable(String tablename) throws Exception {
//
// String createstatement = "";
//
// if (DBType.equals(POSTGRES)) {
//
// // for (String table : tablesname) {
//
// crossTableStructure = getSourceTableObject(tablename);
//
// String tableBuildQuery = crossTableStructure.buildUpCreateTable();
//
// AnalysisLogger.getLogger().debug(
// "DatabaseManagement->'Create Table' statement: "
// + tableBuildQuery);
//
// // }
//
// }
//
// if (DBType.equals(MYSQL)) {
//
// // for (String table : tablesname) {
//
// crossTableStructure = getSourceTableObject(tablename);
//
// try {
//
// String createtablestatement = mysqlobj
// .showCreateTable(sourceDBSession);
//
// AnalysisLogger.getLogger().debug(
// "DatabaseManagement->'Create Table' statement: "
// + createtablestatement);
//
// } catch (Exception e) {
// // TODO: handle exception
//
// AnalysisLogger.getLogger().debug(
// "DatabaseManagement->Exception: " + e.getMessage());
// }
//
// // }
//
// }
//
// return createstatement;
//
// }
//
// // Method that create the database object
// private AbstractTableStructure getSourceTableObject(String tablename)
// throws Exception {
//
// if (DBType.equals(MYSQL)) {
//
// mysqlobj = new MySQLTableStructure(sourceSchemaName, tablename,
// sourceDBSession);
//
// // mysqlobj = new MySQLTableStructure(sourceSchemaName, tablename,
// // typesMap, sourceDBSession);
//
// // return new MySQLTableStructure(sourceSchemaName, tablename,
// // typesMap, sourceDBSession);
// return mysqlobj;
//
// }
//
// else if (DBType.equals(POSTGRES)) {
//
// PostgresTableStructure postobj = new PostgresTableStructure(
// sourceSchemaName, tablename, sourceDBSession);
//
// // PostgresTableStructure postobj = new PostgresTableStructure(
// // sourceSchemaName, tablename, typesMap, sourceDBSession);
//
// return postobj;
//
// } else {
// return null;
// }
//
// }
//
// // Method that returns the estimated number of rows
// public BigInteger getNumberOfRows(String tablename) throws Exception {
//
// BigInteger rows;
//
// rows = op.calculateElements(tablename, sourceDBSession);
// return rows;
//
// }
//
//}

View File

@ -0,0 +1,156 @@
//package org.gcube.dataanalysis.databases.utilsold;
//
//import java.io.File;
//import java.io.FileInputStream;
//import java.math.BigInteger;
//import java.util.Iterator;
//import java.util.List;
//
//import org.dom4j.Document;
//import org.dom4j.Node;
//import org.dom4j.io.SAXReader;
//import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
//import org.hibernate.SessionFactory;
//
///** Class that allows to performs some operations on a database */
//public class DatabaseOperations {
//
// private String DBType = ""; //database's type
//
// private static final String Query = "select * from %1$s limit 1";
// // private static final String countQuery = "select count(*) from %1$s";
// private static final String explainQuery = "explain select * from %1$s";
//
// private static final String MYSQL = "MySQL";
// private static final String POSTGRES = "Postgres";
//
//
// //Method that recover the schema's name of the database.
// public String getDBSchema(String configurationFile) throws Exception {
//
// File fl = new File(configurationFile);
// FileInputStream stream = new FileInputStream(fl);
//
// SAXReader saxReader = new SAXReader();
// Document document = saxReader.read(stream);
//
// List<Node> nodes = document
// .selectNodes("//hibernate-configuration/session-factory/property");
//
// Iterator<Node> nodesIterator = nodes.iterator();
//
// String dbschema = "";
// while (nodesIterator.hasNext()) {
//
// Node currentnode = nodesIterator.next();
// String element = currentnode.valueOf("@name");
// if (element.equals("connection.url")) {
// String url = currentnode.getText();
// dbschema = url.substring(url.lastIndexOf("/") + 1);
// if (dbschema.indexOf('?') > 0)
// dbschema = dbschema.substring(0, dbschema.indexOf('?'));
// AnalysisLogger.getLogger().debug(
// "DatabaseOperations-> recovering the database's name: " + dbschema);
//
//
// // DBType="MySQL";
//
// // break;
// }
//
// if (element.equals("connection.schemaname")) {
// String url = currentnode.getText();
// dbschema = url.substring(url.lastIndexOf("/") + 1);
// if (dbschema.indexOf('?') > 0)
// dbschema = dbschema.substring(0, dbschema.indexOf('?'));
// AnalysisLogger.getLogger().debug(
// "DatabaseOperations-> recovering the schema's name: " + dbschema);
// DBType = POSTGRES;
// // break;
//
// }
//
// if (DBType.equals("")) {
//
// DBType = MYSQL;
//
// }
//
// }
//
// // close stream
// stream.close();
//
// return dbschema;
// }
//
// //Method that returns the database's type
// public String getDBType() {
//
// return DBType;
//
// }
//
// //Method that calculate the estimated number of rows
// public BigInteger calculateElements(String tablename, SessionFactory session) throws Exception{
//
// BigInteger count = BigInteger.ZERO;
//
// String countingQuery = String.format(Query, tablename);
//
// AnalysisLogger.getLogger().debug(
// "DatabaseOperations-> calculating rows' number with the query: " + countingQuery);
//
// List<Object> result;
//
//// try {
// result = DatabaseFactory.executeSQLQuery(countingQuery, session);
//
// if ((result != null) && (result.size() > 0)) {
//
// // call query with explain function
//
// String explain = String.format(explainQuery, tablename);
// AnalysisLogger.getLogger().debug(
// "DatabaseOperations-> calculating rows' number with the query: " + explain);
//
// List<Object> resultinfo;
//
// resultinfo = DatabaseFactory.executeSQLQuery(explain, session);
//
// // recovery result
//
// if (DBType.equals(MYSQL)) {
//
// Object[] resultArray = (Object[]) (resultinfo.get(0));
//
// count = (BigInteger) resultArray[8];
//
// }
//
// if (DBType.equals(POSTGRES)) {
//
// String var = resultinfo.get(0).toString();
//
// int beginindex = var.indexOf("rows");
//
// int lastindex = var.indexOf("width");
//
// var = var.substring(beginindex + 5, lastindex - 1);
//
// count = new BigInteger(var);
//
// }
//
// }
//
//// } catch (Exception e) {
//// TODO Auto-generated catch block
//// e.printStackTrace();
//// }
//
// return count;
//
// }
//
//}

View File

@ -0,0 +1 @@
< ャ@Qaj、F÷gク<67>

View File

@ -0,0 +1,36 @@
package org.gcube.dataanalysis.test;
import org.gcube.common.encryption.StringEncrypter;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
public class Prova {

	/**
	 * Small manual test: sets the /gcube/devsec scope, decrypts a sample
	 * encrypted password through the infrastructure's StringEncrypter and
	 * prints the result to stdout.
	 *
	 * @param args unused
	 */
	public static void main(String[] args) {
		ScopeProvider.instance.set("/gcube/devsec");
		try {
			String password = StringEncrypter.getEncrypter().decrypt("UwNMZOK7FlIjGPR+NZCV6w==");
			System.out.println(password);
		} catch (Exception e) {
			e.printStackTrace();
		}
	}
}

View File

@ -0,0 +1,163 @@
//package org.gcube.dataanalysis.test;
//
//import java.io.IOException;
//
//import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
//import org.gcube.dataanalysis.databases.utils.DatabaseManagement;
//
//public class RegressionRandomSampleOnTable {
//
// /**
// * @param args
// */
// public static void main(String[] args) {
// // TODO Auto-generated method stub
// AnalysisLogger.getLogger().debug("Executing: " + "Postgres");
// testPostgres();
//
// AnalysisLogger.getLogger().debug("Executing: " + "Mysql1");
//// testMysql1();
//
// AnalysisLogger.getLogger().debug("Executing: " + "Mysql2");
//// testMysql2();
//
// AnalysisLogger.getLogger().debug("Executing: " + "Mysql3");
//// testMysql3();
// }
//
// // Postgres database
// private static void testPostgres() {
//
// // connection to database
// DatabaseManagement mgt = new DatabaseManagement("");
//
// try {
// mgt.createConnection(
// "postgres",
// "d4science2",
// "org.postgresql.Driver",
// "org.hibernate.dialect.PostgreSQLDialect",
// "jdbc:postgresql://geoserver-dev.d4science-ii.research-infrastructures.eu:5432/aquamapsdb",
// "aquamapsdb");
// } catch (IOException e) {
// // TODO Auto-generated catch block
// e.printStackTrace();
// }
//
// // SmartSampleOnTable operation
//
// try {
// // for database postgres, if a table is not in lower case format, it
// // is necessary to include the table name in quotes ""
// mgt.randomSampleOnTable("Divisions", "public");
// } catch (Exception e) {
// // TODO Auto-generated catch block
// e.printStackTrace();
//
// AnalysisLogger.getLogger().debug(
// "In RegressionRandomSampleOnTable->EXCEPTION: " + e);
// }
//
// }
//
// // Mysql database
// private static void testMysql1() {
//
// // connection to database
// DatabaseManagement mgt = new DatabaseManagement("");
//
// try {
// mgt.createConnection("root", "test", "com.mysql.jdbc.Driver",
// "org.hibernate.dialect.MySQLDialect",
// // "jdbc:mysql://146.48.87.169:3306/col2oct2010",
// "jdbc:mysql://146.48.87.169:3306/aquamaps", "hcaf_d");
// } catch (IOException e) {
// // TODO Auto-generated catch block
// e.printStackTrace();
// }
//
// // SmartSampleOnTable operation
//
// try {
// // for database postgres, if a table is not in lower case format, it
// // is necessary to include the table name in quotes ""
// mgt.randomSampleOnTable("hcaf_d", "aquamaps");
// } catch (Exception e) {
// // TODO Auto-generated catch block
// e.printStackTrace();
//
// AnalysisLogger.getLogger().debug(
// "In RegressionRandomSampleOnTable->EXCEPTION: " + e);
// }
//
// }
//
// // Mysql database
// private static void testMysql2() {
//
// // connection to database
// DatabaseManagement mgt = new DatabaseManagement("");
//
// try {
// mgt.createConnection("root", "test", "com.mysql.jdbc.Driver",
// "org.hibernate.dialect.MySQLDialect",
// "jdbc:mysql://146.48.87.169:3306/col2oct2010",
//// "jdbc:mysql://146.48.87.169:3306/aquamaps",
// "example");
// } catch (IOException e) {
// // TODO Auto-generated catch block
// e.printStackTrace();
// }
//
// // SmartSampleOnTable operation
//
// try {
// // for database postgres, if a table is not in lower case format, it
// // is necessary to include the table name in quotes ""
// mgt.randomSampleOnTable("example", "col2oct2010");
// } catch (Exception e) {
// // TODO Auto-generated catch block
// e.printStackTrace();
//
// AnalysisLogger.getLogger().debug(
// "In RegressionRandomSampleOnTable->EXCEPTION: " + e);
// }
//
//
// }
//
// //Mysql database
// private static void testMysql3() {
//
// // connection to database
// DatabaseManagement mgt = new DatabaseManagement("");
//
// try {
// mgt.createConnection("root", "test", "com.mysql.jdbc.Driver",
// "org.hibernate.dialect.MySQLDialect",
// "jdbc:mysql://146.48.87.169:3306/col2oct2010",
//// "jdbc:mysql://146.48.87.169:3306/aquamaps",
// "common_names");
// } catch (IOException e) {
// // TODO Auto-generated catch block
// e.printStackTrace();
// }
//
// // SmartSampleOnTable operation
//
// try {
// // for database postgres, if a table is not in lower case format, it
// // is necessary to include the table name in quotes ""
// mgt.randomSampleOnTable("common_names", "col2oct2010");
// } catch (Exception e) {
// // TODO Auto-generated catch block
// e.printStackTrace();
//
// AnalysisLogger.getLogger().debug(
// "In RegressionRandomSampleOnTable->EXCEPTION: " + e);
// }
//
//
// }
//
//}

View File

@ -0,0 +1,91 @@
//package org.gcube.dataanalysis.test;
//
//import java.io.IOException;
//
//import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
//import org.gcube.dataanalysis.databases.utils.DatabaseManagement;
//import org.junit.rules.TestName;
//
//public class RegressionSmartSampleOnTable {
//
// // String [] testName = {"Postgres", "Mysql"};
//
// public static void main(String[] args) {
// // TODO Auto-generated method stub
//
// AnalysisLogger.getLogger().debug("Executing: " + "Postgres");
// testPostgres();
//
// AnalysisLogger.getLogger().debug("Executing: " + "Mysql");
//// testMysql();
//
// }
//
// // Postgres database
// private static void testPostgres() {
//
// // connection to database
// DatabaseManagement mgt = new DatabaseManagement("");
//
// try {
// mgt.createConnection(
// "postgres",
// "d4science2",
// "org.postgresql.Driver",
// "org.hibernate.dialect.PostgreSQLDialect",
// "jdbc:postgresql://geoserver-dev.d4science-ii.research-infrastructures.eu:5432/aquamapsdb",
// "aquamapsdb");
// } catch (IOException e) {
// // TODO Auto-generated catch block
// e.printStackTrace();
// }
//
// // SmartSampleOnTable operation
//
// try {
// // for database postgres, if a table is not in lower case format, it
// // is necessary to include the table name in quotes ""
// mgt.smartSampleOnTable("Divisions", "public");
// } catch (Exception e) {
// // TODO Auto-generated catch block
// e.printStackTrace();
//
// AnalysisLogger.getLogger().debug(
// "In TestSmartSampleOnTable->EXCEPTION: " + e);
// }
//
// }
//
// // Mysql database
// private static void testMysql() {
//
// // connection to database
// DatabaseManagement mgt = new DatabaseManagement("");
//
// try {
// mgt.createConnection("root", "test", "com.mysql.jdbc.Driver",
// "org.hibernate.dialect.MySQLDialect",
// // "jdbc:mysql://146.48.87.169:3306/col2oct2010",
// "jdbc:mysql://146.48.87.169:3306/aquamaps", "hcaf_d");
// } catch (IOException e) {
// // TODO Auto-generated catch block
// e.printStackTrace();
// }
//
// // SmartSampleOnTable operation
//
// try {
// // for database postgres, if a table is not in lower case format, it
// // is necessary to include the table name in quotes ""
// mgt.smartSampleOnTable("hcaf_d", "aquamaps");
// } catch (Exception e) {
// // TODO Auto-generated catch block
// e.printStackTrace();
//
// AnalysisLogger.getLogger().debug(
// "In TestSampleOnTable->EXCEPTION: " + e);
// }
//
// }
//
//}

View File

@ -0,0 +1,433 @@
//package org.gcube.dataanalysis.test;
//import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
//import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
//
//import java.io.BufferedReader;
//import java.io.IOException;
//import java.io.InputStreamReader;
//import java.util.List;
//import java.util.Scanner;
//
//import org.gcube.common.encryption.StringEncrypter;
//import org.gcube.common.resources.gcore.ServiceEndpoint;
//import org.gcube.common.scope.api.ScopeProvider;
//import org.gcube.dataanalysis.databases.resources.DBResource;
//import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
//import org.gcube.resources.discovery.client.api.DiscoveryClient;
//import org.gcube.resources.discovery.client.queries.impl.XQuery;
//import org.hibernate.SessionFactory;
//
//public class TestApp {
//
// /**
// * @param args
// */
// public static void main(String[] args) {
// // TODO Auto-generated method stub
//
//
//// ScopeProvider.instance.set("/gcube/devsec/devVRE");
// ScopeProvider.instance.set("/gcube/devsec");
//
//
//
// XQuery query = queryFor(ServiceEndpoint.class);
// query.addCondition("$resource/Profile/Category/text() eq 'Database'");
//
// DiscoveryClient<DBResource> prova=clientFor(DBResource.class);
// List<DBResource> access = prova.submit(query);
//
//
// System.out.println("size resource: "+access.size());
//
//
//
//
//
// for(int i=0;i<access.size();i++){
//
//
// //access.get(i).parse();
// System.out.println("{ ID: "+access.get(i).getID()
// +" ResourceName: "+access.get(i).getResourceName()+" HostedOn: "+access.get(i).getHostedOn()
// +" PlatformName: "+access.get(i).getPlatformName()+" PlatformVersion: "+access.get(i).getPlatformVersion()+" }");
//
// System.out.println();
//
// for (int j=0;j<access.get(i).getAccessPoints().size();j++){
//
//
// System.out.println("############################# AccessPointInfo #################################");
// System.out.println("Description: "+access.get(i).getAccessPoints().get(j).getDescription()
// +"\n"+"Endpoint: "+access.get(i).getAccessPoints().get(j).address()
// //+"\n"+"Port Number: "+access.get(i).getAccessPoints().get(j).getPort());
// +"\n"+"Port Number: "+access.get(i).getPort());
//// );
//
//
// System.out.println("Username: "+access.get(i).getAccessPoints().get(j).getUsername()
// +"\n"+"Password: "+access.get(i).getAccessPoints().get(j).getPassword()
// +"\n"+"DatabaseName: "+access.get(i).getAccessPoints().get(j).getDatabaseName()
// +"\n"+"Driver: "+access.get(i).getAccessPoints().get(j).getDriver()
// +"\n"+"Dialect: "+access.get(i).getAccessPoints().get(j).getDialect()
// +"\n"+"MaxConnections: "+access.get(i).getAccessPoints().get(j).getMaxConnections()
// +"\n"+"Schema: "+access.get(i).getAccessPoints().get(j).getSchema()
// +"\n"+"tableSpaceCount: "+access.get(i).getAccessPoints().get(j).getTableSpaceCount()
// +"\n"+"tableSpacePrefix: "+access.get(i).getAccessPoints().get(j).getTableSpacePrefix());
//
//
// //System.out.println("Dim: "+access.get(i).getAccessPoints().get(j).getAuxiliaryProperties().size());
//
// if (access.get(i).getAccessPoints().get(j).getAuxiliaryProperties().size()!=0){
//
//
//
// System.out.println("AuxiliaryProperty: "+"'aquamapsWorldTable' "+access.get(i).getAccessPoints().get(j).getAuxiliaryProperties().get("aquamapsWorldTable"));
// System.out.println("AuxiliaryProperty: "+"'aquamapsDataStore' "+access.get(i).getAccessPoints().get(j).getAuxiliaryProperties().get("aquamapsDataStore"));
//
//
//
//
//
//
// }
// }
//
// System.out.println();
// System.out.println();
//
//
//
// //System.out.println("ID "+access.get(i).getID()+" User "+access.get(i).getTestData().size());
//
// }
//
//
//
//
// // Fase di Selezione del DB e Normalizzazione
//
// //access.get(2).normalize("//geoserver-dev.d4science-ii.research-infrastructures.eu:5432/aquamapsdb");
//
//// access.get(1).normalize("jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
//
//
// // retrieve the decrypted version
//// try {
//// String password = StringEncrypter.getEncrypter().decrypt("Db/lnp5cAPwrAfjqorqctA==");
////
//// System.out.println("password Obis: " +password);
//// } catch (Exception e) {
//// // TODO Auto-generated catch block
//// e.printStackTrace();
//// }
////
//
//
//
// Scanner sc = new Scanner(System.in);
//
// System.out.println("Seleziona la risorsa database: inserisci un numero fra 0 e "+Integer.toString((access.size())-1));
// System.out.println();
//
// String index_resource = sc.next();
//
// System.out.println("Ho letto: " + index_resource);
//
//
// int resourceIndex= Integer.valueOf(index_resource).intValue();
//
//
//// System.out.println("Inserisci l'Endpoint della risorsa");
//// String Endpoint_value = sc.next();
//
//
// System.out.println("Seleziona il database: inserisci un numero fra 0 e "+Integer.toString(access.get(resourceIndex).getAccessPoints().size()-1));
// System.out.println();
//
//
// String db_value = sc.next();
//
// System.out.println("Ho letto: " + db_value);
//
//
// int dbIndex= Integer.valueOf(db_value).intValue();
//
// //access.get(resourceIndex).normalize(access.get(resourceIndex).getAccessPoints().get(dbIndex).address());
//
//
//
//
//
//
// //Fase di Stampa
//
//
//
//
//
//
//
//
//
//
//
// ///***** Da qui ho commentato
//
//// try {
// access.get(resourceIndex).normalize(dbIndex);
//
//
// System.out.println();
// System.out.println();
//
//
//
// System.out.println("---------------------------------------------------------------"+" Normalization: "+"-----------------------------------------------------");
// System.out.println();
// System.out.println();
// System.out.println();
// System.out.println();
//
//
// for(int i=0;i<access.size();i++){
//
//
// //access.get(i).parse();
// System.out.println("{ ID: "+access.get(i).getID()
// +" ResourceName: "+access.get(i).getResourceName()+" HostedOn: "+access.get(i).getHostedOn()
// +" PlatformName: "+access.get(i).getPlatformName()+" PlatformVersion: "+access.get(i).getPlatformVersion()+" }");
//
//
// System.out.println();
//
// for (int j=0;j<access.get(i).getAccessPoints().size();j++){
//
//
// System.out.println("############################# AccessPointInfo #################################");
// System.out.println("Description: "+access.get(i).getAccessPoints().get(j).getDescription()
// +"\n"+"Endpoint: "+access.get(i).getAccessPoints().get(j).address()
// //+"\n"+"Port Number: "+access.get(i).getAccessPoints().get(j).getPort());
// +"\n"+"Port Number: "+access.get(i).getPort());
//
//
//
//
// System.out.println("Username: "+access.get(i).getAccessPoints().get(j).getUsername()
// +"\n"+"Password: "+access.get(i).getAccessPoints().get(j).getPassword()
// +"\n"+"DatabaseName: "+access.get(i).getAccessPoints().get(j).getDatabaseName()
// +"\n"+"Driver: "+access.get(i).getAccessPoints().get(j).getDriver()
// +"\n"+"Dialect: "+access.get(i).getAccessPoints().get(j).getDialect()
// +"\n"+"MaxConnections: "+access.get(i).getAccessPoints().get(j).getMaxConnections()
// +"\n"+"Schema: "+access.get(i).getAccessPoints().get(j).getSchema()
// +"\n"+"tableSpaceCount: "+access.get(i).getAccessPoints().get(j).getTableSpaceCount()
// +"\n"+"tableSpacePrefix: "+access.get(i).getAccessPoints().get(j).getTableSpacePrefix());
//
//
// //System.out.println("Dim: "+access.get(i).getAccessPoints().get(j).getAuxiliaryProperties().size());
//
// if (access.get(i).getAccessPoints().get(j).getAuxiliaryProperties().size()!=0){
// System.out.println();
//
//
// System.out.println("AuxiliaryProperty: "+"'aquamapsWorldTable' "+access.get(i).getAccessPoints().get(j).getAuxiliaryProperties().get("aquamapsWorldTable"));
// System.out.println("AuxiliaryProperty: "+"'aquamapsDataStore' "+access.get(i).getAccessPoints().get(j).getAuxiliaryProperties().get("aquamapsDataStore"));
//
//
//
//
// }
// }
//
// System.out.println();
// System.out.println();
//
//
//
// //System.out.println("ID "+access.get(i).getID()+" User "+access.get(i).getTestData().size());
//
// }
//
//
//
//
// System.out.println("---------------------------------------------------------------"+" Database Query: "+"-----------------------------------------------------");
//
//// // Sottomissione query
////
//// System.out.println("Insert the Query");
////
////
////
////
//// InputStreamReader is = new InputStreamReader(System.in);
//// BufferedReader br = new BufferedReader(is);
////
//// String q="";
//// try {
//// q = br.readLine();
//// } catch (IOException e1) {
//// // TODO Auto-generated catch block
//// e1.printStackTrace();
//// }
////
//// System.out.println("Letta: "+q);
//
//
//
// org.gcube.dataanalysis.databases.utilsold.ConnectionManager df=new org.gcube.dataanalysis.databases.utilsold.ConnectionManager();
//
//// AlgorithmConfiguration config=df.setconfiguration
//// ("./cfg/", access.get(1).getAccessPoints().get(0).getUsername(), access.get(1).getAccessPoints().get(0).getPassword(),
//// access.get(1).getAccessPoints().get(0).getDriver(), access.get(1).getAccessPoints().get(0).getDialect(),
//// access.get(1).getAccessPoints().get(0).address());
//
//
//
//
//
// //Codice funzionante
//// AlgorithmConfiguration config=df.setconfiguration("./cfg/", "utente", "d4science", "org.postgresql.Driver","org.hibernate.dialect.MySQLDialect",
//// "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu:5432/testdb");
//
//
//
//
// AlgorithmConfiguration config;
// try {
// config = df.setconfiguration("./cfg/", access.get(resourceIndex).getAccessPoints().get(dbIndex).getUsername(),
// access.get(resourceIndex).getAccessPoints().get(dbIndex).getPassword(), access.get(resourceIndex).getAccessPoints().get(dbIndex).getDriver(), access.get(resourceIndex).getAccessPoints().get(dbIndex).getDialect(),
// access.get(resourceIndex).getAccessPoints().get(dbIndex).address(), access.get(resourceIndex).getAccessPoints().get(dbIndex).getDatabaseName());
//
// SessionFactory sf=df.createConnection(config);
//
// boolean val=sf.isClosed();
//
// if (val!=true){
//
//
// System.out.println("la connessione è attiva");
//
// }
//
//
// } catch (IOException e) {
// // TODO Auto-generated catch block
// e.printStackTrace();
// }
//
//
//// if (config!=null){
//
//
//// SessionFactory sf=df.createConnection(config);
////
//// boolean val=sf.isClosed();
////
//// if (val!=true){
////
////
//// System.out.println("la connessione è attiva");
////
//// }
//
//
//
//// try {
////// List<Object> rawnames = df.executeQuery("select * from (select a.field1 as aa, b.field1_id as bb, b.field1 as cc from a7f768710_c7b0_11df_b2bc_e0f36cf4c8cd as a "+
////// "left join cur_00d4e2d0_ecbd_11df_87fa_de008e0850ff as b on a.field3_id=b.field1_id) as cd", sf);
////
//// /*List<Object> rawnames = df.executeQuery("select * from (select a.field1, b.field1_id, b.field1 from a7f768710_c7b0_11df_b2bc_e0f36cf4c8cd as a "+
//// "left join cur_00d4e2d0_ecbd_11df_87fa_de008e0850ff as b on a.field3_id=b.field1_id) as cd", sf);*/
////
////// List<Object> rawnames = df.executeQuery("select a.field1 as aa, b.field1_id as bb, b.field1 as cc from a7f768710_c7b0_11df_b2bc_e0f36cf4c8cd as a "+
////// "left join cur_00d4e2d0_ecbd_11df_87fa_de008e0850ff as b on a.field3_id=b.field1_id)", sf);
////
////
////
////
////
//// //Query funzionante
////
////// List<Object> rawnames = df.executeQuery("select a.field1, b.field1_id from a7f768710_c7b0_11df_b2bc_e0f36cf4c8cd as a "+
////// "left join cur_00d4e2d0_ecbd_11df_87fa_de008e0850ff as b on a.field3_id=b.field1_id", sf);
////
////
//// //Query non funzionante
////// List<Object> rawnames = df.executeQuery("select a.field1, b.field1_id, b.field1 from a7f768710_c7b0_11df_b2bc_e0f36cf4c8cd as a "+
////// "left join cur_00d4e2d0_ecbd_11df_87fa_de008e0850ff as b on a.field3_id=b.field1_id", sf);
////
//// //Query funzionante con alias
////
////// List<Object> rawnames = df.executeQuery("select a.field1 as aa, b.field1_id as bb, b.field1 as cc from a7f768710_c7b0_11df_b2bc_e0f36cf4c8cd as a "+
////// "left join cur_00d4e2d0_ecbd_11df_87fa_de008e0850ff as b on a.field3_id=b.field1_id", sf);
////
//// //Query funzionante
////// List<Object> rawnames = df.executeQuery("select a.source_data as sourceA, b.source_data as sourceB, a.target_data_scientific_name as targetA, b.target_data_scientific_name as tagertB " +
////// "from bionymoutsimplefaked1csvpreprcsv as a join bionymoutfaked1csvpreprcsv as b on a.source_data=b.source_data limit 10",sf);
////
//// List<Object> rawnames = df.executeQuery(q,sf);
////
////
//// System.out.println("***************************************************************");
//// System.out.println();
////
//// System.out.println("Size: "+rawnames.size());
// //
//// for (int i = 0; i < rawnames.size(); i++) {
// //
//// Object[] row = (Object[]) rawnames.get(i);
// //
//// for (int j = 0; j < row.length; j++) {
// //
//// System.out.print("\"" + row[j] + "\"; ");
// //
//// }
//// System.out.println();
//// //System.out.println("Fine ");
// //
//// }
////
////
////
//// } catch (Exception e) {
//// // TODO Auto-generated catch block
//// //e.printStackTrace();
////
//// System.out.println("***************************************************************");
//// System.out.println();
////
//// String error=e.getCause().toString();
////
//// if (error.contains("MySQLSyntaxErrorException"))
//// {
////
//// System.out.println("ERROR "+e.getMessage()+" "+"because an error is present: "+e.getCause().getMessage());
//// System.out.println("Suggestion: insert an alias name for the columns");
////
////
//// }
////
////
//// }
//
//
//
//// }else {
//// System.out.println("ERRORE: Non è possibile eseguire la connessione perchè l'indirizzo non è completo: databasename non dsiponibile");
//// }8
//
//
//
//// } catch (IOException e1) {
//// // TODO Auto-generated catch block
//// e1.printStackTrace();
//// }
//
//
//
//
//
//
// }
//}

View File

@ -0,0 +1,34 @@
package org.gcube.dataanalysis.test;
/** Class that interacts with the IS in order to recover some information about the available databases and the data they contain.
* Once the data of interest has been recovered, it can be imported into the database used by the SM.
* */
public class TestDatabasesResourcesManager {

	/**
	 * Entry point; the interaction with the IS is only sketched in the
	 * comments below and is not implemented yet, so this method is a no-op.
	 *
	 * @param args unused command-line arguments
	 */
	public static void main(String[] args) {
		// Planned workflow:
		//  1. the user sets the scope
		//  2. the user views the databases available in that scope
		//  3. the user selects one database
		//  4. the user inspects its characteristics: tables, row counts, create statements
		//  5. the user recovers data from a table by submitting a 'select' query
		//  6. (open question) create a table in the SM database and import the recovered data
	}
}

View File

@ -0,0 +1,114 @@
package org.gcube.dataanalysis.test;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.List;
import java.util.Scanner;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.databases.utils.DatabaseManagement;
/** Test driver for the recovery of table information through DatabaseManagement. */
public class TestGetTables {

	/**
	 * Reads the name of a configuration file from standard input; the disabled
	 * body below then used it to open a DatabaseManagement session and to list
	 * schemas, list tables, fetch a "CREATE TABLE" statement and count the rows
	 * of a table chosen interactively.
	 *
	 * @param args unused command-line arguments
	 */
	public static void main(String[] args) {
		Scanner sc = new Scanner(System.in);
		try {
			System.out.println("Specifica il nome del file di configurazione");
			String configurationfile = sc.next();
			//TODO: decommentare per la corretta esecuzione della classe
			// The original (disabled) flow was:
			//   DatabaseManagement obj = new DatabaseManagement("./cfg/", configurationfile);
			//   List<String> schemas = obj.getSchemas();       // print each schema name, or report none
			//   List<String> tables  = obj.getTables();        // print each table name
			//   String tablename     = sc.next();              // table chosen interactively
			//   String createtable   = obj.getCreateTable(tablename);
			//   BigInteger rows      = obj.getNumberOfRows(tablename);
			// with the row count guarded by a try/catch reporting a missing table.
		} finally {
			// close the scanner so the System.in wrapper is released (was leaked before)
			sc.close();
		}
	}
}

View File

@ -0,0 +1,145 @@
//package org.gcube.dataanalysis.test;
//
//import java.util.Scanner;
//import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
//import org.gcube.dataanalysis.databases.lexer.MySQLLexicalAnalyzer;
//import org.gcube.dataanalysis.databases.lexer.PostgresLexicalAnalyzer;
//
//
///** Class that tests the query's parsing by means of a lexical analyzer. It allows filtering of queries that are not read-only compliant. */
//public class TestParsing {
//
//
// public static void main(String[] args) {
// // TODO Auto-generated method stub
//
// String query="INSERT INTO ANTIQUES VALUES (21, 01, 'Ottoman', 200.00);";
//
// // String query =
// // "SELECT COUNT(*) FROM EMPLOYEESTATISTICSTABLE WHERE POSITION = 'Staff';";
// // String
// // query="SELECT COUNT(*) FROM EMPLOYEESTATISTICSTABLE WHERE POSITION = 'Staff';";
//
// // String query =
// // "SELECT EMPLOYEEIDNO FROM EMPLOYEESTATISTICSTABLE WHERE SALARY<40000 OR BENEFITS<10000;";
//
// // String
// // query="SELECT EMPLOYEEIDNO FROM EMPLOYEESTATISTICSTABLE WHERE POSITION='Manager' AND SALARY>60000 OR BENEFITS>12000;";
//
// // String
// // query="SELECT EMPLOYEEIDNO FROM EMPLOYEESTATISTICSTABLE WHERE SALARY>=50000;";
//
// // String
// // query="SELECT EMPLOYEEIDNO FROM EMPLOYEESTATISTICSTABLE WHERE SALARY<=50000;";
//
// // String
// // query="SELECT SUM(SALARY), AVG(SALARY) FROM EMPLOYEESTATISTICSTABLE;";
//
// // String
// // query="SELECT FirstName, LastName, Address, City, State FROM EmployeeAddressTable;";
//
// // String
// // query="SELECT EMPLOYEEIDNO FROM EMPLOYEESTATISTICSTABLE WHERE SALARY<>50000;";
//
// // String query="INSERT INTO ANTIQUES from select * from hcaf_d;";
//
// // String query = "COPY lori FROM lavoro";
//
//// String query = "COPY lori FROM \"lavoro\";";
//
//
//
//
// // String query="SELECT TITLE, DIRECTOR "
// // +"FROM MOVIE "
// // +"WHERE MOVIE_ID IN"
// // +"("
// // +"("
// // +"SELECT MOVIE_ID "
// // +"FROM ACTOR "
// // +"WHERE NAME=?Tom Cruise? "
// // +"UNION "
// // +"SELECT "
// // +"MOVIE_ID "
// // +"FROM ACTOR "
// // +"WHERE NAME=?Kelly McGillis? "
// // +")"
// // +"INTERSECT "
// // +"SELECT MOVIE_ID "
// // +"FROM KEYWORD "
// // +"WHERE KEYWORD=?drama? "
// // +");";
//
// // String query="select * from `drop` where `drop`.id>10;";
//
// // System.out.println("Inserisci la query");
// // Scanner scanIn = new Scanner(System.in);
// //
// // String query=scanIn.nextLine();
// //
// // scanIn.close();
//
// // StringTokenizer string=new StringTokenizer(query, " ()[]{}<;>=,",
// // false);
// //
// // ArrayList<String> tokenslist=new ArrayList<String>();
// //
// // //StringTokenizer
// // AnalysisLogger.getLogger().debug("------------ Tokenizer ----------- ");
// //
// //
// // int count = string.countTokens();
// //
// // for (int i=0; i< count; i++){
// //
// // String token=string.nextToken();
// //
// //
// // tokenslist.add(token);
// //
// // AnalysisLogger.getLogger().debug("TestParsing->: "+ token);
// //
// // }
//
// AnalysisLogger.getLogger().debug("TestParsing->: Query " + query);
// // System.out.println();
//
// boolean AllowedQuery = false;
//
// // LexicalAnalyzer lexer=new LexicalAnalyzer();
// // AllowedQuery=lexer.analyze(query);
//
// System.out.println("Specifica il tipo di piattaforma");
//
// Scanner scanIn = new Scanner(System.in);
// String platform = scanIn.nextLine();
// scanIn.close();
//
// if (platform.toLowerCase().contains("postgres")) {
//
// PostgresLexicalAnalyzer obj = new PostgresLexicalAnalyzer();
//
// AllowedQuery = obj.analyze(query);
//
// }
//
// if (platform.toLowerCase().contains("mysql")) {
//
// MySQLLexicalAnalyzer obj = new MySQLLexicalAnalyzer();
//
// AllowedQuery = obj.analyze(query);
//
// }
//
// if (AllowedQuery == true) {
//
// AnalysisLogger.getLogger().debug("TestParsing->: filtered Query");
//
// } else {
// AnalysisLogger.getLogger().debug(
// "TestParsing->: not filtered query");
// }
//
// }
//
//}

View File

@ -0,0 +1,284 @@
package org.gcube.dataanalysis.test;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
import org.hibernate.Query;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
/** Test driver that opens a Hibernate session towards a MySQL test database and runs a raw SQL join query. */
public class TestPostgres {

	/**
	 * Executes a query through the current Hibernate session of the given
	 * factory and returns the result list.
	 *
	 * @param query            the HQL or native SQL text to run
	 * @param DBSessionFactory factory whose current session is used
	 * @param useSQL           true to run the text as native SQL, false as HQL
	 * @return the rows produced by the query, or null when the result is empty
	 * @throws Exception any failure raised while executing the query
	 */
	@SuppressWarnings({"unchecked"})
	public static List<Object> executeHQLQuery(String query, SessionFactory DBSessionFactory, boolean useSQL) throws Exception{
		List<Object> obj = null;
		Session ss = null;
		try {
			ss = DBSessionFactory.getCurrentSession();
			ss.beginTransaction();
			// build either a native SQL query or an HQL query from the same text
			Query qr = useSQL ? ss.createSQLQuery(query) : ss.createQuery(query);
			List<Object> result = qr.list();
			ss.getTransaction().commit();
			// an empty result is normalised to null (kept for caller compatibility)
			if (result != null && result.size() != 0) {
				obj = result;
			}
		} catch (Exception e) {
			// roll back the transaction opened above so the session is not left
			// with a dangling active transaction (it was leaked before)
			if (ss != null && ss.getTransaction() != null && ss.getTransaction().isActive()) {
				ss.getTransaction().rollback();
			}
			System.out.println(String.format("Error while executing query: %1$s %2$s", query, e.getMessage()));
			throw e;
		}
		return obj;
	}

	/**
	 * Connects to a fixed MySQL test database, runs a left-join query on two
	 * imported test tables and prints every returned row; on failure the
	 * exception cause is inspected to report MySQL syntax errors.
	 *
	 * @param args unused command-line arguments
	 * @throws Exception when the database session cannot be initialised
	 */
	public static void main(String[] args) throws Exception{
		// NOTE(review): an earlier, commented-out variant of this test targeted a
		// Postgres instance (statistical-manager testdb); only the MySQL path is live.

		// hard-coded connection parameters of the MySQL test instance
		AlgorithmConfiguration config = new AlgorithmConfiguration();
		config.setConfigPath("./cfg/");
		config.setParam("DatabaseUserName", "root");
		config.setParam("DatabasePassword", "test");
		config.setParam("DatabaseDriver","com.mysql.jdbc.Driver");
		config.setParam("DatabaseDialect", "org.hibernate.dialect.MySQLDialect");
		config.setParam(
				"DatabaseURL",
				"jdbc:mysql://146.48.87.169/timeseries");

		SessionFactory dbconnection = DatabaseUtils.initDBSession(config);

		System.out.println("***************************************************************");
		System.out.println();

		try{
			// left join between two imported time-series tables
			List<Object> rawnames = executeHQLQuery(
					"select a.field1, b.field1_id from a7f768710_c7b0_11df_b2bc_e0f36cf4c8cd as a "+
					"left join cur_00d4e2d0_ecbd_11df_87fa_de008e0850ff as b on a.field3_id=b.field1_id",dbconnection, true);

			// executeHQLQuery returns null for an empty result set (was an NPE risk before)
			if (rawnames == null) {
				System.out.println("Size: 0");
			} else {
				System.out.println("Size: "+rawnames.size());
				// print every row, each column value quoted and separated by "; "
				for (int i = 0; i < rawnames.size(); i++) {
					Object[] row = (Object[]) rawnames.get(i);
					for (int j = 0; j < row.length; j++) {
						System.out.print("\"" + row[j] + "\"; ");
					}
					System.out.println();
				}
			}
		}catch(Exception e){
			e.printStackTrace();
			System.out.println("message: "+e.getMessage());
			// guard against exceptions without a cause (was an NPE risk before)
			Throwable cause = e.getCause();
			if (cause != null) {
				System.out.println("localized: "+cause.toString());
				String error=cause.toString();
				if (error.contains("MySQLSyntaxErrorException"))
				{
					System.out.println("ERROR "+e.getMessage()+" "+"because an error is present: "+cause.getMessage());
				}
			}
		}
	}
}

View File

@ -0,0 +1,40 @@
package org.gcube.dataanalysis.test;
import java.util.ArrayList;
import java.util.List;
import org.gcube.dataanalysis.databases.accessold.AddressesDiscoverer;
/**
 * Driver that discovers the "Database" runtime resources registered in the
 * /gcube/devsec scope and prints every address that was found.
 */
public class TestRetrieveEndpoint {

	/**
	 * @param args unused command-line arguments
	 */
	public static void main(String[] args) {
		AddressesDiscoverer discoverer = new AddressesDiscoverer();
		discoverer.setScope("/gcube/devsec");
		List<String> addresses = discoverer.retrieveAddress("Database");
		// dump each discovered endpoint on its own line
		for (String address : addresses) {
			System.out.println(address);
		}
	}
}

View File

@ -0,0 +1,88 @@
//package org.gcube.dataanalysis.test;
//
//import java.io.IOException;
//
//import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
//import org.gcube.dataanalysis.databases.utils.DatabaseManagement;
//
//public class TestSampleOnTable {
//
// public static void main(String[] args) {
//
// // connection to database
// DatabaseManagement mgt = new DatabaseManagement("");
//
//
//
//
// //Postgres Database
// try {
// mgt.createConnection(
// "postgres",
// "d4science2",
// "org.postgresql.Driver",
// "org.hibernate.dialect.PostgreSQLDialect",
// "jdbc:postgresql://geoserver-dev.d4science-ii.research-infrastructures.eu:5432/aquamapsdb",
// "aquamapsdb");
// } catch (IOException e) {
// // TODO Auto-generated catch block
// e.printStackTrace();
// }
//
// // SampleOnTable operation
//
// try {
// // for database postgres, if a table is not in lower case format, it
// // is necessary to include the table name in quotes ""
// mgt.sampleOnTable("Divisions", "public");
// } catch (Exception e) {
// // TODO Auto-generated catch block
// e.printStackTrace();
//
// AnalysisLogger
// .getLogger()
// .debug("In TestSampleOnTable->EXCEPTION: "+ e);
// }
//
// // SmartSampleOnTable operation
//
// // mgt.smartSampleOnTable(tableName);
//
//
//
//
//
// //MYSQL Database
//
//// try {
//// mgt.createConnection(
//// "root",
//// "test",
//// "com.mysql.jdbc.Driver",
//// "org.hibernate.dialect.MySQLDialect",
////// "jdbc:mysql://146.48.87.169:3306/col2oct2010",
//// "jdbc:mysql://146.48.87.169:3306/aquamaps",
//// "hcaf_d");
//// } catch (IOException e) {
//// // TODO Auto-generated catch block
//// e.printStackTrace();
//// }
////
//// // SampleOnTable operation
////
//// try {
//// // for database postgres, if a table is not in lower case format, it
//// // is necessary to include the table name in quotes ""
//// mgt.sampleOnTable("hcaf_d", "aquamaps");
//// } catch (Exception e) {
//// // TODO Auto-generated catch block
//// e.printStackTrace();
////
//// AnalysisLogger
//// .getLogger()
//// .debug("In TestSampleOnTable->EXCEPTION: "+ e);
//// }
//
// }
//
//}

View File

@ -0,0 +1,109 @@
//package org.gcube.dataanalysis.test;
//
//import java.io.IOException;
//
//import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
//import org.gcube.dataanalysis.databases.utils.DatabaseManagement;
//
//public class TestSmartSampleOnTable {
//
// public static void main(String[] args) {
//
// // connection to database
// DatabaseManagement mgt = new DatabaseManagement("");
//
//// // Postgres Database
//// try {
//// mgt.createConnection(
//// "postgres",
//// "d4science2",
//// "org.postgresql.Driver",
//// "org.hibernate.dialect.PostgreSQLDialect",
//// "jdbc:postgresql://geoserver-dev.d4science-ii.research-infrastructures.eu:5432/aquamapsdb",
//// "aquamapsdb");
//// } catch (IOException e) {
//// // TODO Auto-generated catch block
//// e.printStackTrace();
//// }
////
//// // SampleOnTable operation
////
//// try {
//// // for database postgres, if a table is not in lower case format, it
//// // is necessary to include the table name in quotes ""
//// mgt.smartSampleOnTable("Divisions", "public");
//// } catch (Exception e) {
//// // TODO Auto-generated catch block
//// e.printStackTrace();
////
//// AnalysisLogger.getLogger().debug(
//// "In TestSmartSampleOnTable->EXCEPTION: " + e);
//// }
//
//
//
// //Mysql database
//
// try {
// mgt.createConnection(
// "root",
// "test",
// "com.mysql.jdbc.Driver",
// "org.hibernate.dialect.MySQLDialect",
//// "jdbc:mysql://146.48.87.169:3306/col2oct2010",
// "jdbc:mysql://146.48.87.169:3306/aquamaps",
// "hcaf_d");
// } catch (IOException e) {
// // TODO Auto-generated catch block
// e.printStackTrace();
// }
//
// // SampleOnTable operation
//
// try {
// // for database postgres, if a table is not in lower case format, it
// // is necessary to include the table name in quotes ""
// mgt.smartSampleOnTable("hcaf_d", "aquamaps");
// } catch (Exception e) {
// // TODO Auto-generated catch block
// e.printStackTrace();
//
// AnalysisLogger
// .getLogger()
// .debug("In TestSampleOnTable->EXCEPTION: "+ e);
// }
//
// //Mysql database
//
//// try {
//// mgt.createConnection(
//// "root",
//// "test",
//// "com.mysql.jdbc.Driver",
//// "org.hibernate.dialect.MySQLDialect",
////// "jdbc:mysql://146.48.87.169:3306/col2oct2010",
//// "jdbc:mysql://146.48.87.169:3306/col2oct2010",
//// "common_names");
//// } catch (IOException e) {
//// // TODO Auto-generated catch block
//// e.printStackTrace();
//// }
////
//// // SampleOnTable operation
////
//// try {
//// // for database postgres, if a table is not in lower case format, it
//// // is necessary to include the table name in quotes ""
//// mgt.smartSampleOnTable("common_names", "col2oct2010");
//// } catch (Exception e) {
//// // TODO Auto-generated catch block
//// e.printStackTrace();
////
//// AnalysisLogger
//// .getLogger()
//// .debug("In TestSampleOnTable->EXCEPTION: "+ e);
//// }
//
// }
//
//}