diff --git a/.classpath b/.classpath
new file mode 100644
index 0000000..e527956
--- /dev/null
+++ b/.classpath
@@ -0,0 +1,33 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/.project b/.project
new file mode 100644
index 0000000..30b8cca
--- /dev/null
+++ b/.project
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+	<name>DatabasesResourcesManager</name>
+	<comment></comment>
+	<projects>
+	</projects>
+	<buildSpec>
+		<buildCommand>
+			<name>org.eclipse.jdt.core.javabuilder</name>
+			<arguments>
+			</arguments>
+		</buildCommand>
+		<buildCommand>
+			<name>org.eclipse.m2e.core.maven2Builder</name>
+			<arguments>
+			</arguments>
+		</buildCommand>
+	</buildSpec>
+	<natures>
+		<nature>org.eclipse.m2e.core.maven2Nature</nature>
+		<nature>org.eclipse.jdt.core.javanature</nature>
+	</natures>
+</projectDescription>
diff --git a/cfg/ALog.properties b/cfg/ALog.properties
new file mode 100644
index 0000000..875385d
--- /dev/null
+++ b/cfg/ALog.properties
@@ -0,0 +1,24 @@
+#### Use two appenders, one to log to console, another to log to a file
+#log4j.rootCategory= R
+
+#### First appender writes to the console (disabled)
+#log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+#log4j.appender.stdout.Threshold=trace
+#log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+#log4j.appender.stdout.layout.ConversionPattern=%d{dd/MM/yyyy HH:mm:ss} %p %t %c - %m%n
+
+log4j.logger.AnalysisLogger=TRACE,AR
+log4j.appender.AR=org.apache.log4j.RollingFileAppender
+log4j.appender.AR.Threshold=trace
+log4j.appender.AR.File=./Analysis.log
+log4j.appender.AR.MaxFileSize=50000KB
+log4j.appender.AR.MaxBackupIndex=2
+log4j.appender.AR.layout=org.apache.log4j.PatternLayout
+log4j.appender.AR.layout.ConversionPattern=%d{dd/MM/yyyy HH:mm:ss} %p %t %c - %m%n
+
+#### Second appender routes Hibernate logging (threshold OFF)
+log4j.logger.org.hibernate=H
+log4j.appender.H=org.apache.log4j.AsyncAppender
+log4j.appender.H.Threshold=OFF
+log4j.appender.H.layout=org.apache.log4j.PatternLayout
+log4j.appender.H.layout.ConversionPattern=%d{dd/MM/yyyy HH:mm:ss} %p %t %c - %m%n
diff --git a/cfg/DestinationDBHibernate.cfg.out.xml b/cfg/DestinationDBHibernate.cfg.out.xml
new file mode 100644
index 0000000..e69de29
diff --git a/cfg/DestinationDBHibernate.cfg.xml b/cfg/DestinationDBHibernate.cfg.xml
new file mode 100644
index 0000000..90d87a9
--- /dev/null
+++ b/cfg/DestinationDBHibernate.cfg.xml
@@ -0,0 +1,18 @@
+<?xml version='1.0' encoding='UTF-8'?>
+<!DOCTYPE hibernate-configuration PUBLIC "-//Hibernate/Hibernate Configuration DTD 3.0//EN" "http://hibernate.sourceforge.net/hibernate-configuration-3.0.dtd">
+<hibernate-configuration>
+	<session-factory>
+		<property name="connection.driver_class">com.mysql.jdbc.Driver</property>
+		<property name="connection.provider_class">org.hibernate.connection.C3P0ConnectionProvider</property>
+		<property name="connection.url">jdbc:mysql://146.48.87.169/timeseries</property>
+		<property name="connection.username">root</property>
+		<property name="connection.password">test</property>
+		<property name="dialect">org.hibernate.dialect.MySQLDialect</property>
+		<property name="transaction.factory_class">org.hibernate.transaction.JDBCTransactionFactory</property>
+		<property name="c3p0.timeout">0</property>
+		<property name="c3p0.max_size">1</property>
+		<property name="c3p0.max_statements">0</property>
+		<property name="c3p0.min_size">1</property>
+		<property name="current_session_context_class">thread</property>
+	</session-factory>
+</hibernate-configuration>
\ No newline at end of file
diff --git a/cfg/HibernateConfigurationMySQL.cfg.xml b/cfg/HibernateConfigurationMySQL.cfg.xml
new file mode 100644
index 0000000..c5b646e
--- /dev/null
+++ b/cfg/HibernateConfigurationMySQL.cfg.xml
@@ -0,0 +1,18 @@
+<?xml version='1.0' encoding='UTF-8'?>
+<!DOCTYPE hibernate-configuration PUBLIC "-//Hibernate/Hibernate Configuration DTD 3.0//EN" "http://hibernate.sourceforge.net/hibernate-configuration-3.0.dtd">
+<hibernate-configuration>
+	<session-factory>
+		<property name="connection.driver_class">com.mysql.jdbc.Driver</property>
+		<property name="connection.provider_class">org.hibernate.connection.C3P0ConnectionProvider</property>
+		<property name="connection.url">jdbc:mysql://146.48.87.169/col2oct2010</property>
+		<property name="connection.username">root</property>
+		<property name="connection.password">test</property>
+		<property name="dialect">org.hibernate.dialect.MySQLDialect</property>
+		<property name="transaction.factory_class">org.hibernate.transaction.JDBCTransactionFactory</property>
+		<property name="c3p0.timeout">0</property>
+		<property name="c3p0.max_size">1</property>
+		<property name="c3p0.max_statements">0</property>
+		<property name="c3p0.min_size">1</property>
+		<property name="current_session_context_class">thread</property>
+	</session-factory>
+</hibernate-configuration>
\ No newline at end of file
diff --git a/cfg/HibernateConfigurationPostgres.cfg.xml b/cfg/HibernateConfigurationPostgres.cfg.xml
new file mode 100644
index 0000000..80a0435
--- /dev/null
+++ b/cfg/HibernateConfigurationPostgres.cfg.xml
@@ -0,0 +1,19 @@
+<?xml version='1.0' encoding='UTF-8'?>
+<!DOCTYPE hibernate-configuration PUBLIC "-//Hibernate/Hibernate Configuration DTD 3.0//EN" "http://hibernate.sourceforge.net/hibernate-configuration-3.0.dtd">
+<hibernate-configuration>
+	<session-factory>
+		<property name="connection.driver_class">org.postgresql.Driver</property>
+		<property name="connection.provider_class">org.hibernate.connection.C3P0ConnectionProvider</property>
+		<property name="connection.url">jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb</property>
+		<property name="connection.username">utente</property>
+		<property name="connection.password">d4science</property>
+		<property name="dialect">org.hibernate.dialect.PostgreSQLDialect</property>
+		<property name="transaction.factory_class">org.hibernate.transaction.JDBCTransactionFactory</property>
+		<property name="default_schema">public</property>
+		<property name="c3p0.timeout">0</property>
+		<property name="c3p0.max_size">1</property>
+		<property name="c3p0.max_statements">0</property>
+		<property name="c3p0.min_size">1</property>
+		<property name="current_session_context_class">thread</property>
+	</session-factory>
+</hibernate-configuration>
\ No newline at end of file
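A minimal sketch of how one of the Hibernate configuration files above would typically be loaded, using the Hibernate 3 API that their settings (C3P0 connection provider, JDBC transaction factory) imply; the choice of file is illustrative:

```java
import java.io.File;

import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.cfg.Configuration;

public class HibernateConfigExample {
    public static void main(String[] args) {
        // build a SessionFactory from one of the cfg/*.cfg.xml files above
        SessionFactory factory = new Configuration()
                .configure(new File("cfg/HibernateConfigurationPostgres.cfg.xml"))
                .buildSessionFactory();
        Session session = factory.openSession();
        try {
            // read-only work against the configured database goes here
        } finally {
            session.close();
            factory.close();
        }
    }
}
```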
diff --git a/cfg/HibernateConfigurationPostgres1.cfg.xml b/cfg/HibernateConfigurationPostgres1.cfg.xml
new file mode 100644
index 0000000..97da43e
--- /dev/null
+++ b/cfg/HibernateConfigurationPostgres1.cfg.xml
@@ -0,0 +1,19 @@
+<?xml version='1.0' encoding='UTF-8'?>
+<!DOCTYPE hibernate-configuration PUBLIC "-//Hibernate/Hibernate Configuration DTD 3.0//EN" "http://hibernate.sourceforge.net/hibernate-configuration-3.0.dtd">
+<hibernate-configuration>
+	<session-factory>
+		<property name="connection.driver_class">org.postgresql.Driver</property>
+		<property name="connection.provider_class">org.hibernate.connection.C3P0ConnectionProvider</property>
+		<property name="connection.url">jdbc:postgresql://dbtest.research-infrastructures.eu/aquamapsorgupdated</property>
+		<property name="connection.username">utente</property>
+		<property name="connection.password">d4science</property>
+		<property name="dialect">org.hibernate.dialect.PostgreSQLDialect</property>
+		<property name="transaction.factory_class">org.hibernate.transaction.JDBCTransactionFactory</property>
+		<property name="default_schema">public</property>
+		<property name="c3p0.timeout">0</property>
+		<property name="c3p0.max_size">1</property>
+		<property name="c3p0.max_statements">0</property>
+		<property name="c3p0.min_size">1</property>
+		<property name="current_session_context_class">thread</property>
+	</session-factory>
+</hibernate-configuration>
\ No newline at end of file
diff --git a/cfg/algorithms.properties b/cfg/algorithms.properties
new file mode 100644
index 0000000..fdf28d7
--- /dev/null
+++ b/cfg/algorithms.properties
@@ -0,0 +1,13 @@
+AQUAMAPS_SUITABLE=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsSuitable
+AQUAMAPS_NATIVE=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsNative
+AQUAMAPS_NATIVE_2050=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsNative2050
+AQUAMAPS_SUITABLE_2050=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsSuitable2050
+AQUAMAPS_NATIVE_NEURALNETWORK=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsNN
+AQUAMAPS_SUITABLE_NEURALNETWORK=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsNNSuitable
+FEED_FORWARD_A_N_N_DISTRIBUTION=org.gcube.dataanalysis.ecoengine.spatialdistributions.FeedForwardNeuralNetworkDistribution
+LWR=org.gcube.dataanalysis.executor.nodes.algorithms.LWR
+BIONYM_BIODIV=org.gcube.dataanalysis.executor.nodes.transducers.bionym.BionymBiodiv
+BIONYM=org.gcube.dataanalysis.executor.nodes.transducers.bionym.BionymFlexibleWorkflowTransducer
+OCCURRENCES_MERGER=org.gcube.dataanalysis.executor.nodes.transducers.OccurrenceMergingNode
+OCCURRENCES_INTERSECTOR=org.gcube.dataanalysis.executor.nodes.transducers.OccurrenceIntersectionNode
+OCCURRENCES_SUBTRACTION=org.gcube.dataanalysis.executor.nodes.transducers.OccurrenceSubtractionNode
\ No newline at end of file
diff --git a/cfg/clusterers.properties b/cfg/clusterers.properties
new file mode 100644
index 0000000..f9321de
--- /dev/null
+++ b/cfg/clusterers.properties
@@ -0,0 +1,4 @@
+DBSCAN=org.gcube.dataanalysis.ecoengine.clustering.DBScan
+LOF=org.gcube.dataanalysis.ecoengine.clustering.LOF
+KMEANS=org.gcube.dataanalysis.ecoengine.clustering.KMeans
+XMEANS=org.gcube.dataanalysis.ecoengine.clustering.XMeansWrapper
\ No newline at end of file
diff --git a/cfg/evaluators.properties b/cfg/evaluators.properties
new file mode 100644
index 0000000..9d7a554
--- /dev/null
+++ b/cfg/evaluators.properties
@@ -0,0 +1,4 @@
+MAPS_COMPARISON=org.gcube.dataanalysis.geo.algorithms.MapsComparator
+DISCREPANCY_ANALYSIS=org.gcube.dataanalysis.ecoengine.evaluation.DiscrepancyAnalysis
+QUALITY_ANALYSIS=org.gcube.dataanalysis.ecoengine.evaluation.DistributionQualityAnalysis
+HRS=org.gcube.dataanalysis.ecoengine.evaluation.HabitatRepresentativeness
\ No newline at end of file
diff --git a/cfg/generators.properties b/cfg/generators.properties
new file mode 100644
index 0000000..2907162
--- /dev/null
+++ b/cfg/generators.properties
@@ -0,0 +1,3 @@
+LOCAL_WITH_DATABASE=org.gcube.dataanalysis.ecoengine.processing.LocalSplitGenerator
+SIMPLE_LOCAL=org.gcube.dataanalysis.ecoengine.processing.LocalSimpleSplitGenerator
+D4SCIENCE=org.gcube.dataanalysis.executor.generators.D4ScienceDistributedProcessing
\ No newline at end of file
diff --git a/cfg/modelers.properties b/cfg/modelers.properties
new file mode 100644
index 0000000..d5c652e
--- /dev/null
+++ b/cfg/modelers.properties
@@ -0,0 +1 @@
+HSPEN_MODELER=org.gcube.dataanalysis.ecoengine.modeling.SimpleModeler
\ No newline at end of
file diff --git a/cfg/models.properties b/cfg/models.properties new file mode 100644 index 0000000..1e54f69 --- /dev/null +++ b/cfg/models.properties @@ -0,0 +1,4 @@ +HSPEN=org.gcube.dataanalysis.ecoengine.models.ModelHSPEN +AQUAMAPSNN=org.gcube.dataanalysis.ecoengine.models.ModelAquamapsNN +FEED_FORWARD_ANN=org.gcube.dataanalysis.ecoengine.models.FeedForwardNN +#FEED_FORWARD_ANN_FILE=org.gcube.dataanalysis.ecoengine.models.testing.FeedForwardNNFile \ No newline at end of file diff --git a/cfg/nodealgorithms.properties b/cfg/nodealgorithms.properties new file mode 100644 index 0000000..700df1d --- /dev/null +++ b/cfg/nodealgorithms.properties @@ -0,0 +1,10 @@ +AQUAMAPS_SUITABLE=org.gcube.dataanalysis.executor.nodes.algorithms.AquamapsSuitableNode +AQUAMAPS_NATIVE=org.gcube.dataanalysis.executor.nodes.algorithms.AquamapsNativeNode +AQUAMAPS_NATIVE_2050=org.gcube.dataanalysis.executor.nodes.algorithms.AquamapsNative2050Node +AQUAMAPS_SUITABLE_2050=org.gcube.dataanalysis.executor.nodes.algorithms.AquamapsSuitable2050Node +OCCURRENCES_MERGER=org.gcube.dataanalysis.executor.nodes.transducers.OccurrenceMergingNode +OCCURRENCES_INTERSECTOR=org.gcube.dataanalysis.executor.nodes.transducers.OccurrenceIntersectionNode +OCCURRENCES_SUBTRACTION=org.gcube.dataanalysis.executor.nodes.transducers.OccurrenceSubtractionNode +LWR=org.gcube.dataanalysis.executor.nodes.algorithms.LWR +BIONYM_BIODIV=org.gcube.dataanalysis.executor.nodes.transducers.bionym.BionymBiodiv +BIONYM=org.gcube.dataanalysis.executor.nodes.transducers.bionym.BionymFlexibleWorkflowTransducer \ No newline at end of file diff --git a/cfg/operators.xml b/cfg/operators.xml new file mode 100644 index 0000000..3f39339 --- /dev/null +++ b/cfg/operators.xml @@ -0,0 +1,2620 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at 
end of file diff --git a/cfg/table.txt b/cfg/table.txt new file mode 100644 index 0000000..e69de29 diff --git a/cfg/transducerers.properties b/cfg/transducerers.properties new file mode 100644 index 0000000..34dcbb2 --- /dev/null +++ b/cfg/transducerers.properties @@ -0,0 +1,36 @@ +BIOCLIMATE_HSPEC=org.gcube.dataanalysis.ecoengine.transducers.BioClimateHSPECTransducer +BIOCLIMATE_HCAF=org.gcube.dataanalysis.ecoengine.transducers.BioClimateHCAFTransducer +BIOCLIMATE_HSPEN=org.gcube.dataanalysis.ecoengine.transducers.BioClimateHSPENTransducer +HCAF_INTERPOLATION=org.gcube.dataanalysis.ecoengine.transducers.InterpolationTransducer +HCAF_FILTER=org.gcube.dataanalysis.ecoengine.transducers.simplequeryexecutors.HcafFilter +HSPEN_FILTER=org.gcube.dataanalysis.ecoengine.transducers.simplequeryexecutors.HspenFilter +ABSENCE_CELLS_FROM_AQUAMAPS=org.gcube.dataanalysis.ecoengine.transducers.simplequeryexecutors.MarineAbsencePointsFromAquamapsDistribution +PRESENCE_CELLS_GENERATION=org.gcube.dataanalysis.ecoengine.transducers.simplequeryexecutors.MarinePresencePoints +OCCURRENCES_MERGER=org.gcube.dataanalysis.ecoengine.transducers.OccurrencePointsMerger +OCCURRENCES_INTERSECTOR=org.gcube.dataanalysis.ecoengine.transducers.OccurrencePointsIntersector +OCCURRENCES_MARINE_TERRESTRIAL=org.gcube.dataanalysis.ecoengine.transducers.OccurrencePointsInSeaOnEarth +OCCURRENCES_DUPLICATES_DELETER=org.gcube.dataanalysis.ecoengine.transducers.OccurrencePointsDuplicatesDeleter +OCCURRENCES_SUBTRACTION=org.gcube.dataanalysis.ecoengine.transducers.OccurrencePointsSubtraction +FIN_TAXA_MATCH=org.gcube.dataanalysis.fin.taxamatch.TaxaMatchTransducer +OBIS_MOST_OBSERVED_SPECIES=org.gcube.dataanalysis.trendylyzeralgorithms.AbsoluteSpeciesBarChartsAlgorithm +OBIS_SINGLE_SPECIES_DISTRIBUTION_PER_AREA=org.gcube.dataanalysis.trendylyzeralgorithms.SpeciesObservationsAreaBarChart +OBIS_SPECIES_OBSERVATIONS_PER_YEAR=org.gcube.dataanalysis.trendylyzeralgorithms.SpeciesObservationsPerYear +OBIS_MOST_OBSERVED_TAXA=org.gcube.dataanalysis.trendylyzeralgorithms.TaxaObservationsBarChartAlgorithm +OBIS_TAXA_OBSERVATIONS_PER_YEAR=org.gcube.dataanalysis.trendylyzeralgorithms.TaxaObservationsPerYearLineChart +OBIS_SPECIES_OBSERVATIONS_PER_LME_AREA=org.gcube.dataanalysis.trendylyzeralgorithms.SpeciesObservationsPerLMEAreaPerYearLineChart +OBIS_SPECIES_OBSERVATIONS_PER_MEOW_AREA=org.gcube.dataanalysis.trendylyzeralgorithms.SpeciesObservationsPerMEOWAreaPerYearLineChart +MOST_OBSERVED_SPECIES=org.gcube.dataanalysis.trendylyzeralgorithms.AbsoluteSpeciesBarChartsAlgorithm +SPECIES_OBSERVATIONS_TREND_PER_YEAR=org.gcube.dataanalysis.trendylyzeralgorithms.SpeciesObservationsPerYear +MOST_OBSERVED_TAXA=org.gcube.dataanalysis.trendylyzeralgorithms.TaxaObservationsBarChartAlgorithm +TAXONOMY_OBSERVATIONS_TREND_PER_YEAR=org.gcube.dataanalysis.trendylyzeralgorithms.TaxaObservationsPerYearLineChart +SPECIES_OBSERVATIONS_PER_AREA=org.gcube.dataanalysis.trendylyzeralgorithms.SpeciesObservationsAreaBarChart +SPECIES_OBSERVATION_LME_AREA_PER_YEAR=org.gcube.dataanalysis.trendylyzeralgorithms.SpeciesObservationsPerLMEAreaPerYearLineChart +SPECIES_OBSERVATION_MEOW_AREA_PER_YEAR=org.gcube.dataanalysis.trendylyzeralgorithms.SpeciesObservationsPerMEOWAreaPerYearLineChart +GET_TAXA_ALGORITHM=org.gcube.dataanalysis.JobSMspd.TaxaProcedure +GET_OCCURRENCES_ALGORITHM=org.gcube.dataanalysis.JobSMspd.OccurencesProcedure +FIN_GSAY_MATCH=org.gcube.dataanalysis.fin.gsay.GSAYTransducer +POINTS_TO_MAP=org.gcube.dataanalysis.geo.algorithms.PointsMapsCreator 
+POLYGONS_TO_MAP=org.gcube.dataanalysis.geo.algorithms.PolygonMapsCreator +SPECIES_MAP_FROM_CSQUARES=org.gcube.dataanalysis.geo.algorithms.SpeciesDistributionsMapsCreatorFromCsquares +SPECIES_MAP_FROM_POINTS=org.gcube.dataanalysis.geo.algorithms.SpeciesDistributionsMapsCreatorFromPoints +BIONYM_LOCAL=org.gcube.dataanalysis.executor.nodes.transducers.bionym.BionymLocalTransducer \ No newline at end of file diff --git a/cfg/userperspective.properties b/cfg/userperspective.properties new file mode 100644 index 0000000..0e32d0f --- /dev/null +++ b/cfg/userperspective.properties @@ -0,0 +1,18 @@ +ANOMALIES_DETECTION=DBSCAN,LOF,KMEANS,XMEANS +CLASSIFICATION=FEED_FORWARD_A_N_N_DISTRIBUTION +CLIMATE=BIOCLIMATE_HSPEC,BIOCLIMATE_HCAF,BIOCLIMATE_HSPEN,HCAF_INTERPOLATION +CORRELATION_ANALYSIS=HRS +DATA_CLUSTERING=DBSCAN,LOF,KMEANS,XMEANS +FILTERING=HCAF_FILTER,HSPEN_FILTER +FUNCTION_SIMULATION=FEED_FORWARD_A_N_N_DISTRIBUTION +OCCURRENCES=ABSENCE_CELLS_FROM_AQUAMAPS,PRESENCE_CELLS_GENERATION,OCCURRENCES_MERGER,OCCURRENCES_INTERSECTOR,OCCURRENCES_MARINE_TERRESTRIAL,OCCURRENCES_DUPLICATES_DELETER,OCCURRENCES_SUBTRACTION +PERFORMANCES_EVALUATION=QUALITY_ANALYSIS,DISCREPANCY_ANALYSIS +SPECIES_SIMULATION=AQUAMAPS_SUITABLE,AQUAMAPS_NATIVE,AQUAMAPS_NATIVE_2050,AQUAMAPS_SUITABLE_2050,AQUAMAPS_NATIVE_NEURALNETWORK,AQUAMAPS_SUITABLE_NEURALNETWORK +TRAINING=HSPEN,AQUAMAPSNN,FEED_FORWARD_ANN +TIME_SERIES=HCAF_INTERPOLATION +TAXA=FIN_TAXA_MATCH,BIONYM,BIONYM_BIODIV,BIONYM_LOCAL,FIN_GSAY_MATCH +MAPS=MAPS_COMPARISON,DISCREPANCY_ANALYSIS,POINTS_TO_MAP,POLYGONS_TO_MAP,SPECIES_MAP_FROM_CSQUARES,SPECIES_MAP_FROM_POINTS +BAYESIAN_METHODS=LWR,FEED_FORWARD_A_N_N_DISTRIBUTION +OBIS_OBSERVATIONS_SPECIES_DATA=MOST_OBSERVED_SPECIES,MOST_OBSERVED_TAXA,SPECIES_OBSERVATIONS_PER_AREA +OBIS_OBSERVATIONS_TRENDS=SPECIES_OBSERVATIONS_TREND_PER_YEAR,TAXONOMY_OBSERVATIONS_TREND_PER_YEAR,SPECIES_OBSERVATION_MEOW_AREA_PER_YEAR,SPECIES_OBSERVATION_LME_AREA_PER_YEAR +SPD_PROCEDURES=GET_TAXA_ALGORITHM,GET_OCCURRENCES_ALGORITHM \ No newline at end of file diff --git a/files/SmartSampleOnTable.txt b/files/SmartSampleOnTable.txt new file mode 100644 index 0000000..1c0329f --- /dev/null +++ b/files/SmartSampleOnTable.txt @@ -0,0 +1,100 @@ +3712:100:2,0,0,0,,,,,,,,,,,,,0,,,,, +3214:110:1,0,0,0,,,,,,,,,,,,,0,,,,, +3803:237:3,0,0,0,,,,,,,,,,,,,0,,,,, +1215:476:3,5889,6272,6065,73.92,24.7,0.45,29.31,19.41,9.9,,34.93,,35.07,34.79,,252,0,0,0,0,0 +1701:131:1,1854,2090,1961,46.78,5.32,0.57,10.62,1.51,9.11,-0.96,35.04,,35.11,34.97,34.9099,369,4.7E-4,0,0,0,0 +1208:383:3,0,0,0,,,,,,,,,,,,,0,,,,, +7007:384:4,0,0,0,,,,,,,,,,,,,0,,,,, +7509:237:3,0,0,0,,,,,,,,,,,,,0,,,,, +7012:360:2,3589,4357,4099,91.46,27.66,0.47,29.31,25.82,3.49,1.46,34.22,,34.6,33.91,34.6926,304,0,0,0,0,0 +7008:455:3,1523,2841,2032,373.74,28.16,0.54,30.93,26.11,4.82,2.31,32.62,,33.21,31.66,34.6493,325,0,0,0,0,0 +7806:114:4,0,600,369,179.81,-1.79,0,-1.54,-1.79,0.25,,30.45,,31.36,28.8,,169,0.88,0.95,0.78,0.84,0.94 +3000:215:4,1333,3759,3360,393.52,26.43,0.45,29.62,22.16,7.46,2.46,34.34,,35.53,33.29,34.8958,701,0,0,0,0,0 +5708:466:1,0,0,0,,,,,,,,,,,,,0,,,,, +3114:123:1,0,163,10,21.79,26.99,0.39,29.59,23.8,5.79,26.99,34.93,,35.58,34.15,34.8772,547,0,0,0,0,0 +5213:392:3,4125,4339,4197,33.44,21.53,0.44,26.26,17.8,8.46,1.49,35.37,,35.64,34.99,34.6944,136,0,0,0,0,0 +3608:229:2,3914,4276,4089,63.4,-1,0.1,1.37,-1.79,3.16,-0.26,33.81,,34.18,33.61,34.6651,304,0.33,0.05,0.64,0.61,0.01 +3005:114:2,4168,4777,4590,79.03,28.22,0.23,30.76,25.35,5.41,1.34,35.33,,35.48,35.14,34.7173,376,0,0,0,0,0 
+1616:228:1,0,0,0,,,,,,,,,,,,,0,,,,, +7611:495:4,0,0,0,,,,,,,,,,,,,0,,,,, +7215:218:4,819,4520,2642,1134.08,25.38,0.3,27.73,23.36,4.37,1.72,35,,35.11,34.84,34.6492,227,0,0,0,0,0 +7213:384:2,3825,4712,4419,180.49,20.31,0.41,23.63,17.74,5.89,1.56,34.79,,35.13,34.65,34.6861,229,0,0,0,0,0 +1707:350:1,31,185,80,46.54,-1.67,0.13,0.64,-1.79,2.43,-1.34,29.53,,31.29,26.14,34.1963,521,0.69,0.95,0.44,0.39,0.96 +3512:123:2,4065,4637,4319,121.94,4.79,0.29,7.48,2.46,5.02,0.18,33.94,,34.08,33.8,34.6884,293,9.667e-,0,0,0,0 +5306:475:1,0,0,0,,,,,,,,,,,,,0,,,,, +3014:353:4,0,0,0,,,,,,,,,,,,,0,,,,, +1317:110:1,5391,5686,5527,54.8,21.65,0.43,27.17,16.48,10.69,1.5,34.78,,34.99,34.56,34.6963,324,0,0,0,0,0 +7500:496:3,140,1029,372,199.17,10.06,0.23,13.8,7.6,6.2,8.53,35.22,,35.27,35.18,35.3121,619,0,0,0,0,0 +3613:489:4,0,0,0,,,,,,,,,,,,,0,,,,, +5015:489:2,3549,5818,4716,610.91,29.02,0.38,30.11,27.67,2.44,1.26,35.51,,35.76,35.19,34.7055,279,0,0,0,0,0 +1400:248:3,0,0,0,,,,,,,,,,,,,0,,,,, +1003:144:2,0,0,0,,,,,,,,,,,,,0,,,,, +1700:476:3,1560,2640,2125,197.33,0.22,0.91,4.57,-1.79,6.36,-0.97,34.58,,34.9,34.24,34.9096,484,0.00157,0.00333,0,0,0 +1101:360:2,0,0,0,,,,,,,,,,,,,0,,,,, +3710:238:1,0,0,0,,,,,,,,,,,,,0,,,,, +1109:219:2,0,114,59,32.51,28.95,0.27,30.98,27.11,3.87,28.55,31.6,,32.61,29.1,32.7094,828,0,0,0,0,0 +3005:206:3,4431,5008,4648,87.6,28.48,0.25,30.77,26.25,4.52,1.35,35.33,,35.49,35.07,34.7162,357,0,0,0,0,0 +7514:475:1,3730,4019,3853,41.42,7.96,0.43,15.67,3.7,11.97,1.47,32.35,,32.65,31.8,34.696,438,4.9E-4,0,0,0,0 +5304:475:2,5086,5177,5122,20.48,17.57,0.33,22.38,13.7,8.68,0.22,35.5,,35.89,35.12,34.6691,380,0,0,0,0,0 +3512:495:1,4615,4673,4634,13.29,1.11,0.26,4.12,-1.72,5.84,-0.2,33.92,,34.08,33.8,34.6847,215,4.0E-5,0,0,0,0 +5606:499:2,0,541,207,120.54,-1.76,0.07,-0.43,-1.79,1.36,-0.06,32.06,,32.4,31.54,34.4431,342,0.6,0.33,0.65,0.71,0.69 +3717:457:4,475,623,513,42.39,-1.79,0.01,-1.63,-1.79,0.16,-0.06,34.32,,34.5,34.24,34.7085,716,0.56,0.66,0.83,0.7,0.02 +3511:374:1,4512,4614,4550,15.01,1.71,0.3,4.44,-1.1,5.54,-0.11,33.95,,34.03,33.85,34.6796,210,9.667e-,0,0,0,0 +7406:495:4,197,369,300,44.07,4.47,0.87,15.02,-1.79,16.81,,29.5,,30.96,27.95,,182,0.1,0.11,0,0.01,0.27 +7511:465:1,0,0,0,,,,,,,,,,,,,0,,,,, +3107:103:3,4647,5465,5120,148.99,27.55,0.26,29.95,24.84,5.11,1.42,34.57,,35.06,34.12,34.715,273,0,0,0,0,0 +3317:495:3,0,27,13,9.05,15.9,0.48,20.66,12.53,8.13,15.19,35,,35.24,34.81,34.9984,101,0,0,0,0,0 +5806:468:4,0,0,0,,,,,,,,,,,,,0,,,,, +3312:144:2,8,3740,1665,1441.75,17.63,0.34,21.02,14.85,6.17,2.51,35.75,,35.95,35.58,34.6496,557,0,0,0,0,0 +1202:360:2,0,0,0,,,,,,,,,,,,,0,,,,, +3801:351:3,0,0,0,,,,,,,,,,,,,0,,,,, +5505:371:3,2999,4241,3957,262.49,1.72,0.43,5.13,-1.51,6.64,0.03,33.99,,34.18,33.93,34.6731,265,0.01,0,0.03,0.01,0 +5509:216:4,4492,4979,4686,82.95,7.16,0.29,10.07,4.95,5.12,0.71,34.17,,34.22,34.11,34.7095,237,0,0,0,0,0 +1109:381:4,1921,2197,2041,49.84,28.08,0.3,30.62,24.75,5.87,2.69,30.67,,32.45,27.11,34.8055,701,0,0,0,0,0 +3715:476:3,0,0,0,,,,,,,,,,,,,0,,,,, +5706:390:1,1,1,1,0,-1.79,0,-1.78,-1.79,0.01,-1.38,34.49,,34.79,33.9,34.4578,0,1.5,1.5,1.5,1.5,1.5 +3305:206:4,3639,5326,4596,418.01,21.2,0.21,25.3,18,7.3,0.96,35.51,,35.6,35.4,34.7139,203,0,0,0,0,0 +5805:392:4,0,0,0,,,,,,,,,,,,,0,,,,, +7200:226:3,0,0,0,,,,,,,,,,,,,0,,,,, +3201:111:2,1142,2372,1695,307.26,18.2,0.34,22.52,15.27,7.25,3.33,35.29,,35.41,35.18,34.8833,180,0,0,0,0,0 +7003:372:4,2569,5496,4368,560.13,27.31,0.27,28.93,24.91,4.02,2.39,35.85,,36.12,35.49,34.9086,374,0,0,0,0,0 
+3317:111:3,2672,4221,3383,339.74,19.89,0.42,23.91,17.04,6.87,1.86,35.79,,35.98,35.71,34.6866,321,0,0,0,0,0 +1313:373:2,300,1902,1376,307.88,17.07,0.58,27.99,9.06,18.93,0.16,33.88,,34.34,33,34.0637,525,0,0,0,0,0 +5015:496:4,1828,5706,5026,962.02,28.95,0.37,30.13,27.51,2.62,1.29,35.56,,35.79,35.19,34.7062,238,0,0,0,0,0 +5012:468:3,4236,4770,4565,101.81,26.93,0.53,29.99,24.99,5,1.46,35.52,,35.76,35.27,34.694,311,0,0,0,0,0 +5408:478:1,3831,4264,4081,90.62,8.82,0.35,11.9,6.3,5.6,0.89,33.96,,34.08,33.7,34.7117,226,0,0,0,0,0 +3206:456:2,3674,4807,4145,218.76,23.36,0.29,27.46,20.13,7.33,1.56,35.43,,35.56,35.26,34.7224,184,0,0,0,0,0 +1807:239:1,2630,3163,2925,147.72,-1.79,0,-1.78,-1.79,0.01,-0.78,31.19,,32.81,27.81,34.9295,0,0.93,0.98,0.89,0.87,0.96 +5608:130:2,4406,4617,4522,59.01,1.2,0.46,5.31,-1.39,6.7,0.46,33.89,,33.98,33.71,34.6962,170,0.00688,0,0.01,0.00666,0 +7209:458:2,0,0,0,,,,,,,,,,,,,0,,,,, +7515:235:1,3936,4506,4302,105.49,7.16,0.4,13.88,3.18,10.7,1.51,32.55,,32.8,31.49,34.6837,491,6.7E-4,0,0,0,0 +3406:475:3,1256,2363,1969,270.64,4.79,0.36,8.43,2.49,5.94,1.7,33.86,,34.03,33.72,34.722,297,0,0,0,0,0 +1411:237:4,0,0,0,,,,,,,,,,,,,0,,,,, +1303:381:1,0,0,0,,,,,,,,,,,,,0,,,,, +3316:350:1,2366,4360,3305,506.59,19.16,0.38,24.43,15.73,8.7,1.28,35.7,,35.86,35.59,34.7234,374,0,0,0,0,0 +3417:132:1,0,0,0,,,,,,,,,,,,,0,,,,, +7205:363:4,4699,6127,5466,334.74,25.31,0.2,28.84,22.24,6.6,2.11,36.98,,37.16,36.85,34.854,197,0,0,0,0,0 +5313:384:3,4921,5181,5027,52.25,15.12,0.35,19.63,11.63,8,1.26,34.41,,34.55,34.28,34.7122,262,0,0,0,0,0 +5417:248:1,411,1060,665,207.1,12.63,0.49,17.62,9.14,8.48,5.83,34.59,,34.74,34.26,34.3562,797,0,0,0,0,0 +5804:207:3,0,1,1,0,-1.79,0,-1.78,-1.79,0.01,,34.5,,34.77,34.06,,106,1.5,1.5,1.5,1.5,1.5 +7401:111:4,2738,4691,3679,499,16.44,0.34,20.57,13.05,7.52,2.63,35.81,,35.89,35.7,34.9243,541,0,0,0,0,0 +3312:100:2,0,0,0,,,,,,,,,,,,,0,,,,, +1704:134:1,308,363,333,11.13,0.78,0.54,7.11,-1.79,8.9,-0.88,34.68,,34.84,34.47,34.9043,421,0.04,0.12,0.01,0,0.02 +7306:102:1,4584,5254,5003,128.8,23.88,0.22,28.88,19.53,9.35,2.24,36.62,,36.72,36.47,34.8884,252,0,0,0,0,0 +1315:465:3,5619,5801,5714,46.45,18.71,0.6,27.24,12.16,15.08,1.55,34.46,,34.64,34.26,34.693,424,0,0,0,0,0 +5417:394:3,5248,5536,5351,53.39,10.28,0.4,14.15,7.32,6.83,1.21,34.45,,34.71,34.34,34.7159,383,0,0,0,0,0 +5617:468:2,2942,3608,3293,209.01,-1.13,0.15,2.22,-1.79,4.01,0.09,34.02,,34.2,33.76,34.6875,261,0.34,0.04,0.6,0.66,0.05 +1012:362:4,326,4077,2526,1093.78,28.86,0.12,30.1,27.25,2.85,3.58,33.88,,34.29,33.36,34.5877,616,0,0,0,0,0 +1014:238:4,3404,5093,4312,372.23,29.41,0.22,30.24,28.21,2.03,1.53,34.38,,34.77,34.09,34.6879,247,0,0,0,0,0 +3708:373:4,0,0,0,,,,,,,,,,,,,0,,,,, +1407:486:4,0,0,0,,,,,,,,,,,,,0,,,,, +7109:374:4,0,0,0,,,,,,,,,,,,,0,,,,, +5109:468:3,3645,4184,3924,121.58,22.85,0.34,25.17,20.54,4.63,1.83,35.89,,36.2,35.58,34.6884,256,0,0,0,0,0 +3408:216:4,2439,2964,2707,95.29,12.37,0.33,15.67,10,5.67,2,34.78,,34.99,34.57,34.7453,389,0,0,0,0,0 +7113:374:4,4989,5295,5141,64.43,24.61,0.42,27.21,21.73,5.48,1.45,34.46,,34.72,34.18,34.6938,247,0,0,0,0,0 +7510:495:1,0,0,0,,,,,,,,,,,,,0,,,,, +3510:392:2,4090,4586,4384,64.91,0.02,0.29,2.8,-1.79,4.59,-0.15,33.95,,34.08,33.81,34.6813,219,0.02,0,0.01,0.06,0 +1817:488:3,1387,3509,2297,642.94,-1.79,0,-1.78,-1.79,0.01,-0.52,31.15,,31.74,30.65,34.951,0,1,1,1,1,1 +1207:353:3,0,0,0,,,,,,,,,,,,,0,,,,, +5512:206:4,3797,4078,3933,51.68,7.98,0.42,11.7,6.23,5.47,1.4,34.33,,34.41,34.2,34.7207,319,0,0,0,0,0 
+1804:476:1,2846,3866,3433,308.99,-1.79,0,-1.78,-1.79,0.01,-0.6,31.95,,32.67,31.28,34.9552,0,0.94,0.98,0.93,0.9,0.97 diff --git a/files/table.txt b/files/table.txt new file mode 100644 index 0000000..063ad74 --- /dev/null +++ b/files/table.txt @@ -0,0 +1,100 @@ +3214:370:3,0,0,0,,,,,,,,,,,,,0,,,,, +1514:228:3,1080,1320,1217,49.45,3.95,0.46,14.12,-1.79,15.91,2.3,32.66,,32.95,32.3,34.4143,572,0.1,0.21,0,0,0.2 +3302:459:4,4485,4698,4627,48.74,20.55,0.28,24,17.58,6.42,1.21,35.47,,35.52,35.35,34.7433,635,0,0,0,0,0 +5511:245:2,3156,3347,3278,32.85,6.24,0.49,9.43,4.32,5.11,1.29,34.17,,34.25,34.06,34.7168,283,0,0,0,0,0 +1007:456:4,2277,2607,2441,75.86,28.6,0.3,30.92,27.34,3.58,2.06,34.67,,35.43,33.71,34.7468,697,0,0,0,0,0 +5006:134:3,0,0,0,,,,,,,,,,,,,0,,,,, +1602:238:1,0,0,0,,,,,,,,,,,,,0,,,,, +3102:393:2,0,0,0,,,,,,,,,,,,,0,,,,, +3307:102:2,3521,4511,3866,182.68,20.17,0.3,24.72,16.23,8.49,1.12,35.77,,35.98,35.67,34.7112,170,0,0,0,0,0 +5716:248:4,3280,3498,3417,51.34,-1.79,0,-1.78,-1.79,0.01,0.31,34,,34.31,33.79,34.7005,388,0.75,0.81,0.93,0.91,0.39 +3706:111:4,0,0,0,,,,,,,,,,,,,0,,,,, +3100:228:4,4469,4750,4598,77.81,23.32,0.47,28.18,18.91,9.27,2.39,35.9,,36.37,34.76,34.8874,662,0,0,0,0,0 +3802:142:2,0,0,0,,,,,,,,,,,,,0,,,,, +7217:394:2,5097,5464,5317,67.82,22.27,0.5,27.24,17.19,10.05,1.55,35.1,,35.34,34.93,34.6917,267,0,0,0,0,0 +7115:458:3,4413,6028,5527,238.07,26.16,0.35,27.89,24.23,3.66,1.43,34.62,,34.96,34.44,34.6986,203,0,0,0,0,0 +3115:468:1,2646,3449,3112,197.07,27.13,0.37,29.63,24.39,5.24,1.88,35.07,,35.28,34.82,34.698,253,0,0,0,0,0 +1810:235:3,2370,3650,3427,342.62,-1.79,0,-1.78,-1.79,0.01,-0.71,30.63,,32.43,28.69,34.939,0,0.93,0.98,0.89,0.88,0.97 +3815:351:3,0,0,0,,,,,,,,,,,,,0,,,,, +7512:458:3,0,0,0,,,,,,,,,,,,,0,,,,, +5605:218:1,570,5146,2644,1252.42,-0.53,0.3,2.35,-1.79,4.14,0.43,33.88,,34.04,33.61,34.6985,220,0.05,0.00333,0.13,0.08,0 +1603:133:1,0,0,0,,,,,,,,,,,,,0,,,,, +1110:465:2,0,0,0,,,,,,,,,,,,,0,,,,, +5005:238:4,0,0,0,,,,,,,,,,,,,0,,,,, +3711:114:3,0,0,0,,,,,,,,,,,,,0,,,,, +1600:104:1,271,304,288,6.96,9.48,0.39,17.71,5.12,12.59,6.97,33.4,,34.06,32.13,35.1901,862,0.01,0.02,0,0,0.03 +7111:468:1,3034,4116,3825,202.95,25.98,0.31,28.57,23.68,4.89,1.5,34.17,,34.44,33.81,34.6885,332,0,0,0,0,0 +5313:134:4,4413,4800,4604,87.77,18.5,0.39,23.68,14.94,8.74,1.3,34.99,,35.26,34.76,34.7075,179,0,0,0,0,0 +7810:363:1,1596,2076,1832,188.49,-1.79,0,-1.78,-1.79,0.01,-0.4,30.94,,31.19,30.55,34.9299,0,0.97,0.99,0.96,0.95,0.99 +1717:352:1,97,180,138,22.07,-1.79,0,-1.76,-1.79,0.03,-1.04,29.35,,30.13,28.53,33.4049,0,0.92,0.97,0.84,0.87,0.97 +3404:382:3,3887,4494,4191,162.59,4.78,0.36,7.19,2.47,4.72,0.43,33.9,,34.12,33.77,34.651,322,8.25,0,0,0,0 +7515:476:4,0,0,0,,,,,,,,,,,,,0,,,,, +1709:123:3,0,0,0,,,,,,,,,,,,,0,,,,, +5205:114:1,0,0,0,,,,,,,,,,,,,0,,,,, +7006:112:1,0,0,0,,,,,,,,,,,,,0,,,,, +3513:487:1,4294,4692,4475,90.74,1.95,0.28,4.89,-0.32,5.21,-0.05,33.82,,33.91,33.72,34.6773,206,1.3E-4,0,0,0,0 +3102:370:2,0,0,0,,,,,,,,,,,,,0,,,,, +5613:247:1,4237,4904,4573,173.63,-0.9,0.42,2.01,-1.79,3.8,0.24,33.92,,34.07,33.63,34.7061,165,0.2,0,0.21,0.51,0.06 +3517:352:2,5068,5548,5372,62.83,6.65,0.34,9.58,4.95,4.63,0.96,34.14,,34.39,33.95,34.707,287,0,0,0,0,0 +5201:103:2,2807,4255,3603,327.26,23.47,0.35,26.85,20.99,5.86,2.23,36.64,,36.79,36.46,34.8601,155,0,0,0,0,0 +3710:457:2,0,0,0,,,,,,,,,,,,,0,,,,, +7406:350:4,0,131,21,29.77,6.73,0.33,18.93,-1.54,20.47,4.47,30.64,,31.26,29.89,30.8745,130,0.06,0.13,0,0.00487,0.13 +1602:382:2,0,0,0,,,,,,,,,,,,,0,,,,, 
+3208:392:3,4032,4337,4168,62.46,20.3,0.31,24.85,16.45,8.4,1.36,35.78,,36.03,35.67,34.7223,166,0,0,0,0,0 +1004:382:3,0,0,0,,,,,,,,,,,,,0,,,,, +1406:372:3,0,0,0,,,,,,,,,,,,,0,,,,, +3504:102:2,3980,4204,4075,52.37,3.06,0.28,5.55,1.2,4.35,0.2,33.95,,34.07,33.87,34.6437,348,2.667e-,0,0,0,0 +1316:124:4,6183,6290,6262,15.09,21.37,0.5,27.11,16.29,10.82,,34.67,,34.85,34.54,,376,0,0,0,0,0 +1603:487:3,0,121,68,30.23,3.98,0.6,10.49,-1.73,12.22,3.01,34.93,,35.2,34.75,34.6867,633,0.01,0.02,0.01,0.02,0.02 +3717:497:2,1,1,1,0,-1.79,0,-1.78,-1.79,0.01,-1.45,34.31,,34.54,33.98,34.3521,661,1.5,1.5,1.5,1.5,1.5 +3111:380:2,4117,5047,4532,218.35,26.05,0.43,29.54,23.3,6.24,1.18,34.79,,35.01,34.6,34.7135,255,0,0,0,0,0 +7503:228:2,2978,3873,3569,175.58,8.44,0.66,14.13,4.87,9.26,2.64,34.71,,34.9,34.44,34.9337,629,0,0,0,0,0 +3312:237:2,3,862,344,269.72,17.86,0.29,21.21,14.9,6.31,11.84,35.81,,36.04,35.56,35.1271,622,0,0,0,0,0 +1604:100:4,0,0,0,,,,,,,,,,,,,0,,,,, +5805:207:1,1,1,1,0,-1.79,0,-1.78,-1.79,0.01,-1.3,34.64,,34.92,34.24,34.6741,0,1.5,1.5,1.5,1.5,1.5 +3808:486:4,0,0,0,,,,,,,,,,,,,0,,,,, +1113:111:4,5468,6222,5905,126.52,28.77,0.26,30.48,26.93,3.55,,34.21,,34.43,34.01,,194,0,0,0,0,0 +5504:459:4,3503,4373,3978,241.42,3.39,0.47,6.6,-0.19,6.79,0.32,33.99,,34.09,33.86,34.6805,311,6.833e-,0,0,0,0 +3015:478:1,0,2301,880,817.09,29.26,0.34,30.5,27.3,3.2,4.84,34.63,,35.06,34.22,34.5298,392,0,0,0,0,0 +7001:487:4,1974,4794,4251,503.36,27.47,0.34,29.33,24.24,5.09,2.3,35.12,,35.77,34.45,34.8818,548,0,0,0,0,0 +1202:469:2,0,0,0,,,,,,,,,,,,,0,,,,, +3809:143:4,0,0,0,,,,,,,,,,,,,0,,,,, +1303:123:3,1226,1584,1385,89.6,21.98,0.31,28.4,16.09,12.31,13.63,39.09,,39.29,38.84,38.7163,317,0,0,0,0,0 +5406:102:2,0,0,0,,,,,,,,,,,,,0,,,,, +5116:228:4,5100,5258,5181,27.52,28.82,0.29,30.09,27.09,3,1.09,35.3,,35.76,34.9,34.7102,207,0,0,0,0,0 +7305:380:2,5152,5443,5393,34.25,21.41,0.25,27.56,17.45,10.11,2.27,36.15,,36.31,35.84,34.8778,374,0,0,0,0,0 +3709:226:4,0,0,0,,,,,,,,,,,,,0,,,,, +1808:133:3,3273,3484,3392,55.93,-1.79,0,-1.78,-1.79,0.01,-0.7,30.72,,32.6,27.39,34.9412,0,0.93,0.98,0.89,0.88,0.96 +7807:248:3,1171,1276,1199,25.78,-1.79,0,-1.78,-1.79,0.01,-0.19,30.52,,31.41,29.24,34.9273,0,0.98,0.99,0.96,0.97,0.99 +7710:225:3,0,44,11,13.03,-1.79,0.02,-1.29,-1.79,0.5,-0.84,30.95,,37.68,27.2,31.0727,49,0.82,0.91,0.69,0.79,0.89 +7312:371:3,0,0,0,,,,,,,,,,,,,0,,,,, +5212:122:1,3115,3815,3527,187.11,25.03,0.25,27.63,22.24,5.39,1.7,36.44,,36.68,36.14,34.6819,127,0,0,0,0,0 +5410:465:2,3154,3727,3575,83.87,9.48,0.32,13.08,6.87,6.21,1.35,34.12,,34.22,34.02,34.7125,245,0,0,0,0,0 +3610:372:3,0,0,0,,,,,,,,,,,,,0,,,,, +3010:103:2,0,64,10,9.18,29.1,0.35,31.21,27.1,4.11,29.11,29.87,,31.57,27.72,30.2693,183,0,0,0,0,0 +3204:144:4,0,0,0,,,,,,,,,,,,,0,,,,, +3109:487:3,5178,5741,5503,75.15,24.94,0.38,27.5,22.65,4.85,1.17,34.71,,34.99,34.53,34.7109,218,0,0,0,0,0 +5416:218:3,4236,4744,4580,91.81,14.1,0.54,19.23,10.43,8.8,0.89,34.84,,34.91,34.66,34.706,371,0,0,0,0,0 +3217:218:1,3375,4010,3710,141.95,25.18,0.39,28.43,21.84,6.59,1.85,35.39,,35.59,35.19,34.6822,301,0,0,0,0,0 +5706:110:2,0,37,19,9.76,-1.79,0,-1.78,-1.79,0.01,-1.62,34.16,,34.61,33.2,34.2117,210,0.88,0.89,0.89,0.87,0.88 +7708:110:3,0,0,0,,,,,,,,,,,,,0,,,,, +5107:225:2,0,0,0,,,,,,,,,,,,,0,,,,, +7015:140:1,4730,5183,5003,78.85,27.81,0.64,29.86,24.94,4.92,1.43,34.81,,34.88,34.71,34.695,357,0,0,0,0,0 +3009:477:1,4152,5179,4685,222.74,28.49,0.38,30.45,25.89,4.56,1.17,34.09,,34.38,33.62,34.7137,274,0,0,0,0,0 
+3414:478:4,1057,1673,1340,169.31,10.41,0.43,13.23,8.46,4.77,3.29,34.61,,34.8,34.36,34.429,393,0,0,0,0,0 +7200:142:3,0,0,0,,,,,,,,,,,,,0,,,,, +5314:364:1,4516,5384,4890,166.04,16.27,0.32,21.4,13,8.4,1.2,34.74,,34.87,34.57,34.7034,246,0,0,0,0,0 +7306:390:1,4322,5333,5132,137.96,21.54,0.37,27.23,16.05,11.18,2.28,35.67,,36.09,35.35,34.8922,495,0,0,0,0,0 +5700:390:3,0,0,0,,,,,,,,,,,,,0,,,,, +3106:485:3,2042,3309,2699,250.96,25.62,0.28,28.86,22.75,6.11,1.86,34.85,,35.16,34.67,34.7201,203,0,0,0,0,0 +5707:371:4,1,1,1,0,-1.79,0,-1.78,-1.79,0.01,-1.7,33.9,,34.25,33.5,33.4427,0,1.5,1.5,1.5,1.5,1.5 +7602:123:2,1285,1531,1359,56.7,8.32,0.32,12.4,5.81,6.59,3.78,35.25,,35.33,35.14,34.9939,441,0,0,0,0,0 +3712:390:1,0,0,0,,,,,,,,,,,,,0,,,,, +1412:372:4,0,0,0,,,,,,,,,,,,,0,,,,, +5700:217:2,0,103,43,29.45,-1.78,0.01,-1.55,-1.79,0.24,-1.72,34.11,,34.56,33.45,34.2757,187,1.5,1.5,1.5,1.5,1.5 +1616:102:1,0,69,55,17.46,2.82,0.37,12.45,-1.79,14.24,0.41,32.36,,33,31.51,32.7001,908,0.34,0.53,0.03,0.08,0.7 +7501:361:2,2571,2809,2668,54.47,11.33,0.26,15.48,9.1,6.38,3.08,35.34,,35.42,35.27,34.9611,501,0,0,0,0,0 +5413:495:4,4357,4747,4546,92.09,8.74,0.44,12.57,6.41,6.16,1.28,34.33,,34.51,34.19,34.7173,281,0,0,0,0,0 +1007:485:1,1563,2842,2458,320.42,28.53,0.29,31.05,26.28,4.77,2.07,34.54,,35.09,33.44,34.7277,907,0,0,0,0,0 +7114:229:2,5187,5440,5317,54.71,26.63,0.34,28.64,24.51,4.13,1.47,34.36,,34.68,34.04,34.6959,248,0,0,0,0,0 +7414:101:3,4582,4876,4718,53.09,14.79,0.75,21.34,9.31,12.03,1.54,33.32,,33.43,33.06,34.6921,430,0,0,0,0,0 diff --git a/log.txt b/log.txt new file mode 100644 index 0000000..04a9dc3 --- /dev/null +++ b/log.txt @@ -0,0 +1,44 @@ +0 [main] INFO org.gcube.common.scope.impl.ScopeProviderScanner - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@685e95ae +633 [main] INFO org.gcube.common.scan.DefaultScanner - matched 13 resources from 62 urls in 313 ms +653 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/d4science.servicemap +682 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/vo1.servicemap +683 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/openbio.servicemap +685 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/cnr.servicemap +687 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/testing.servicemap +689 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/gcube.servicemap +690 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/farm.servicemap +692 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading 
jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/isti.servicemap +694 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/gcubeapps.servicemap +696 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/ecosystem.servicemap +697 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/securevo.servicemap +699 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/devsec.servicemap +701 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/devnext.servicemap +879 [main] INFO org.gcube.common.clients.stubs.jaxws.StubFactory - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +1420 [main] INFO org.gcube.resources.discovery.icclient.ICClient - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database') return $resource +1497 [main] INFO org.gcube.common.clients.stubs.jaxws.handlers.HandlerRegistry - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@1c7c0f04 +1498 [main] INFO org.gcube.common.clients.stubs.jaxws.handlers.HandlerRegistry - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@16bc6a12 +1499 [main] INFO org.gcube.common.clients.stubs.jaxws.handlers.HandlerRegistry - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@2f1c7c32 +1500 [main] INFO org.gcube.common.clients.stubs.jaxws.handlers.HandlerRegistry - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@281de7b2 +1624 [main] INFO org.gcube.resources.discovery.icclient.ICClient - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database') return $resource in 204 ms +0 [main] INFO org.gcube.common.scope.impl.ScopeProviderScanner - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@3c22d5b5 +508 [main] INFO org.gcube.common.scan.DefaultScanner - matched 13 resources from 62 urls in 314 ms +529 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/d4science.servicemap +557 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/vo1.servicemap +559 [main] INFO 
org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/openbio.servicemap +560 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/cnr.servicemap +562 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/testing.servicemap +564 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/gcube.servicemap +566 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/farm.servicemap +568 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/isti.servicemap +570 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/gcubeapps.servicemap +572 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/ecosystem.servicemap +574 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/securevo.servicemap +576 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/devsec.servicemap +578 [main] INFO org.gcube.common.scope.impl.ServiceMapScanner - loading jar:file:/home/loredana/.m2/repository/org/gcube/core/common-scope-maps/1.0.2-SNAPSHOT/common-scope-maps-1.0.2-SNAPSHOT.jar!/devnext.servicemap +717 [main] INFO org.gcube.common.clients.stubs.jaxws.StubFactory - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +892 [main] INFO org.gcube.resources.discovery.icclient.ICClient - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database') return $resource +926 [main] INFO org.gcube.common.clients.stubs.jaxws.handlers.HandlerRegistry - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@3d48392b +927 [main] INFO org.gcube.common.clients.stubs.jaxws.handlers.HandlerRegistry - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@657189ad +928 [main] INFO org.gcube.common.clients.stubs.jaxws.handlers.HandlerRegistry - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@45f2a7e9 +929 [main] INFO org.gcube.common.clients.stubs.jaxws.handlers.HandlerRegistry - loaded call handler 
org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@5eb85400
+1022 [main] INFO org.gcube.resources.discovery.icclient.ICClient - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database') return $resource in 130 ms
diff --git a/pom.xml b/pom.xml
new file mode 100644
index 0000000..84097f9
--- /dev/null
+++ b/pom.xml
@@ -0,0 +1,147 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<groupId>org.gcube.dataanalysis</groupId>
+	<version>1.0.0-SNAPSHOT</version>
+	<parent>
+		<groupId>org.gcube.tools</groupId>
+		<artifactId>maven-parent</artifactId>
+		<version>1.0.0</version>
+	</parent>
+	<dependencies>
+		<dependency>
+			<groupId>org.gcube.contentmanagement</groupId>
+			<artifactId>storage-manager-core</artifactId>
+			<version>[1.0.0-SNAPSHOT, 3.0.0-SNAPSHOT)</version>
+		</dependency>
+		<dependency>
+			<groupId>org.gcube.core</groupId>
+			<artifactId>common-scope-maps</artifactId>
+			<version>[1.0.0-SNAPSHOT, 3.0.0-SNAPSHOT)</version>
+			<scope>provided</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.gcube.resources.discovery</groupId>
+			<artifactId>ic-client</artifactId>
+			<version>[1.0.0-SNAPSHOT, 3.0.0-SNAPSHOT)</version>
+			<scope>provided</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.gcube.contentmanagement</groupId>
+			<artifactId>storage-manager-wrapper</artifactId>
+			<version>[1.0.0-SNAPSHOT, 3.0.0-SNAPSHOT)</version>
+			<scope>provided</scope>
+		</dependency>
+		<dependency>
+			<groupId>junit</groupId>
+			<artifactId>junit</artifactId>
+			<version>4.11</version>
+		</dependency>
+		<dependency>
+			<groupId>org.gcube.core</groupId>
+			<artifactId>common-scope</artifactId>
+			<version>[1.0.0-SNAPSHOT, 3.0.0-SNAPSHOT)</version>
+			<scope>provided</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.gcube.resources</groupId>
+			<artifactId>common-gcore-resources</artifactId>
+			<version>[1.0.0-SNAPSHOT,2.0.0-SNAPSHOT)</version>
+			<scope>provided</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.gcube.core</groupId>
+			<artifactId>common-encryption</artifactId>
+			<version>[1.0.1-SNAPSHOT,3.0.0-SNAPSHOT)</version>
+			<scope>provided</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.gcube.dataanalysis</groupId>
+			<artifactId>ecological-engine</artifactId>
+			<version>[1.7.2-SNAPSHOT,1.8.0-SNAPSHOT)</version>
+			<exclusions>
+				<exclusion>
+					<groupId>postgresql</groupId>
+					<artifactId>postgresql</artifactId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+		<dependency>
+			<groupId>com.mysql</groupId>
+			<artifactId>mysql-connector</artifactId>
+			<version>5.1.7</version>
+		</dependency>
+		<dependency>
+			<groupId>postgresql</groupId>
+			<artifactId>postgresql</artifactId>
+			<version>9.1-901-1.jdbc4</version>
+		</dependency>
+		<dependency>
+			<groupId>org.gcube.resources.discovery</groupId>
+			<artifactId>discovery-client</artifactId>
+			<version>[1.0.0-SNAPSHOT,2.0.0-SNAPSHOT)</version>
+		</dependency>
+		<dependency>
+			<groupId>rapidminer-custom</groupId>
+			<artifactId>swissql-api</artifactId>
+			<version>1.0.0</version>
+		</dependency>
+	</dependencies>
+	<repositories>
+		<repository>
+			<id>dnet-deps</id>
+			<name>dnet-deps</name>
+			<url>http://maven.research-infrastructures.eu/nexus/content/repositories/dnet-deps//</url>
+		</repository>
+	</repositories>
+	<build>
+		<plugins>
+			<plugin>
+				<artifactId>maven-assembly-plugin</artifactId>
+				<configuration>
+					<descriptorRefs>
+						<descriptorRef>jar-with-dependencies</descriptorRef>
+					</descriptorRefs>
+				</configuration>
+				<executions>
+					<execution>
+						<phase>package</phase>
+						<goals>
+							<goal>single</goal>
+						</goals>
+					</execution>
+				</executions>
+			</plugin>
+		</plugins>
+	</build>
+	<artifactId>databases-resources-manager</artifactId>
+</project>
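The cfg/*.properties files earlier in this changeset are registries that map operator names to implementation classes. A minimal sketch of how such a registry is typically consumed, assuming nothing beyond the standard java.util.Properties API; the file name and key are taken from cfg/clusterers.properties above:

```java
import java.io.FileInputStream;
import java.util.Properties;

public class AlgorithmRegistryExample {
    public static void main(String[] args) throws Exception {
        Properties registry = new Properties();
        FileInputStream in = new FileInputStream("cfg/clusterers.properties");
        try {
            registry.load(in);           // name -> fully qualified class name
        } finally {
            in.close();
        }
        // look up an operator by name and instantiate it reflectively
        String className = registry.getProperty("DBSCAN");
        Object clusterer = Class.forName(className).newInstance();
        System.out.println("Instantiated " + clusterer.getClass().getName());
    }
}
```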
diff --git a/src/main/java/org/gcube/dataanalysis/databases/access/DatabasesDiscoverer.java b/src/main/java/org/gcube/dataanalysis/databases/access/DatabasesDiscoverer.java
new file mode 100644
index 0000000..6e73f60
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/databases/access/DatabasesDiscoverer.java
@@ -0,0 +1,37 @@
+package org.gcube.dataanalysis.databases.access;
+
+import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
+import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
+
+import java.util.List;
+
+import org.gcube.common.resources.gcore.ServiceEndpoint;
+import org.gcube.dataanalysis.databases.resources.DBResource;
+import org.gcube.resources.discovery.client.api.DiscoveryClient;
+import org.gcube.resources.discovery.client.api.DiscoveryException;
+import org.gcube.resources.discovery.client.api.InvalidResultException;
+import org.gcube.resources.discovery.client.queries.impl.XQuery;
+
+/** Class that discovers databases by submitting a query to the IS.
+ * It retrieves a list of DBResource objects. */
+public class DatabasesDiscoverer extends ResourceDiscoverer {
+
+	public DatabasesDiscoverer() {
+
+	}
+
+	/** Method that performs the discovery of database resources */
+	public List<DBResource> discover() throws IllegalStateException, DiscoveryException, InvalidResultException {
+
+		// query ServiceEndpoint resources whose category is 'Database'
+		XQuery query = queryFor(ServiceEndpoint.class);
+		query.addCondition("$resource/Profile/Category/text() eq 'Database'");
+
+		DiscoveryClient<DBResource> discovery = clientFor(DBResource.class);
+		List<DBResource> resources = discovery.submit(query);
+
+		return resources;
+	}
+
+}
diff --git a/src/main/java/org/gcube/dataanalysis/databases/access/ResourceDiscoverer.java b/src/main/java/org/gcube/dataanalysis/databases/access/ResourceDiscoverer.java
new file mode 100644
index 0000000..5dcdf96
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/databases/access/ResourceDiscoverer.java
@@ -0,0 +1,13 @@
+package org.gcube.dataanalysis.databases.access;
+
+import java.util.List;
+
+import org.gcube.dataanalysis.databases.resources.DBResource;
+
+/** Class that discovers generic resources by submitting a query to the IS.
+ * It retrieves a list of DBResource objects. */
+public abstract class ResourceDiscoverer {
+
+	public abstract List<DBResource> discover();
+}
diff --git a/src/main/java/org/gcube/dataanalysis/databases/accessold/AddressesDiscoverer.java b/src/main/java/org/gcube/dataanalysis/databases/accessold/AddressesDiscoverer.java
new file mode 100644
index 0000000..b81ceec
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/databases/accessold/AddressesDiscoverer.java
@@ -0,0 +1,63 @@
+package org.gcube.dataanalysis.databases.accessold;
+
+import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
+import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.gcube.common.resources.gcore.ServiceEndpoint;
+import org.gcube.common.scope.api.ScopeProvider;
+import org.gcube.dataanalysis.databases.resources.DBResource;
+import org.gcube.resources.discovery.client.api.DiscoveryClient;
+import org.gcube.resources.discovery.client.queries.impl.XQuery;
+
+/**
+ * Class that sets a scope, submits a query to the IS and recovers the list of
+ * URLs of each DBResource object
+ */
+public class AddressesDiscoverer {
+
+	/** Method to set the scope */
+	public void setScope(String scope) {
+
+		ScopeProvider.instance.set(scope);
+
+	}
+
+	/** Method to recover the list of URLs */
+	public List<String> retrieveAddress(String Category) {
+
+		List<String> addresses = new ArrayList<String>();
+
+		XQuery query = queryFor(ServiceEndpoint.class);
+		query.addCondition("$resource/Profile/Category/text() eq '" + Category
+				+ "'");
+
+		DiscoveryClient<DBResource> submitop = clientFor(DBResource.class);
+		List<DBResource> access = submitop.submit(query);
+
+		// System.out.println("size resource: "+access.size());
+
+		int APsize = 0;
+		String address = "";
+
+		for (int i = 0; i < access.size(); i++) {
+
+			APsize = access.get(i).getAccessPoints().size();
+
+			for (int j = 0; j < APsize; j++) {
+
+				// collect the address of each access point of the resource
+				address = access.get(i).getAccessPoints().get(j).address();
+				addresses.add(address);
+
+			}
+
+		}
+
+		return addresses;
+
+	}
+
+}
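A minimal usage sketch for the discoverers above, assuming a valid gCube scope has been set; the scope string is a placeholder:

```java
import java.util.List;

import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataanalysis.databases.access.DatabasesDiscoverer;
import org.gcube.dataanalysis.databases.resources.DBResource;

public class DiscoveryExample {
    public static void main(String[] args) throws Exception {
        // the scope must be set before querying the IS ("/gcube/devsec" is illustrative)
        ScopeProvider.instance.set("/gcube/devsec");
        List<DBResource> resources = new DatabasesDiscoverer().discover();
        for (DBResource resource : resources) {
            System.out.println(resource.getResourceName() + " @ " + resource.getHostedOn());
        }
    }
}
```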
diff --git a/src/main/java/org/gcube/dataanalysis/databases/accessold/DatabasesDiscoverer.java b/src/main/java/org/gcube/dataanalysis/databases/accessold/DatabasesDiscoverer.java
new file mode 100644
index 0000000..0e7df88
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/databases/accessold/DatabasesDiscoverer.java
@@ -0,0 +1,38 @@
+package org.gcube.dataanalysis.databases.accessold;
+
+import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
+import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;
+
+import java.util.List;
+
+import org.gcube.common.resources.gcore.ServiceEndpoint;
+import org.gcube.common.scope.api.ScopeProvider;
+import org.gcube.dataanalysis.databases.resources.DBResource;
+import org.gcube.resources.discovery.client.api.DiscoveryClient;
+import org.gcube.resources.discovery.client.queries.impl.XQuery;
+
+/** Class that discovers databases by submitting a query to the IS in a given scope.
+ * It retrieves a list of DBResource objects. */
+public class DatabasesDiscoverer extends ResourceDiscoverer {
+
+	public DatabasesDiscoverer() {
+
+	}
+
+	/** Method that performs the discovery of database resources */
+	public List<DBResource> discovery(String scope) {
+
+		// the scope must be set before querying the IS
+		ScopeProvider.instance.set(scope);
+
+		XQuery query = queryFor(ServiceEndpoint.class);
+		query.addCondition("$resource/Profile/Category/text() eq 'Database'");
+
+		DiscoveryClient<DBResource> discovery = clientFor(DBResource.class);
+		List<DBResource> resources = discovery.submit(query);
+
+		return resources;
+	}
+
+}
diff --git a/src/main/java/org/gcube/dataanalysis/databases/accessold/ResourceDiscoverer.java b/src/main/java/org/gcube/dataanalysis/databases/accessold/ResourceDiscoverer.java
new file mode 100644
index 0000000..c04b047
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/databases/accessold/ResourceDiscoverer.java
@@ -0,0 +1,15 @@
+package org.gcube.dataanalysis.databases.accessold;
+
+import java.util.List;
+
+import org.gcube.dataanalysis.databases.resources.DBResource;
+
+/** Class that discovers generic resources by submitting a query to the IS in a given scope.
+ * It retrieves a list of DBResource objects. */
+public abstract class ResourceDiscoverer {
+
+	public abstract List<DBResource> discovery(String scope);
+
+}
diff --git a/src/main/java/org/gcube/dataanalysis/databases/converter/SqlDialectConverter.java b/src/main/java/org/gcube/dataanalysis/databases/converter/SqlDialectConverter.java
new file mode 100644
index 0000000..dd3e88a
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/databases/converter/SqlDialectConverter.java
@@ -0,0 +1,36 @@
+package org.gcube.dataanalysis.databases.converter;
+
+import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
+
+import com.adventnet.swissqlapi.SwisSQLAPI;
+import com.adventnet.swissqlapi.sql.exception.ConvertException;
+import com.adventnet.swissqlapi.sql.parser.ParseException;
+
+/**
+ * Class that converts a query to a given SQL dialect by means of the SwisSQL API
+ */
+public class SqlDialectConverter {
+
+	private SwisSQLAPI obj;
+
+	// Constructor
+	public SqlDialectConverter(String query) {
+
+		obj = new SwisSQLAPI(query);
+
+	}
+
+	/** Converts the query to the dialect identified by the given SwisSQL code */
+	public String convert(int dialect) throws ParseException, ConvertException {
+
+		String queryConverted = null;
+
+		queryConverted = obj.convert(dialect);
+
+		AnalysisLogger.getLogger().debug(
+				"In SqlDialectConverter-> query converted: " + queryConverted);
+
+		return queryConverted;
+
+	}
+
+}
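A minimal sketch of a conversion through the wrapper above. SwisSQL identifies target dialects by integer codes; the constant name used here is an assumption about that API:

```java
import com.adventnet.swissqlapi.SwisSQLAPI;

import org.gcube.dataanalysis.databases.converter.SqlDialectConverter;

public class ConversionExample {
    public static void main(String[] args) throws Exception {
        // rewrite a T-SQL-style query for a PostgreSQL target
        SqlDialectConverter converter = new SqlDialectConverter("SELECT TOP 10 * FROM species");
        String converted = converter.convert(SwisSQLAPI.POSTGRESQL); // assumed dialect constant
        System.out.println(converted);
    }
}
```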
diff --git a/src/main/java/org/gcube/dataanalysis/databases/lexer/LexicalAnalyzer.java b/src/main/java/org/gcube/dataanalysis/databases/lexer/LexicalAnalyzer.java
new file mode 100644
index 0000000..342b871
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/databases/lexer/LexicalAnalyzer.java
@@ -0,0 +1,203 @@
+package org.gcube.dataanalysis.databases.lexer;
+
+import java.util.ArrayList;
+
+import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
+
+/**
+ * Class that filters out queries that are not read-only compliant, by means of
+ * a lexical analysis
+ */
+public class LexicalAnalyzer {
+
+	private ArrayList<String> BlackList = new ArrayList<String>(); // Keywords' blacklist
+
+	// Method that performs the lexical analysis
+	public boolean analyze(String query) throws Exception {
+
+		boolean NotAllowed = false;
+
+		// building the keywords' blacklist;
+		// the following keywords are common to MySQL and PostgreSQL databases
+		BlackList.add("INSERT");
+		BlackList.add("DELETE");
+		BlackList.add("UPDATE");
+		BlackList.add("CREATE");
+		BlackList.add("ALTER");
+		BlackList.add("DROP");
+		BlackList.add("GRANT");
+		BlackList.add("REVOKE");
+		BlackList.add("TRUNCATE");
+
+		BlackList.add("DO");
+		BlackList.add("START TRANSACTION");
+		BlackList.add("COMMIT");
+		BlackList.add("BEGIN");
+		BlackList.add("ROLLBACK");
+		BlackList.add("SET");
+		BlackList.add("SAVEPOINT");
+		BlackList.add("RELEASE SAVEPOINT");
+		BlackList.add("LOCK");
+
+		BlackList.add("DECLARE");
+		BlackList.add("PREPARE");
+		BlackList.add("FETCH");
+		// BlackList.add("EXPLAIN");
+		BlackList.add("ANALYZE");
+		BlackList.add("EXECUTE");
+		BlackList.add("SHOW");
+		BlackList.add("RESET");
+
+		String queryParsed = null;
+
+		int NumOfBlackList = BlackList.size();
+
+		AnalysisLogger.getLogger().debug(
+				"LexicalAnalyzer->blacklist size: " + NumOfBlackList);
+
+		// parse the query using regular expressions
+		queryParsed = LexicalAnalyzer.parseRegularExpressions(query);
+
+		// check if the query contains a word defined in the blacklist
+		NotAllowed = check(queryParsed);
+
+		AnalysisLogger.getLogger().debug(
+				"LexicalAnalyzer->query not allowed: " + NotAllowed);
+
+		if (NotAllowed == true) {
+
+			throw new Exception("Only read-only queries are allowed");
+
+		}
+
+		return NotAllowed;
+
+	}
+
+	// Method that checks if the query contains a word defined in the blacklist
+	public boolean check(String query) {
+
+		boolean NotAllowed = false;
+
+		check_tokens: for (int j = 0; j < BlackList.size(); j++) {
+
+			// keyword check with a regular expression; in multi-word keywords
+			// any run of spaces matches one or more spaces
+			String Keyword = BlackList.get(j);
+
+			String regex = ".*\\b" + Keyword.replaceAll(" +", "[ ]\\+")
+					+ "\\b.*";
+
+			if (query.toUpperCase().matches(regex)) {
+
+				NotAllowed = true;
+
+				break check_tokens;
+
+			}
+
+			// if (Keyword.contains(" ")) {
+			//
+			// String[] arrayKeyword = Keyword.split(" ");
+			//
+			// int i;
+			// // boolean notContained = false;
+			//
+			// String Regex = "";
+			// String regexKeyword = regexKeyword = ".*\\b" + arrayKeyword[0]
+			// + "\\s*";
+			// Regex = regexKeyword;
+			//
+			// for (i = 1; i < arrayKeyword.length; i++) {
+			//
+			// if (i == arrayKeyword.length - 1) {
+			//
+			// Regex = Regex + arrayKeyword[i] + "\\b.*";
+			//
+			// } else {
+			//
+			// Regex = Regex + arrayKeyword[i] + "\\s*";
+			//
+			// }
+			//
+			// }
+			//
+			// if (query.toUpperCase().matches(Regex)) {
+			//
+			// // notContained = true;
+			//
+			// AnalysisLogger.getLogger().debug(
+			// "LexicalAnalyzer-> : the query contains the word in the blacklist "
+			// + BlackList.get(j));
+			//
+			// NotAllowed = true;
+			//
+			// break check_tokens;
+			//
+			// }
+			//
+			// } else {
+			//
+			// String regexKeyword = ".*\\b" + BlackList.get(j) + "\\b.*";
+			//
+			// if (query.toUpperCase().matches(regexKeyword)) {
+			//
+			// AnalysisLogger.getLogger().debug(
+			// "LexicalAnalyzer-> : the query contains the word in the blacklist "
+			// + BlackList.get(j));
+			//
+			// NotAllowed = true;
+			//
+			// break check_tokens;
+			//
+			// }
+			//
+			// }
+
+		}
+
+		return NotAllowed;
+
+	}
+
+	public ArrayList<String> getBlackList() {
+
+		return BlackList;
+
+	}
+
+	private static String parseRegularExpressions(String phrase) {
+
+		// replacement of the punctuation characters
+		// String todelete = "[\\]\\[!#$%&()*+,./:;<=>?@\\^_{|}~-]";
+		String todelete = "[\\]\\[!#$%&()*+,./:;<=>?@\\^{|}~-]";
+		phrase = phrase.replaceAll(todelete, " ");
+		phrase = phrase.replaceAll("[ ]+", " ");
+
+		AnalysisLogger.getLogger().debug(
+				"LexicalAnalyzer-> : replacing query " + phrase);
+
+		// elimination, by means of a replacement, of the words enclosed in '',
+		// "", ``
+		String apex = "'.*'";
+		phrase = phrase.replaceAll(apex, "");
+		String apex2 = "\".*\"";
+		phrase = phrase.replaceAll(apex2, "");
+		String apex3 = "`.*`";
+		phrase = phrase.replaceAll(apex3, "");
+
+		AnalysisLogger.getLogger().debug(
+				"LexicalAnalyzer-> : parsed string " + phrase);
+
+		return phrase;
+
+	}
+
+}
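A minimal sketch of the analyzer's contract: read-only statements pass, blacklisted ones raise an exception. The table names are illustrative:

```java
import org.gcube.dataanalysis.databases.lexer.LexicalAnalyzer;

public class LexerExample {
    public static void main(String[] args) throws Exception {
        LexicalAnalyzer analyzer = new LexicalAnalyzer();

        // a plain SELECT passes the blacklist check and returns false
        boolean rejected = analyzer.analyze("SELECT * FROM hcaf_d LIMIT 10");
        System.out.println("rejected: " + rejected);

        try {
            // DROP is blacklisted, so analyze() throws
            analyzer.analyze("DROP TABLE hcaf_d");
        } catch (Exception e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}
```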
diff --git a/src/main/java/org/gcube/dataanalysis/databases/lexer/MySQLLexicalAnalyzer.java b/src/main/java/org/gcube/dataanalysis/databases/lexer/MySQLLexicalAnalyzer.java
new file mode 100644
index 0000000..180cdb7
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/databases/lexer/MySQLLexicalAnalyzer.java
@@ -0,0 +1,56 @@
+package org.gcube.dataanalysis.databases.lexer;
+
+import java.util.ArrayList;
+
+import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
+
+/**
+ * Class that filters out queries that are not read-only compliant for the
+ * MySQL database, by means of a lexical analysis
+ */
+public class MySQLLexicalAnalyzer extends LexicalAnalyzer {
+
+	public MySQLLexicalAnalyzer() {
+
+		// extend the blacklist inherited from the base analyzer
+		ArrayList<String> BlackList = super.getBlackList();
+
+		updateBlackList(BlackList);
+
+	}
+
+	// update the keywords' blacklist
+	private void updateBlackList(ArrayList<String> BlackList) {
+
+		// MySQL's keywords
+		BlackList.add("RENAME");
+		BlackList.add("REPLACE");
+		BlackList.add("LOAD DATA INFILE");
+		BlackList.add("CALL");
+		BlackList.add("HANDLER");
+		BlackList.add("UNLOCK");
+		BlackList.add("DEALLOCATE PREPARE");
+		BlackList.add("OPEN");
+		BlackList.add("CLOSE");
+		BlackList.add("BACKUP");
+		BlackList.add("CHECK");
+		BlackList.add("CHECKSUM");
+		BlackList.add("OPTIMIZE");
+		BlackList.add("REPAIR");
+		BlackList.add("RESTORE");
+		BlackList.add("CACHE");
+		BlackList.add("FLUSH");
+		BlackList.add("KILL");
+		BlackList.add("LOAD INDEX INTO CACHE");
+		BlackList.add("PURGE BINARY LOGS");
+//		BlackList.add("RESET");
+
+		AnalysisLogger.getLogger().debug(
+				"MySQLLexicalAnalyzer->: blacklist updated");
+
+	}
+
+}
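A minimal sketch of choosing the analyzer that matches the target platform; PostgresLexicalAnalyzer is the class added just below:

```java
import org.gcube.dataanalysis.databases.lexer.LexicalAnalyzer;
import org.gcube.dataanalysis.databases.lexer.MySQLLexicalAnalyzer;
import org.gcube.dataanalysis.databases.lexer.PostgresLexicalAnalyzer;

public class AnalyzerDispatchExample {

    // pick the platform-specific blacklist based on the resource's platform name
    static LexicalAnalyzer forPlatform(String platformName) {
        if (platformName.toLowerCase().contains("mysql")) {
            return new MySQLLexicalAnalyzer();
        }
        return new PostgresLexicalAnalyzer();
    }

    public static void main(String[] args) throws Exception {
        LexicalAnalyzer analyzer = forPlatform("postgres");
        analyzer.analyze("SELECT count(*) FROM occurrence");
    }
}
```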
OWNED"); + BlackList.add("ABORT"); + BlackList.add("CHECKPOINT"); + BlackList.add("CLOSE"); + BlackList.add("CLUSTER"); + BlackList.add("DEALLOCATE"); + BlackList.add("DISCARD"); + BlackList.add("END"); + BlackList.add("LISTEN"); + BlackList.add("LOAD"); + BlackList.add("MOVE"); + BlackList.add("NOTIFY"); + BlackList.add("REFRESH MATERIALIZED VIEW"); + BlackList.add("REINDEX"); +// BlackList.add("RESET"); + + // BlackList.add("SET ROLE"); + + // BlackList.add("SET SESSION AUTHORIZATION"); + + // BlackList.add("SET TRANSACTION"); + + // BlackList.add("SET CONSTRAINTS"); + + AnalysisLogger.getLogger().debug( + "PostgresLexicalAnalyzer->: blacklist updated"); + + } + +} diff --git a/src/main/java/org/gcube/dataanalysis/databases/resources/DBResource.java b/src/main/java/org/gcube/dataanalysis/databases/resources/DBResource.java new file mode 100644 index 0000000..77eba9d --- /dev/null +++ b/src/main/java/org/gcube/dataanalysis/databases/resources/DBResource.java @@ -0,0 +1,803 @@ +package org.gcube.dataanalysis.databases.resources; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; + +import javax.xml.bind.annotation.XmlAttribute; +import javax.xml.bind.annotation.XmlElement; +import javax.xml.bind.annotation.XmlElementRef; +import javax.xml.bind.annotation.XmlElementWrapper; +import javax.xml.bind.annotation.XmlRootElement; +import javax.xml.bind.annotation.XmlValue; + +import org.gcube.common.encryption.StringEncrypter; +import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; +import org.gcube.dataanalysis.databases.resources.processing.Normalizer; + +/** Class that describes a resource database considering information specified from the user in a xml file. + * Information are retrieved from the xml file Through the JAXB and the relative object is initialized */ + +//Database Resource Description class +@XmlRootElement(name = "Resource") +public class DBResource { + + // Variables + private String ResourceName; + private String PlatformName; + private String PlatformVersion; + private String HostedOn; + private String Port = null; + private String dbguessed = null; + + @XmlElement(name = "ID") + private String id; + + @XmlElementRef + private Profile profile; + + // Methods + public String getID() { + + if (id == null) + id = ""; + + return id; + + } + + public String getPort() { + + if (Port == null) { + + Port = ""; + + } + + return Port; + + } + + public void setPort(String value) { + + Port = value; + + } + + public String getResourceName() { + + ResourceName = this.profile.getname().trim(); + + return ResourceName; + + } + + public String getHostedOn() { + + HostedOn = this.profile.getHostedOn(); + + return HostedOn; + + } + + public String getPlatformName() { + + PlatformName = this.profile.getPlatformName(); + + if (PlatformName.contains(" ")) { + + PlatformName = PlatformName.trim(); + + } + + return PlatformName; + + } + + public void setPlatformName(String name) { + + this.profile.platform.name = name; + + } + + public String getPlatformVersion() { + + PlatformVersion = this.profile.getPlatformVersion(); + + return PlatformVersion; + + } + + public void setHostedOn(String value) { + + HostedOn = value; + + } + + public String getDBguessed() { + + return dbguessed; + + } + + public void setDBguessed(String name) { + + dbguessed = name; + + } + + public void normalize(int index) throws IOException{ + + try { + Normalizer.normalize(this, index); + } catch (IOException e) { + +// e.printStackTrace(); + + throw 
e; + } + + } + + public List getAccessPoints() { + + List ap = this.profile.accessPoints(); + + return ap; + + } + + // Class Profile + + @XmlRootElement(name = "Profile") + static class Profile { + + @XmlElement(name = "Name") + private String name; + + @XmlElementRef + private Platform platform; + + @XmlElementRef + private Runtime runtime; + + @XmlElementRef + private List accessPoints = new ArrayList(); + + public String getname() { + if (name == null) + name = ""; + + return name; + } + + public List accessPoints() { + + return accessPoints; + + } + + public String getHostedOn() { + + return this.runtime.getHostedOn().trim(); + + } + + public String getPlatformName() { + + return this.platform.getName(); + + } + + public String getPlatformVersion() { + + return this.platform.getVersion(); + + } + + } + + // Class Runtime + + @XmlRootElement(name = "RunTime") + public static class Runtime { + + @XmlElement(name = "HostedOn") + private String hostedOn; + + public String getHostedOn() { + + if (hostedOn == null) + hostedOn = ""; + + return hostedOn; + } + + } + + // Class Platform + + @XmlRootElement(name = "Platform") + public static class Platform { + + private String version; + + @XmlElement(name = "Name") + private String name; + + @XmlElement(name = "Version") + private String Version; + + @XmlElement(name = "MinorVersion") + private String minorVersion; + + @XmlElement(name = "RevisionVersion") + private String revisionVersion; + + public String getVersion() { + + // Version's computation + if ((Version == null) || (Version.equals(""))) { + + // Version="8"; + // minorVersion="4"; + // revisionVersion="0"; + + Version = ""; + minorVersion = ""; + revisionVersion = ""; + + version = Version + "." + minorVersion + "." + revisionVersion; + + } else { + version = Version; + + if ((minorVersion != null) && (!(minorVersion.equals("")))) { + + version = version.concat(".").concat(minorVersion); + + if ((revisionVersion != null) + && (!(revisionVersion.equals("")))) { + + version = version.concat(".").concat(revisionVersion); + + } + + } + + } + + return version; + + } + + public String getName() { + + return name; + + } + + } + + // Class AccessPoint + + @XmlRootElement(name = "AccessPoint") + public static class AccessPoint { + + private String endpoint; + // private String port; + private String username; + private String password; + private String DatabaseName = null; + private String Driver = null; + private String Dialect = null; + private String MaxConnections = null; + private String schema = null; + private String tableSpaceCount = null; + private String tableSpacePrefix = null; + + /* it contains the variables aquamapsWorldTable,aquamapsDataStore */ + private HashMap auxiliaryProperties = new HashMap(); + + @XmlElementRef + private Interface itfce = new Interface(); + + @XmlElementRef + private AccessData accessData = new AccessData(); + + @XmlElementWrapper(name = "Properties") + @XmlElementRef + private List properties = new ArrayList(); + + @XmlElement(name = "Description") + private String description; + + public String name() { + + return itfce.endpoint().name(); + } + + public String address() { + + endpoint = itfce.endpoint().address().trim(); + + if (endpoint == null) + endpoint = ""; + + return endpoint; + } + + public void setUrl(String value) { + + itfce.endpoint.address = value; + endpoint = itfce.endpoint.address.trim(); + + } + + public String getUsername() { + + username = this.accessData.username(); + + return username; + + } + + public String getPassword() throws 
Exception{ + + String pwd = this.accessData.password(); + + try { + password = StringEncrypter.getEncrypter().decrypt(pwd); + } catch (Exception e) { + +// e.printStackTrace(); + throw e; + + } + + return password; + + } + + public String getDescription() { + + if ((description == null) || (description.equals(""))) { + + description = "jdbc connection url"; + + } + + return description; + + } + + public String getDatabaseName() { + + if (properties.size() == 0) { + + Property p = new Property(); + p.name = "dbname"; + p.value = ""; + properties.add(p); + + return DatabaseName = ""; + + } else { + + for (int i = 0; i < properties.size(); i++) { + + if (((properties.get(i).name()).toLowerCase() + .contains("dbname")) + || (properties.get(i).name()).toLowerCase() + .contains("databasename") + || ((properties.get(i).name()).toLowerCase() + .contains("database"))) { + DatabaseName = properties.get(i).value(); + + } + + } + + if (DatabaseName == null) { + + Property p = new Property(); + p.name = "dbname"; + p.value = ""; + properties.add(p); + + DatabaseName = ""; + return DatabaseName; + } + + } + + return DatabaseName; + + } + + public void setDatabaseName(String value) { + + DatabaseName=value; + + for (int i = 0; i < properties.size(); i++) { + + if (((properties.get(i).name()).toLowerCase() + .contains("dbname")) + || (properties.get(i).name()).toLowerCase().contains( + "databasename") + || ((properties.get(i).name()).toLowerCase() + .contains("database"))) { + DatabaseName = properties.get(i).setvalue(value); + + AnalysisLogger.getLogger().debug( + "In class DBResource->setting the database's name to value : " + DatabaseName); + + } + + } + + } + + public String getDriver() { + + if (properties.size() == 0) { + + Property p = new Property(); + p.name = "driver"; + p.value = ""; + properties.add(p); + + return Driver = ""; + + } else { + + for (int i = 0; i < properties.size(); i++) { + + if ((properties.get(i).name()).toLowerCase().contains( + "driver")) { + Driver = properties.get(i).value(); + + } + + } + + if (Driver == null) { + + Property p = new Property(); + p.name = "driver"; + p.value = ""; + properties.add(p); + + Driver = ""; + + return Driver; + } + + } + + return Driver; + + } + + public void SetDriver(String value) { + + // Driver="org"+"."+value+"."+"Driver"; + + for (int i = 0; i < properties.size(); i++) { + + if ((properties.get(i).name()).toLowerCase().contains("driver")) { + Driver = properties.get(i).setvalue(value); + + AnalysisLogger.getLogger().debug( + "In class DBResource->setting the driver's name to value : " + Driver); + + } + + } + + } + + public String getDialect() { + + if (properties.size() == 0) { + + Property p = new Property(); + p.name = "dialect"; + p.value = ""; + properties.add(p); + + return Dialect = ""; + + } else { + + for (int i = 0; i < properties.size(); i++) { + + if ((properties.get(i).name()).toLowerCase().contains( + "dialect")) { + Dialect = properties.get(i).value(); + + } + + } + + if (Dialect == null) { + + Property p = new Property(); + p.name = "dialect"; + p.value = ""; + properties.add(p); + + Dialect = ""; + + return Dialect; + } + + } + + return Dialect; + + } + + public void SetDialect(String value) { + + // Driver="org"+"."+value+"."+"Driver"; + + for (int i = 0; i < properties.size(); i++) { + + if ((properties.get(i).name()).toLowerCase() + .contains("dialect")) { + + Dialect = properties.get(i).setvalue(value); + + AnalysisLogger.getLogger().debug( + "In class DBResource->Setting the dialect: " + + Dialect); + + } + + } + + 
} + + public String getMaxConnections() { + + /* Check if the AccessPoint object does not have a Properties section */ + if (properties.size() == 0) { + + return MaxConnections = "2"; + } + + else { + + for (int i = 0; i < properties.size(); i++) { + + if ((properties.get(i).name()).equals("maxConnection")) { + MaxConnections = properties.get(i).value(); + + } + + } + + if (MaxConnections == null) + return MaxConnections = "2"; + else + return MaxConnections; + + } + + } + + public String getSchema() { + + /* Check if the AccessPoint object does not have a Properties section */ + if (properties.size() == 0) + return schema = "public"; + + else { + + for (int i = 0; i < properties.size(); i++) { + + // + if ((properties.get(i).name()).equals("schema")) { + schema = properties.get(i).value(); + + } + + } + + if (schema == null) + return schema = "public"; + else + return schema; + + } + + } + + public String getTableSpaceCount() { + + /* Check if the AccessPoint object does not have a Properties section */ + if (properties.size() == 0) + return tableSpaceCount = "0"; + + else { + + for (int i = 0; i < properties.size(); i++) { + + if ((properties.get(i).name()).equals("tableSpaceCount")) { + tableSpaceCount = properties.get(i).value(); + + } + + } + + if (tableSpaceCount == null) + return tableSpaceCount = "0"; + else + return tableSpaceCount; + + } + + } + + public String getTableSpacePrefix() { + + /* Check if the AccessPoint object does not have a Properties section */ + if (properties.size() == 0) + return tableSpacePrefix = ""; + + else { + + for (int i = 0; i < properties.size(); i++) { + + if ((properties.get(i).name()).equals("tableSpacePrefix")) { + tableSpacePrefix = properties.get(i).value(); + + } + + } + + if (tableSpacePrefix == null) + return tableSpacePrefix = ""; + else + return tableSpacePrefix; + + } + + } + + public HashMap getAuxiliaryProperties() { + + String AuxiliaryProperty; + + /* Check if the AccessPoint object does not have a Properties section */ + if (properties.size() == 0) + return auxiliaryProperties; + + else { + + for (int i = 0; i < properties.size(); i++) { + + if ((properties.get(i).name()).equals("aquamapsWorldTable")) { + AuxiliaryProperty = properties.get(i).value(); + + auxiliaryProperties.put("aquamapsWorldTable", + AuxiliaryProperty); + + } + + if ((properties.get(i).name()).equals("aquamapsDataStore")) { + AuxiliaryProperty = properties.get(i).value(); + + auxiliaryProperties.put("aquamapsDataStore", + AuxiliaryProperty); + + } + + } + + return auxiliaryProperties; + + } + + } + + } + + // Class Interface + @XmlRootElement(name = "Interface") + public static class Interface { + + @XmlElementRef + private Endpoint endpoint = new Endpoint(); + + public Endpoint endpoint() { + return endpoint; + } + } + + // Class Endpoint + @XmlRootElement(name = "Endpoint") + public static class Endpoint { + + @XmlAttribute(name = "EntryName") + private String name; + + @XmlValue + private String address; + + public String name() { + return name; + } + + public String address() { + + if (address == null) + address = ""; + return address; + } + + } + + // Class AccessData + @XmlRootElement(name = "AccessData") + public static class AccessData { + + @XmlElement(name = "Username") + private String username; + + @XmlElement(name = "Password") + private String password; + + public String username() { + + if ((username == null) || (username.equals(""))) { + + username = "gcube"; + + } + + return username; + } + + public String password() { + + if ((password == null) || 
(password.equals(""))) { + + password = "d4science"; + + } + return password; + } + + } + + // Class Property + @XmlRootElement(name = "Property") + public static class Property { + + @XmlElement(name = "Name") + private String name; + + @XmlElement(name = "Value") + private String value; + + public String name() { + return name; + } + + public String value() { + return value; + } + + public String setvalue(String val) { + + value = val; + return value; + + } + + } + +} diff --git a/src/main/java/org/gcube/dataanalysis/databases/resources/processing/Decider.java b/src/main/java/org/gcube/dataanalysis/databases/resources/processing/Decider.java new file mode 100644 index 0000000..aa92c16 --- /dev/null +++ b/src/main/java/org/gcube/dataanalysis/databases/resources/processing/Decider.java @@ -0,0 +1,503 @@ +package org.gcube.dataanalysis.databases.resources.processing; + +import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; +import org.gcube.dataanalysis.databases.resources.DBResource; + +/** + * Class that, taking the url as input, parses and builds the Url field + * through a decision tree + */ +public class Decider { + + // Method that implements the decision tree used to parse and build the Url field. + public static void decide(DBResource obj, int index) { + + AnalysisLogger.getLogger().debug( + "In class Decider->starting the parsing process"); + + String EntireUrl = ""; + + String[] SplitOne = null; + + boolean varone = false; + boolean var = false; + + if (obj.getAccessPoints().get(index).address().contains("//")) { // the url contains the character "//" + + AnalysisLogger.getLogger().debug( + "In class Decider->the url contains the character '//' "); + + varone = true; + + SplitOne = obj.getAccessPoints().get(index).address().split("//"); // Split on this node of the tree + + AnalysisLogger.getLogger().debug( + "In class Decider->split operation on '//' "); + + AnalysisLogger.getLogger().debug( + "In class Decider->SplitOne's length: " + SplitOne.length); + + // Test Print + for (int i = 0; i < SplitOne.length; i++) { + + AnalysisLogger.getLogger().debug( + "In class Decider->Split_one: " + SplitOne[i]); + + } + + if (SplitOne.length > 1) { // the split produced two parts: one on the left of "//" and one on the right + + // try to build the first part of the url string + + // recover the left part of the url + + if (SplitOne[0].length() == 0) { // There is no information on the left of "//".
// Left Node LevelTree=2 + + EntireUrl = EntireUrl + "jdbc" + ":"; + + if (obj.getPlatformName().toLowerCase().contains("mysql")) { + + EntireUrl = EntireUrl + "mysql" + ":"; + + } else { + + EntireUrl = EntireUrl + "postgresql" + ":"; + + } + + AnalysisLogger.getLogger().debug( + "In class Decider->result: " + EntireUrl); + + } + + // Split operation on the Left Node LevelTree=2 + + else { // there is information on the left of "//" + + String[] SplitTwo = SplitOne[0].split(":"); + + AnalysisLogger.getLogger().debug( + "In class Decider->split operation on ':'"); + + AnalysisLogger.getLogger().debug( + "In class Decider->Split_two's length: " + SplitTwo.length); + + // Test Print + for (int i = 0; i < SplitTwo.length; i++) { + + AnalysisLogger.getLogger().debug( + "In class Decider->Split_two: " + SplitTwo[i]); + + } + + // check on the length + + if (SplitTwo.length == 2) { // the two strings "jdbc" and the driver's name are both present + + if ((obj.getPlatformName().toLowerCase().contains("postgres"))) { + + AnalysisLogger.getLogger().debug("In class Decider->setting the url using the driver"); + + EntireUrl = SplitTwo[0] + ":" + "postgresql" + ":"; + } + if (obj.getPlatformName().toLowerCase().contains("mysql")) { + + EntireUrl = SplitTwo[0] + ":" + "mysql" + ":"; + + } + + } else { // there is a single string: either "jdbc" or the driver's name + + if (SplitTwo[0].toLowerCase().equals("jdbc")) { + + EntireUrl = "jdbc" + ":"; + + if ((obj.getPlatformName().toLowerCase().contains("postgres"))) { + EntireUrl = EntireUrl + "postgresql" + ":"; + + } + + if (obj.getPlatformName().toLowerCase().contains("mysql")) { + + EntireUrl = EntireUrl + "mysql" + ":"; + + } + + } else { // the driver's name is present. The db variable, set by the guessDB method, is checked to set the url properly. + + if (obj.getPlatformName().toLowerCase().contains("postgres")) { + + AnalysisLogger.getLogger().debug("In class Decider->setting the url using the driver postgres"); + + EntireUrl = "jdbc" + ":" + "postgresql" + ":"; + + } + + else if (obj.getPlatformName().toLowerCase().contains("mysql")) { + + AnalysisLogger.getLogger().debug("In class Decider->setting the url using the driver mysql"); + + EntireUrl = "jdbc" + ":" + "mysql" + ":"; + + } + + } + + } + + } + + } else { // the split produced a single part, on the left of "//" + + EntireUrl = obj.getAccessPoints().get(index).address(); + + if ((obj.getAccessPoints().get(index).address().toLowerCase().contains("postgres")) + || (obj.getAccessPoints().get(index).address().toLowerCase().contains("postgis"))) { + + EntireUrl = "jdbc:postgresql://"; + + } + if (obj.getAccessPoints().get(index).address().toLowerCase().contains("mysql")) { + + EntireUrl = "jdbc:mysql://"; + + } + + // the url is built using the available information + EntireUrl = EntireUrl + obj.getHostedOn() + ":" + obj.getPort() + "/" + obj.getAccessPoints().get(index).getDatabaseName(); + + AnalysisLogger.getLogger().debug( + "In class Decider->result: " + EntireUrl); + + var = true; + } + + } + + if ((obj.getAccessPoints().get(index).address().contains("/")) + && (!var)) { // it manages several cases. It is selected if: 1) only the part on the right of "//" is present, 2) the entire string is present, 3) only the part on the right of "/" is present
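Before following the remaining branches, a compact sketch of the overall splitting order that Decider applies: first on "//", then on "/" for the database name, then on ":" for host and port. The URL and names below are hypothetical examples, not taken from this patch.

    public class JdbcUrlSplitSketch {
        public static void main(String[] args) {
            String url = "jdbc:postgresql://localhost:5432/testdb";

            String[] onDoubleSlash = url.split("//");       // "jdbc:postgresql:" | "localhost:5432/testdb"
            String[] scheme = onDoubleSlash[0].split(":");  // "jdbc" | "postgresql"
            String[] onSlash = onDoubleSlash[1].split("/"); // "localhost:5432" | "testdb"
            String[] hostPort = onSlash[0].split(":");      // "localhost" | "5432"

            System.out.println(scheme[1]);   // postgresql
            System.out.println(hostPort[0]); // localhost
            System.out.println(hostPort[1]); // 5432
            System.out.println(onSlash[1]);  // testdb
        }
    }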
 + + AnalysisLogger.getLogger().debug( + "In class Decider->the url contains the character '/'"); + + if (varone) { // The hostname, the port number or both are present. A split on "/" is performed to retrieve the database's name + + // Split operation on the Right Node LevelTree=2 + + String[] SplitThree = SplitOne[1].split("/"); + + AnalysisLogger.getLogger().debug( + "In class Decider->split operation on '/'"); + + AnalysisLogger.getLogger().debug( + "In class Decider->Split_three's length: " + SplitThree.length); + + for (int i = 0; i < SplitThree.length; i++) { + + AnalysisLogger.getLogger().debug( + "In class Decider->Split_three: " + SplitThree[i]); + + } + + if (SplitThree[0].length() == 0) { // The hostname and the port number are missing. Left Node LevelTree=3 + + EntireUrl = EntireUrl + "//" + obj.getHostedOn() + ":" + obj.getPort(); + + } else { // Recover the hostname and the port number + + String[] SplitFour = SplitThree[0].split(":"); + + AnalysisLogger.getLogger().debug( + "In class Decider->split operation on ':'"); + + AnalysisLogger.getLogger().debug( + "In class Decider->Split_four's length: " + SplitFour.length); + + for (int i = 0; i < SplitFour.length; i++) { + + AnalysisLogger.getLogger().debug("In class Decider->Split_four: " + SplitFour[i]); + + } + + if (SplitFour[0].length() == 0) { // the hostname is missing. Left Node LevelTree=4 + + EntireUrl = EntireUrl + "//" + obj.getHostedOn(); + } else { // the hostname is present + obj.setHostedOn(SplitFour[0]); + + EntireUrl = EntireUrl + "//" + SplitFour[0]; + + } + + if (SplitFour.length > 1) { // the url contains the port number too
 + + obj.setPort(SplitFour[1]); + + EntireUrl = EntireUrl + ":" + SplitFour[1]; + + } else { + // the url does not contain the port number + + EntireUrl = EntireUrl + ":" + obj.getPort(); + + } + + } + + if (SplitThree.length > 1) { // Right Node LevelTree=3: the url contains the database's name + + obj.getAccessPoints().get(index).setDatabaseName(SplitThree[1]); + + EntireUrl = EntireUrl + "/" + SplitThree[1]; + + } + + else { + + // The url does not contain the database's name + + EntireUrl = EntireUrl + "/" + obj.getAccessPoints().get(index).getDatabaseName(); + + } + + } + + else { // there is only the database's name + + String[] SplitThree = obj.getAccessPoints().get(index).address().split("/"); + + AnalysisLogger.getLogger().debug( + "In class Decider->split operation on '/'"); + + obj.getAccessPoints().get(index).setDatabaseName(SplitThree[1]); + + if (SplitThree[0].length() == 0) { // only the database's name is retrieved + + if ((obj.getPlatformName().equals("postgres"))) { + + EntireUrl = "jdbc:" + "postgresql" + ":" + "//" + obj.getHostedOn() + ":" + obj.getPort() + "/" + SplitThree[1]; + + } + + if (obj.getPlatformName().toLowerCase().contains("mysql")) { + + EntireUrl = "jdbc:" + "mysql" + ":" + "//" + obj.getHostedOn() + ":" + obj.getPort() + "/" + SplitThree[1]; + + } + } + + if (SplitThree[0].length() != 0) { // there is other information on the left of "/" + + String[] SplitTwo = SplitThree[0].split(":"); + + AnalysisLogger.getLogger().debug( + "In class Decider->split operation on ':'"); + + AnalysisLogger.getLogger().debug( + "In class Decider->Split_two's length: " + SplitTwo.length); + + for (int i = 0; i < SplitTwo.length; i++) { + + AnalysisLogger.getLogger().debug( + "In class Decider->Split_two: " + SplitTwo[i]); + + } + + // check on the length + + if (SplitTwo.length == 2) { // The two strings "jdbc" and the driver's name are both present
 + + if ((SplitTwo[1].toLowerCase().contains("postgres")) + || (SplitTwo[1].toLowerCase().contains("postgis"))) { + + EntireUrl = SplitTwo[0] + ":" + "postgresql" + ":"; + } + if (SplitTwo[1].toLowerCase().contains("mysql")) { + + EntireUrl = SplitTwo[0] + ":" + "mysql" + ":"; + + } + + if ((obj.getPlatformName().toLowerCase().contains("postgres"))) { + + EntireUrl = EntireUrl + "//" + obj.getHostedOn() + ":" + obj.getPort() + "/" + SplitThree[1]; + + } + + if (obj.getPlatformName().toLowerCase().contains("mysql")) { + + EntireUrl = EntireUrl + "//" + obj.getHostedOn() + ":" + obj.getPort() + "/" + SplitThree[1]; + + } + + } else { // only one of the two strings, "jdbc" or the driver's name, is present + + if (SplitTwo[0].toLowerCase().equals("jdbc")) { // the string "jdbc" is present + + EntireUrl = "jdbc" + ":"; + + if ((obj.getPlatformName().toLowerCase().contains("postgres")) + || (obj.getPlatformName().toLowerCase().contains("postgis"))) { + + EntireUrl = EntireUrl + "postgresql" + ":" + "//" + obj.getHostedOn() + ":" + obj.getPort() + "/" + SplitThree[1]; + + } + + if (obj.getPlatformName().toLowerCase().contains("mysql")) { + + EntireUrl = EntireUrl + "mysql" + ":" + "//" + obj.getHostedOn() + ":" + obj.getPort() + "/" + SplitThree[1]; + + } + + } else { // the string with the driver's name is present + + if ((obj.getDBguessed().contains("postgis")) + || (obj.getDBguessed().contains("postgres"))) { + + EntireUrl = "jdbc" + ":" + "postgresql" + ":" + "//" + obj.getHostedOn() + ":" + obj.getPort() + "/" + SplitThree[1]; + } + if (obj.getDBguessed().contains("mysql")) { + + EntireUrl = "jdbc" + ":" + "mysql" + ":" + "//" + obj.getHostedOn() + ":" + obj.getPort() + "/" + SplitThree[1]; + + } + + } + + } + + } + + } + + } + + obj.getAccessPoints().get(index).setUrl(EntireUrl); + + AnalysisLogger.getLogger().debug( + "In class Decider->Url normalized: " + obj.getAccessPoints().get(index).address()); + + } + +} diff --git a/src/main/java/org/gcube/dataanalysis/databases/resources/processing/Guesser.java b/src/main/java/org/gcube/dataanalysis/databases/resources/processing/Guesser.java new file mode 100644 index 0000000..fba7ede --- /dev/null +++ b/src/main/java/org/gcube/dataanalysis/databases/resources/processing/Guesser.java @@ -0,0 +1,361 @@ +package org.gcube.dataanalysis.databases.resources.processing; + +import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; +import org.gcube.dataanalysis.databases.resources.DBResource; + +/** + * Class that uses the available information (platform, driver and dialect) + * as input to determine the database's type + */ + +public class Guesser { + + private String db = ""; // the database's type
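Guesser's behaviour can be summarized as a precedence over the available hints: the explicit platform name first, then the driver, then the dialect, then whatever the address contains, with postgres as the last-resort default. A reduced sketch of that idea follows; the helper is hypothetical and does not reproduce the exact branch order of guessDB below, which also fixes the default port (3306 for mysql, 5432 for postgres/postgis).

    public class GuessDbSketch {

        // simplified precedence over the available hints
        static String guess(String platform, String driver, String dialect, String address) {
            for (String hint : new String[] { platform, driver, dialect, address }) {
                if (hint == null) continue;
                String h = hint.toLowerCase();
                if (h.contains("mysql")) return "mysql";     // default port 3306
                if (h.contains("postgis")) return "postgis"; // default port 5432
                if (h.contains("postgres")) return "postgres";
            }
            return "postgres"; // default when nothing can be guessed
        }

        public static void main(String[] args) {
            System.out.println(guess("", "com.mysql.jdbc.Driver", "", ""));     // mysql
            System.out.println(guess("", "", "", "jdbc:postgresql://host/db")); // postgres
            System.out.println(guess("", "", "", ""));                          // postgres
        }
    }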
 + + // Method that determines the database's type using the available information + // (platform, driver and dialect) and sets these parameters to correct values + // when they are not specified in a well-formed way. + public String guessDB(DBResource obj, int index) { + + AnalysisLogger.getLogger().debug( + "In class Guesser->starting the guess process"); + + if ((obj.getPlatformName().trim().equals("")) + && (obj.getAccessPoints().get(index).getDriver().equals("")) + && (obj.getAccessPoints().get(index).getDialect().equals("")) + && (obj.getAccessPoints().get(index).address().toLowerCase().contains("mysql"))) { // the 'mysql' driver's name is used + + AnalysisLogger.getLogger().debug("In class Guesser->database's name and port number determined using the 'mysql' driver's name"); + + db = "mysql"; + + obj.setPort("3306"); + + } + + else if ((obj.getPlatformName().trim().equals("")) + && (obj.getAccessPoints().get(index).getDriver().equals("")) + && (obj.getAccessPoints().get(index).getDialect().equals("")) + && (obj.getAccessPoints().get(index).address().toLowerCase().contains("postgis"))) { // the 'postgis' dialect's name is used + + AnalysisLogger.getLogger().debug("In class Guesser->database's name and port number determined using the address information: " + obj.getAccessPoints().get(index).address().toLowerCase()); + + db = "postgis"; + + obj.setPort("5432"); + + } + + if ((obj.getPlatformName().trim().equals("")) + && (obj.getAccessPoints().get(index).getDriver().equals("")) + && (obj.getAccessPoints().get(index).getDialect().equals("")) + && (!(obj.getAccessPoints().get(index).address().toLowerCase().contains("mysql"))) + && (!(obj.getAccessPoints().get(index).address().toLowerCase().contains("postgres")))) { // in this case there is not enough information, so the database's type and the port number are set to the default values
 + + AnalysisLogger.getLogger().debug("In class Guesser->database's name and port number are set to the default values"); + + db = "postgres"; + + obj.setPort("5432"); + + } + + if ((obj.getPlatformName().trim().equals("")) + && (obj.getAccessPoints().get(index).getDriver().equals("")) + && (obj.getAccessPoints().get(index).getDialect().equals("")) + && (obj.getAccessPoints().get(index).address().toLowerCase().contains("postgres"))) { // the 'postgres' driver's name is used + + AnalysisLogger.getLogger().debug("In class Guesser->database's name and port number determined using the address information: " + obj.getAccessPoints().get(index).address().toLowerCase()); + + db = "postgres"; + + obj.setPort("5432"); + + } + + if ((!obj.getPlatformName().equals("")) + && (obj.getPlatformName().toLowerCase().contains("mysql"))) { + + AnalysisLogger.getLogger().debug("In class Guesser->database's name and port number determined using the platform's name: " + obj.getPlatformName().toLowerCase()); + + db = "mysql"; + obj.setPort("3306"); + + } else if ((!obj.getAccessPoints().get(index).getDriver().equals("")) + && (obj.getAccessPoints().get(index).getDriver().toLowerCase().contains("mysql"))) { + + AnalysisLogger.getLogger().debug("In class Guesser->database's name and port number determined using the driver's name: " + obj.getAccessPoints().get(index).getDriver().toLowerCase()); + + db = "mysql"; + obj.setPort("3306"); + } else if ((!obj.getAccessPoints().get(index).getDialect().equals("")) + && (obj.getAccessPoints().get(index).getDialect().toLowerCase().contains("mysql"))) { + + AnalysisLogger.getLogger().debug("In class Guesser->database's name and port number determined using the dialect's name: " + obj.getAccessPoints().get(index).getDialect().toLowerCase()); + + db = "mysql"; + obj.setPort("3306"); + + } + + if ((!obj.getPlatformName().equals("")) + && (obj.getPlatformName().toLowerCase().contains("postgres"))) { + + AnalysisLogger.getLogger().debug("In class Guesser->database's name and port number determined using the platform's name: " + obj.getPlatformName().toLowerCase()); + + db = "postgres"; + obj.setPort("5432"); + + } else if ((!obj.getAccessPoints().get(index).getDriver().equals("")) + && (obj.getAccessPoints().get(index).getDriver().toLowerCase().contains("postgres"))) { + + AnalysisLogger.getLogger().debug("In class Guesser->database's name and port number determined using the driver's name: " + obj.getAccessPoints().get(index).getDriver().toLowerCase()); + + db = "postgres"; + obj.setPort("5432"); + } else if ((!obj.getAccessPoints().get(index).getDialect().equals("")) + && (obj.getAccessPoints().get(index).getDialect().toLowerCase().contains("postgres"))) { + + AnalysisLogger.getLogger().debug("In class Guesser->database's name and port number determined using the dialect's name: " + obj.getAccessPoints().get(index).getDialect().toLowerCase()); + + db = "postgres"; + obj.setPort("5432"); + + } + + if ((!obj.getPlatformName().equals("")) + && (obj.getPlatformName().toLowerCase().contains("postgis"))) { + + AnalysisLogger.getLogger().debug("In class Guesser->database's name and port number determined using the platform's name: " + obj.getPlatformName().toLowerCase()); + + db = "postgis"; + obj.setPort("5432"); + + } else if
 ((!obj.getAccessPoints().get(index).getDriver().equals("")) + && (obj.getAccessPoints().get(index).getDriver().toLowerCase().contains("postgis"))) { + + AnalysisLogger.getLogger().debug("In class Guesser->database's name and port number determined using the driver's name: " + obj.getAccessPoints().get(index).getDriver().toLowerCase()); + + db = "postgis"; + obj.setPort("5432"); + + } else if ((!obj.getAccessPoints().get(index).getDialect().equals("")) + && (obj.getAccessPoints().get(index).getDialect().toLowerCase().contains("postgis"))) { + + AnalysisLogger.getLogger().debug("In class Guesser->database's name and port number determined using the dialect's name: " + obj.getAccessPoints().get(index).getDialect().toLowerCase()); + + db = "postgis"; + obj.setPort("5432"); + + } else if (obj.getAccessPoints().get(index).address().toLowerCase().contains("postgis")) { + + AnalysisLogger.getLogger().debug("In class Guesser->database's name and port number determined using the address information: " + obj.getAccessPoints().get(index).address().toLowerCase()); + + db = "postgis"; + obj.setPort("5432"); + + } + + if (db.equals("")) { + + AnalysisLogger.getLogger().debug("In class Guesser->database's name and port number are set to default values because the database's type could not be determined: " + obj.getAccessPoints().get(index).address().toLowerCase()); + + db = "postgres"; + obj.setPort("5432"); + + } + + // 'Set' process of the platform, driver and dialect parameters + + // Set Platform's name Operation + + if ((db.equals("mysql")) || (db.equals("postgres"))) { + + obj.setPlatformName(db); + + AnalysisLogger.getLogger().debug( + "In class Guesser->setting platform's name: " + obj.getPlatformName()); + + } else if (db.equals("postgis")) { + + obj.setPlatformName("postgres"); + + AnalysisLogger.getLogger().debug( + "In class Guesser->setting platform's name: " + obj.getPlatformName()); + + } + + // Set Driver's name Operation + + if ((obj.getAccessPoints().get(index).getDriver().equals("")) + || (!(obj.getAccessPoints().get(index).getDriver().contains(".")))) { + + if (db.contains("postgres")) { + + obj.getAccessPoints().get(index).SetDriver("org.postgresql.Driver"); + + AnalysisLogger.getLogger().debug( + "In class Guesser->setting driver's name: " + obj.getAccessPoints().get(index).getDriver()); + + } else if (db.contains("postgis")) { + + obj.getAccessPoints().get(index).SetDriver("org.postgresql.Driver"); + + AnalysisLogger.getLogger().debug( + "In class Guesser->setting driver's name: " + obj.getAccessPoints().get(index).getDriver()); + + } else if (db.contains("mysql")) { + + obj.getAccessPoints().get(index).SetDriver("com.mysql.jdbc.Driver"); + + AnalysisLogger.getLogger().debug( + "In class Guesser->setting driver's name: " + obj.getAccessPoints().get(index).getDriver()); + } + + } + + // Set Dialect's name operation + if ((obj.getAccessPoints().get(index).getDialect().equals("")) + || (!(obj.getAccessPoints().get(index).getDialect().contains(".")))) { + + if (db.contains("postgres")) { + + obj.getAccessPoints().get(index).SetDialect("org.hibernate.dialect.PostgreSQLDialect"); + + AnalysisLogger.getLogger().debug("In class Guesser->setting dialect's name: " + obj.getAccessPoints().get(index).getDialect()); + + } else if (db.contains("postgis")) {
 + + obj.getAccessPoints().get(index).SetDialect("org.hibernatespatial.postgis.PostgisDialect"); + + AnalysisLogger.getLogger().debug("In class Guesser->setting dialect's name: " + obj.getAccessPoints().get(index).getDialect()); + + } else if (db.contains("mysql")) { + + obj.getAccessPoints().get(index).SetDialect("org.hibernate.dialect.MySQLDialect"); + + AnalysisLogger.getLogger().debug("In class Guesser->setting dialect's name: " + obj.getAccessPoints().get(index).getDialect()); + } + + } + + return db; + + } + + // it returns the db field of the Guesser object + public String getDB() { + + return db; + + } + +} diff --git a/src/main/java/org/gcube/dataanalysis/databases/resources/processing/Normalizer.java b/src/main/java/org/gcube/dataanalysis/databases/resources/processing/Normalizer.java new file mode 100644 index 0000000..94f6310 --- /dev/null +++ b/src/main/java/org/gcube/dataanalysis/databases/resources/processing/Normalizer.java @@ -0,0 +1,145 @@ +package org.gcube.dataanalysis.databases.resources.processing; + +import java.io.IOException; +import java.net.UnknownHostException; + +import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; +import org.gcube.dataanalysis.databases.resources.DBResource; + +/** Class that performs the normalization process using the available information specified by the user */ + +public class Normalizer { + + /* It performs the normalization process considering as input an access point of a DBResource resource */ + public static void normalize(DBResource obj, int index) throws IOException { + + AnalysisLogger.getLogger().debug( + "In class Normalizer->starting to guess the database's type"); + + Guesser guess = new Guesser(); + + String db = guess.guessDB(obj, index); + + obj.setDBguessed(db); + + AnalysisLogger.getLogger().debug( + "In class Normalizer->starting the normalization process"); + + // the Url contains at least one of the characters "//", "/" or ":" + if ((obj.getAccessPoints().get(index).address().contains("//")) + || (obj.getAccessPoints().get(index).address().contains("/")) + || (obj.getAccessPoints().get(index).address().contains(":"))) { + + AnalysisLogger.getLogger().debug( + "In class Normalizer->calling the parsing process of the url"); + Decider.decide(obj, index); + + } else { // the Url does not contain any of the characters "//", "/" or ":", so there is an indecision to be managed + + AnalysisLogger.getLogger().debug( + "In class Normalizer->starting to manage an indecision"); + + if ((obj.getAccessPoints().get(index).address().equals(obj.getHostedOn())) && (!(obj.getHostedOn().equals("")))) { /* the address is the hostname */ + + obj.getAccessPoints().get(index).setUrl("//" + obj.getAccessPoints().get(index).address()); + + AnalysisLogger.getLogger().debug( + "In class Normalizer->starting the decision tree process using the hostname"); + + Decider.decide(obj, index); + + } else if ((!(obj.getAccessPoints().get(index).address().equals(""))) + && (!(obj.getAccessPoints().get(index).address().equals(obj.getHostedOn())) && (obj.getHostedOn().equals("")))) { + + throw new UnknownHostException( + "the available information is not sufficient to determine the complete address: please fill in the field 'Hosted On'"); + + } else if
 ((!(obj.getAccessPoints().get(index).address().equals("jdbc"))) + && (!(obj.getAccessPoints().get(index).address().toLowerCase().contains("mysql"))) + && (!(obj.getAccessPoints().get(index).address().toLowerCase().contains("postgres"))) + && (!(obj.getAccessPoints().get(index).address().toLowerCase().contains("postgis"))) + && (!(obj.getAccessPoints().get(index).address().toLowerCase().contains(obj.getPort()))) + && (!(obj.getAccessPoints().get(index).address().equals("")))) { /* the address is the database's name */ + + obj.getAccessPoints().get(index).setUrl("/" + obj.getAccessPoints().get(index).address()); + + AnalysisLogger.getLogger().debug( + "In class Normalizer->starting the decision tree process using the database's name"); + + Decider.decide(obj, index); + + } else if ((obj.getAccessPoints().get(index).address().toLowerCase().contains("postgres")) + || (obj.getAccessPoints().get(index).address().toLowerCase().contains("postgis")) + || (obj.getAccessPoints().get(index).address().toLowerCase().contains("mysql"))) { /* the address is the driver's name */ + + obj.getAccessPoints().get(index).setUrl("jdbc:" + obj.getAccessPoints().get(index).address() + "://"); + + AnalysisLogger.getLogger().debug( + "In class Normalizer->starting the decision tree process using the driver's name"); + + Decider.decide(obj, index); + + } + + if ((obj.getAccessPoints().get(index).address().equals(""))) { /* the address is empty, so the other available information is used to build the Url */ + + AnalysisLogger.getLogger().debug( + "In class Normalizer->managing the address null"); + + // Empty address management + + if (!(obj.getHostedOn().equals("")) + && (obj.getHostedOn() != null)) { /* the hostname is used if it is not null */
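The indecision rules above reduce to a small classification of a bare address; the following is a hypothetical, simplified reduction of them (the real branch conditions carry extra negations and port checks):

    public class IndecisionSketch {

        // classify an address that contains none of "//", "/" or ":"
        static String classify(String address, String hostedOn) {
            if (address.equals(hostedOn) && !hostedOn.isEmpty())
                return "//" + address;                    // bare hostname
            if (address.toLowerCase().matches("mysql|postgres|postgis"))
                return "jdbc:" + address + "://";         // bare driver name
            if (!address.isEmpty())
                return "/" + address;                     // bare database name
            return hostedOn;                              // empty address: retry with HostedOn
        }

        public static void main(String[] args) {
            System.out.println(classify("dbhost.example.org", "dbhost.example.org")); // //dbhost.example.org
            System.out.println(classify("postgres", "dbhost.example.org"));           // jdbc:postgres://
            System.out.println(classify("testdb", "dbhost.example.org"));             // /testdb
        }
    }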
 + + AnalysisLogger.getLogger().debug( + "In class Normalizer->managing the address null using the hostname"); + + obj.getAccessPoints().get(index).setUrl(obj.getHostedOn()); + + AnalysisLogger.getLogger().debug( + "In class Normalizer->recalling the 'normalize' method"); + + normalize(obj, index); + + } + + } + + } + + } + +} diff --git a/src/main/java/org/gcube/dataanalysis/databases/sampler/RowScore.java b/src/main/java/org/gcube/dataanalysis/databases/sampler/RowScore.java new file mode 100644 index 0000000..ea27ded --- /dev/null +++ b/src/main/java/org/gcube/dataanalysis/databases/sampler/RowScore.java @@ -0,0 +1,47 @@ +package org.gcube.dataanalysis.databases.sampler; + +/** + * Class that describes a table row together with its score, i.e. the number + * of columns with a non-null value; it is used to sort sampled rows + */ +public class RowScore implements Comparable<RowScore> { + + private Object row; + private int score; + + public RowScore(Object r, int s) { + + row = r; + score = s; + + } + + // to get the row + public Object getRow() { + + return row; + } + + // to get the score + public int getScore() { + + return score; + } + + // to compare two RowScore objects in order to sort a list of these objects + @Override + public int compareTo(RowScore o) { + + return Integer.compare(this.score, o.getScore()); + } + +} diff --git a/src/main/java/org/gcube/dataanalysis/databases/sampler/Sampler.java b/src/main/java/org/gcube/dataanalysis/databases/sampler/Sampler.java new file mode 100644 index 0000000..7e869d9 --- /dev/null +++ b/src/main/java/org/gcube/dataanalysis/databases/sampler/Sampler.java @@ -0,0 +1,1286 @@ +package org.gcube.dataanalysis.databases.sampler; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.LinkedHashMap; +import java.util.Random; + +import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; +import org.gcube.dataanalysis.databases.utils.ConnectionManager; +import org.hibernate.SessionFactory; + +/** + * Class that performs different types of sample operations on a table: + * SampleOnTable, SmartSampleOnTable, RandomSampleOnTable + */ + +public class Sampler { + + // query to perform a sample operation on the table + private static final String queryForSampleOnTablePostgres = "select %1$s from \"%2$s\" limit 100"; + private static final String queryForSampleOnTableMysql = "select %1$s from %2$s limit 100"; + + // query to perform a smart sample operation randomly on the table + private static final String queryForSmartSampleOnTablePostgres = "select %1$s from \"%2$s\" order by random() limit 200"; + private static final String queryForSmartSampleOnTableMysql = "select %1$s from %2$s order by rand() limit 200"; + + // query to perform a smart sample operation on the table considering the threshold + private static final String queryForSmartSampleWithThresholdOnTablePostgres = "select %1$s from \"%2$s\" limit 200 offset %3$s";
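As a usage illustration of the random-sampling statements above, a minimal JDBC sketch follows; the connection parameters and table name are hypothetical, and "order by random()" is the Postgres spelling while "order by rand()" is the MySQL one.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class RandomSampleSketch {
        public static void main(String[] args) throws Exception {
            try (Connection conn = DriverManager.getConnection(
                         "jdbc:postgresql://localhost:5432/testdb", "user", "password");
                 Statement st = conn.createStatement();
                 ResultSet rs = st.executeQuery(
                         "select * from \"mytable\" order by random() limit 100")) {
                while (rs.next()) {
                    System.out.println(rs.getString(1)); // first column of each sampled row
                }
            }
        }
    }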
// query to perform a sample operation randomly on a table + private static final String queryForRandomSampleOnTablePostgres = "select %1$s from %2$s order by random() limit 100"; + private static final String queryForRandomSampleOnTableMysql = "select %1$s from %2$s order by rand() limit 100"; + + // query to perform a random sample operation on the table considering the threshold + private static final String queryForRandomSampleWithThresholdOnTablePostgres = "select %1$s from \"%2$s\" limit 100 offset %3$s"; + + // query to get the columns' names + private static final String queryForColumnsPostgres = "SELECT column_name FROM information_schema.COLUMNS WHERE table_name ='%1$s' and table_schema='%2$s'"; + private static final String queryForColumnsMysql = "SELECT column_name FROM information_schema.COLUMNS WHERE table_name ='%1$s' and table_schema='%2$s'"; + + private static final String MYSQL = "MySQL"; + private static final String POSTGRES = "Postgres"; + + private List listColumns = null; + + public Sampler() { + + } + + // retrieve the first 100 rows of a table + public List sampleOnTable(ConnectionManager connection, + SessionFactory dbSession, String DBType, String tableName, + String schemaName, List DataTypeColumns) throws Exception { + + AnalysisLogger.getLogger().debug( + "Sampler->starting the Sample on table operation"); + + AnalysisLogger.getLogger().debug( + "Sampler->retrieving the first 100 rows"); + + // preparing the query to get the first 100 rows of a table + + List resultSet = null; + + String querySampleOnTable = null; + + // get a formatted column list + + String listAttributes = null; + listAttributes = getQuery(connection, dbSession, DBType, tableName, schemaName, DataTypeColumns); + + // preparing the query + + if (DBType.equals(POSTGRES)) { + + querySampleOnTable = String.format(queryForSampleOnTablePostgres, listAttributes, tableName); + + } + + if (DBType.equals(MYSQL)) { + + querySampleOnTable = String.format(queryForSampleOnTableMysql, listAttributes, tableName); + + } + + AnalysisLogger.getLogger().debug("Sampler->preparing to submit the query: " + querySampleOnTable); + + resultSet = connection.executeQuery(querySampleOnTable, dbSession); + + AnalysisLogger.getLogger().debug( + "Sampler->query submitted successfully"); + + if (resultSet == null) { + AnalysisLogger.getLogger().debug("Sampler->Error: The table has no rows. Sample operation not possible"); + + throw new Exception( + "The resulting table has no rows. Sample operation not possible");
 + + } + + // return the first 100 rows + return resultSet; + + } + + // preparing the query to get the first 100 rows of a table + private String getQuery(ConnectionManager connection, + SessionFactory dbSession, String DBType, String tableName, + String schemaName, List DataTypeColumns) throws Exception { + + // get the column list + listColumns = getListColumns(connection, dbSession, DBType, tableName, schemaName); + + String listAttributes = null; + + if (listColumns != null) { + + // preparing the query with a formatted list of column names + + listAttributes = ""; + String attribute = null; + + for (int i = 0; i < listColumns.size(); i++) { + + if (DBType.equals(POSTGRES)) { + + attribute = "CAST(" + listColumns.get(i) + " as text), "; + + if (i == (listColumns.size() - 1)) { + + attribute = "CAST(" + listColumns.get(i) + " as text)"; + + } + + } + + // for a value whose datatype is char or varchar, a cast to utf8 is + // performed; for the other datatypes, a cast to binary followed by a + // cast to char is performed in order to return a correct value + // (because large numerical values cast directly to char are truncated) + + if (DBType.equals(MYSQL)) { + + if (DataTypeColumns.get(i).contains("char")) { + + attribute = "CAST(" + listColumns.get(i) + " as CHAR CHARACTER SET utf8), "; + + if (i == (listColumns.size() - 1)) { + + attribute = "CAST(" + listColumns.get(i) + " as CHAR CHARACTER SET utf8)"; + + } + + } else { + + attribute = "CAST(CAST(" + listColumns.get(i) + " as BINARY) as CHAR CHARACTER SET utf8), "; + + if (i == (listColumns.size() - 1)) { + + attribute = "CAST(CAST(" + listColumns.get(i) + " as BINARY) as CHAR CHARACTER SET utf8)"; + + } + + } + + } + + listAttributes = listAttributes + attribute; + + } + + } + + return listAttributes; + + } + + // get the column list of a table + private List getListColumns(ConnectionManager connection, + SessionFactory dbSession, String DBType, String tableName, + String schemaName) throws Exception { + + AnalysisLogger.getLogger().debug("Sampler->retrieving column names"); + + // preparing the query to get the columns' names + String queryColumns = null; + + // build the query for the Postgres database. The parameter "schemaName" is the schema name. + if (DBType.equals(POSTGRES)) { + + queryColumns = String.format(queryForColumnsPostgres, tableName, schemaName); + + }
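The select list produced by getQuery above has a different shape per backend; the small sketch below prints both shapes for a hypothetical column set (column names and types are illustrative, not taken from this patch):

    import java.util.Arrays;
    import java.util.List;

    public class CastListSketch {
        public static void main(String[] args) {
            List<String> columns = Arrays.asList("id", "name", "price");
            List<String> types = Arrays.asList("integer", "varchar", "decimal");

            StringBuilder postgres = new StringBuilder();
            StringBuilder mysql = new StringBuilder();
            for (int i = 0; i < columns.size(); i++) {
                String sep = (i < columns.size() - 1) ? ", " : "";
                postgres.append("CAST(").append(columns.get(i)).append(" as text)").append(sep);
                // char/varchar columns go straight to utf8; other types pass through BINARY first
                if (types.get(i).contains("char")) {
                    mysql.append("CAST(").append(columns.get(i))
                         .append(" as CHAR CHARACTER SET utf8)").append(sep);
                } else {
                    mysql.append("CAST(CAST(").append(columns.get(i))
                         .append(" as BINARY) as CHAR CHARACTER SET utf8)").append(sep);
                }
            }
            System.out.println(postgres); // CAST(id as text), CAST(name as text), CAST(price as text)
            System.out.println(mysql);
        }
    }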
 + + // build the query for the MySQL database. The parameter "schemaName" is the database name. + if (DBType.equals(MYSQL)) { + + queryColumns = String.format(queryForColumnsMysql, tableName, schemaName); + + } + + List columnsSet = null; + List listColumns = null; + + columnsSet = connection.executeQuery(queryColumns, dbSession); + + AnalysisLogger.getLogger().debug( + "Sampler->query submitted successfully: " + queryColumns); + + if (columnsSet != null) { + + listColumns = new ArrayList(); + + for (int i = 0; i < columnsSet.size(); i++) { + + Object element = columnsSet.get(i); + + ArrayList listvalues = new ArrayList( + ((LinkedHashMap) element).values()); + + listColumns.add((String) listvalues.get(0)); + + } + + } + + return listColumns; + + } + + // randomly retrieve 100 rows of a table having the maximum number of non-null columns + public List smartSampleOnTable(ConnectionManager connection, + SessionFactory dbSession, String DBType, String tableName, + String schemaName, long NumRows, List DataTypeColumns) + throws Exception { + + AnalysisLogger.getLogger().debug( + "Sampler->starting the Smart Sample on table operation"); + + if (NumRows == 0) { + throw new Exception( + "The table has no rows. Smart Sample operation not possible"); + } + + List rows = null; + + // if the row count is <= 700000, the pure smart sample procedure is performed; + // otherwise a variant is used, in order to work around a bug with the random + // function in Postgres + + if ((NumRows > 700000) && (DBType.equals(POSTGRES))) { // Postgres + // compute the smart sample on a table + rows = computeSmartSampleWithThreshold(connection, dbSession, + DBType, tableName, schemaName, NumRows, DataTypeColumns); + + } else { + + // computation of the iterations number + int NIterations = computeNumberIterations(NumRows); + + AnalysisLogger.getLogger().debug( + "Sampler->Iterations number: " + NIterations); + + // computation of the 100 rows randomly + + AnalysisLogger.getLogger().debug("Sampler->retrieving rows"); + + // compute the smart sample on a table + rows = computeSmartSample(connection, dbSession, DBType, tableName, + schemaName, NIterations, DataTypeColumns, + DataTypeColumns.size()); + + }
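The iteration count used here comes from computeNumberIterations (below). As a worked sketch of the same arithmetic, written as a hypothetical helper: k = ((-0.8 * NumRows) / 10000) + 1 is taken in absolute value, scaled by min(NumRows, 10000) / 200, and then clamped, which in practice always yields 1 or 2 iterations.

    public class IterationCountSketch {

        static int iterations(long numRows) {
            double k = Math.abs(((-0.8) * numRows) / 10000 + 1.0);
            long numElements = Math.min(numRows, 10000L);
            double raw = (k / 200) * numElements; // NIterations = (k/200) * NumElements
            return (int) Math.rint(Math.min(Math.max(Math.round(raw), 1), 2.0));
        }

        public static void main(String[] args) {
            System.out.println(iterations(1000));   // k = 0.92,  raw = 4.6   -> clamped to 2
            System.out.println(iterations(500000)); // k = 39.0,  raw = 1950  -> clamped to 2
            System.out.println(iterations(100));    // k = 0.992, raw = 0.496 -> raised to 1
        }
    }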
 + + if (rows == null) { + + AnalysisLogger.getLogger().debug("Sampler->Error: the Smart Sample operation on the table has not returned rows"); + + throw new Exception( + "The Smart Sample operation on the table has not returned rows"); + + } + + AnalysisLogger.getLogger().debug("Sampler->rows retrieved"); + + // return the first 100 rows + return rows; + + } + + private int computeNumberIterations(long NumRows) { + + AnalysisLogger.getLogger().debug( + "Sampler->processing iterations number"); + + AnalysisLogger.getLogger().debug("Sampler->rows number: " + NumRows); + + // build the formula k=(((-0.8)*NumRows)/10000)+1 + double k = (((-0.8) * NumRows) / 10000) + 1.0; + + // if the parameter k is negative, the sign must be changed + double paramK = 0.0; + if (Double.compare(k, 0.0) < 0) { + + paramK = k * (-1); + + } else { + paramK = k; + + } + + AnalysisLogger.getLogger().debug( + "Sampler->parameter K value: " + paramK); + + long NumElements = Math.min(NumRows, (long) 10000); + + AnalysisLogger.getLogger().debug( + "Sampler->choosing the min value of elements: " + NumElements); + + // build the formula NIterations=(k/200)*NumElements + + double NumIterations = (paramK / 200) * NumElements; + + AnalysisLogger.getLogger().debug( + "Sampler->iterations number: " + NumIterations); + + double Iterations = Math.max(Math.round(NumIterations), 1); + + AnalysisLogger.getLogger().debug("Sampler-> choosing the max value of iterations: " + Iterations); + + double NumIts = Math.min(Iterations, 2.0); + + AnalysisLogger.getLogger().debug( + "Sampler-> choosing the min value of iterations: " + NumIts); + + // round the value (with a rint logic) + return (int) (Math.rint(NumIts)); + + }
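computeColumnScore is not shown in this chunk; assuming it counts the non-null (and non-empty) cells of a row, the scoring-and-sorting idea of computeSmartSample can be sketched in isolation (all names below are hypothetical):

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    public class RowScoringSketch {

        // score = number of non-null, non-empty cells in the row
        static int score(Object[] row) {
            int s = 0;
            for (Object cell : row) {
                if (cell != null && !cell.toString().trim().isEmpty()) s++;
            }
            return s;
        }

        public static void main(String[] args) {
            List<Object[]> fetched = new ArrayList<>();
            fetched.add(new Object[] { "a", null, "c" });  // score 2
            fetched.add(new Object[] { "a", "b", "c" });   // score 3
            fetched.add(new Object[] { null, null, "c" }); // score 1

            // sort rows by score, highest first, as computeSmartSample does with RowScore
            Collections.sort(fetched, (r1, r2) -> Integer.compare(score(r2), score(r1)));

            for (Object[] row : fetched) System.out.println(score(row)); // 3, 2, 1
        }
    }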
+ private List computeSmartSample(ConnectionManager connection, + SessionFactory dbSession, String DBType, String tablename, + String schemaName, int NIterations, List DataTypeColumns, + int ColumnSize) throws Exception { + + List resultSet = null; + + String query = null; + + boolean removal = false; + + // map that contains for each row two information: the index + // corresponding to the row and the score number that is the columns' + // number with not null value + // HashMap MapRows = new HashMap(); + + List listRows = new ArrayList(); + + // get a formatted list columns + + String listAttributes = null; + listAttributes = getQuery(connection, dbSession, DBType, tablename, + schemaName, DataTypeColumns); + + // compute score for each row of the table + + // build the query for database postgres + if (DBType.equals(POSTGRES)) { + + query = String.format(queryForSmartSampleOnTablePostgres, + listAttributes, tablename); + + } + // build the query for database mysql + if (DBType.equals(MYSQL)) { + + query = String.format(queryForSmartSampleOnTableMysql, + listAttributes, tablename); + + } + + AnalysisLogger.getLogger().debug( + "Sampler->building the query extracting 200 rows randomly"); + + Object[] columnArray = null; + + // define columns number with the threshold + AnalysisLogger.getLogger().debug( + "Sampler-> column array dimension: " + ColumnSize); + + double thresholdRank = ((ColumnSize) * 80); + thresholdRank = thresholdRank / 100; + + double valCeil = Math.round(thresholdRank); + + AnalysisLogger.getLogger().debug( + "Sampler-> number column generated by the threshold: " + + thresholdRank + " rounded value: " + valCeil); + + // extract 200 rows randomly for each iteration + extractionRows: for (int i = 0; i < NIterations; i++) { + + System.out.println("index iteration: " + i); + + AnalysisLogger.getLogger().debug( + "Sampler->executing the query: " + query); + + resultSet = connection.executeQuery(query, dbSession); + + if (resultSet != null) { + + int numrows = resultSet.size(); + + AnalysisLogger.getLogger().debug( + "Sampler->rows number: " + numrows); + + AnalysisLogger + .getLogger() + .debug("Sampler->computing the score and sort the row list in a reverse natural order"); + + // build the list with 200 rows + for (int j = 0; j < numrows; j++) { + + Object element = resultSet.get(j); + + ArrayList listvalues = new ArrayList( + ((LinkedHashMap) element).values()); + + columnArray = listvalues.toArray(); + + // compute the score for each row of the table + int score = computeColumnScore(columnArray); + + // //check row score + // AnalysisLogger.getLogger().debug( + // "Sampler->row: " + j + " " + "score: " + score); + + RowScore rs = new RowScore(element, score); + + // insert the row in the list + listRows.add(rs); + + // //check the sorting of a row + // AnalysisLogger + // .getLogger() + // .debug("Sampler->sorting the list in a reverse natural order"); + + // sort by reverse natural order + Collections.sort(listRows, Collections.reverseOrder()); + + // After each iteration there is a check to verify the row + // (corresponding to the element with index=100). 
If the score of
+                 // this row reaches the threshold, then the preceding rows
+                 // in the list have enough not-null column values.
+
+                 // check whether the 100th row has reached the 80% score
+                 // threshold computed above (valCeil)
+
+                 if (listRows.size() >= 100) {
+                     int value = listRows.get(99).getScore();
+
+                     if (value >= (int) valCeil) {
+
+                         removal = true;
+
+                         AnalysisLogger.getLogger().debug(
+                                 "Sampler->row 100 with score: " + value);
+
+                         AnalysisLogger.getLogger().debug(
+                                 "Sampler->starting the removal operation");
+
+                         // remove the elements from index 100 if the list's
+                         // size is greater than 100
+                         if (listRows.size() > 100) {
+
+                             int numElemToDelete = listRows.size() - 100;
+
+                             AnalysisLogger.getLogger().debug(
+                                     "Sampler->number of rows to delete: "
+                                             + numElemToDelete);
+
+                             while (numElemToDelete != 0) {
+
+                                 listRows.remove(100);
+
+                                 numElemToDelete = numElemToDelete - 1;
+
+                             }
+
+                         }
+
+                         break extractionRows;
+
+                     }
+                 }
+
+             }
+
+         }
+
+         else {
+
+             return null;
+
+         }
+
+     }
+
+     // Remove the elements from index 100 if the list's size is
+     // greater than 100 and if this operation has not been done previously.
+
+     if ((listRows.size() > 100) && (removal == false)) {
+
+         for (int k = 0; k < listRows.size(); k++) {
+
+             AnalysisLogger.getLogger().debug(
+                     "Sampler->row with index: " + k + " score "
+                             + listRows.get(k).getScore());
+         }
+
+         AnalysisLogger.getLogger().debug(
+                 "Sampler->starting the removal operation");
+
+         int numElemToDelete = listRows.size() - 100;
+
+         AnalysisLogger.getLogger().debug(
+                 "Sampler->number of rows to delete: " + numElemToDelete);
+
+         // cut the list of rows in order to keep only 100 rows
+         while (numElemToDelete != 0) {
+
+             RowScore row = listRows.remove(100);
+
+             AnalysisLogger.getLogger().debug(
+                     "Sampler->removing row with score: " + row.getScore());
+
+             numElemToDelete = numElemToDelete - 1;
+
+         }
+
+     }
+
+     // return the list of 100 rows
+     List rows = new ArrayList();
+
+     AnalysisLogger.getLogger().debug(
+             "Sampler->preparing the result (the row list): ");
+
+     for (int i = 0; i < listRows.size(); i++) {
+
+         rows.add(listRows.get(i).getRow());
+
+     }
+
+     return rows;
+
+ }
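+ // Editor's note (sketch under stated assumptions, not original code): the
+ // loop above re-sorts the whole list after every insertion; an equivalent
+ // and cheaper alternative keeps a bounded min-heap of the best 100 rows.
+ // It assumes RowScore implements Comparable on the score, which the
+ // reverse-order sort above already requires:
+ //
+ //     java.util.PriorityQueue<RowScore> best =
+ //             new java.util.PriorityQueue<RowScore>(100); // lowest score on top
+ //     // for each scored row rs:
+ //     //     best.offer(rs);
+ //     //     if (best.size() > 100) best.poll(); // evict the lowest score

+ // compute the SmartSampleTable considering the threshold of 700000 on the
+ // rows number. It extracts 200 rows randomly for each
+ // iteration. Then it checks if the row with index equal to 100 has reached
+ // the score threshold. In this case the row list
+ // is cut in order to return the first 100 rows.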
+ private List computeSmartSampleWithThreshold(
+         ConnectionManager connection, SessionFactory dbSession,
+         String DBType, String tablename, String schemaName, long NumRows,
+         List DataTypeColumns) throws Exception {
+
+     // Define threshold
+     int threshold = 700000;
+
+     int X, Y;
+
+     // Generate randomly two indexes used to execute two queries
+
+     Random rn = new Random();
+
+     if ((threshold + 200) <= NumRows) {
+
+         AnalysisLogger.getLogger().debug(
+                 "Sampler-> 700000+200 <= rows number");
+
+         X = rn.nextInt(threshold + 1) + 200; // generate a number in
+                                              // range [200, 700200]
+
+         AnalysisLogger.getLogger().debug("Sampler->X index: " + X);
+
+         // Generate a Y index outside the window [X-200, X+200], so that
+         // the two 200-row extractions cannot overlap
+
+         int LI = X - 200;
+         int UI = X + 200;
+
+         AnalysisLogger.getLogger().debug(
+                 "Sampler->Lower Index of the range: " + LI);
+
+         AnalysisLogger.getLogger().debug(
+                 "Sampler->Upper Index of the range: " + UI);
+
+         int a;
+
+         do {
+
+             a = rn.nextInt(threshold + 1);
+
+         } while (!((a < LI) || (a > UI)));
+
+         Y = a;
+
+         AnalysisLogger.getLogger().debug("Sampler->Y index: " + Y);
+
+     } else {
+
+         AnalysisLogger.getLogger().debug(
+                 "Sampler-> 700000+200 > rows number");
+         int offset = ((int) NumRows - threshold);
+         int valForUpperIndex = 200 - offset;
+         int UpperIndex = threshold - valForUpperIndex;
+
+         // Generate an X index
+         X = rn.nextInt(UpperIndex + 1) + 200; // generate a number in
+                                               // range [200, UpperIndex+200]
+
+         AnalysisLogger.getLogger().debug("Sampler->X index: " + X);
+
+         // Generate a Y index outside the window [X-200, X+200]
+
+         int LI = X - 200;
+         int UI = X + 200;
+
+         AnalysisLogger.getLogger().debug(
+                 "Sampler->Lower Index of the range: " + LI);
+
+         AnalysisLogger.getLogger().debug(
+                 "Sampler->Upper Index of the range: " + UI);
+
+         int a;
+
+         do {
+
+             a = rn.nextInt(UpperIndex + 1);
+
+         } while (!((a < LI) || (a > UI)));
+
+         Y = a;
+
+     }
+
+     int[] indexes = new int[2];
+
+     indexes[0] = X;
+     indexes[1] = Y;
+
+     // start sample operation
+
+     List resultSet = null;
+
+     String query = null;
+
+     boolean removal = false;
+
+     List listRows = new ArrayList();
+
+     // get a formatted list of columns
+
+     String listAttributes = null;
+     listAttributes = getQuery(connection, dbSession, DBType, tablename,
+             schemaName, DataTypeColumns);
+
+     AnalysisLogger.getLogger().debug(
+             "Sampler->building the query extracting 200 rows randomly");
+
+     Object[] columnArray = null;
+
+     // extract 200 rows randomly for each iteration
+
+     // Define the two queries. One query uses the X index, one query uses
+     // the Y index. Each query extracts 200 rows.
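+     // Editor's note: a self-contained sketch (hypothetical helper, not in
+     // the original class) of the rejection loop above; it draws two offsets
+     // whose 200-row windows are guaranteed not to overlap:
+     //
+     //     static int[] twoDisjointOffsets(java.util.Random rn, int bound) {
+     //         int x = rn.nextInt(bound + 1) + 200;
+     //         int y;
+     //         do {
+     //             y = rn.nextInt(bound + 1);
+     //         } while (y >= x - 200 && y <= x + 200); // reject overlapping windows
+     //         return new int[] { x, y };
+     //     }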
+
+     // computation for the smart procedure
+
+     extractionRows: for (int i = 0; i < 2; i++) {
+
+         // build the query for database postgres
+         if (DBType.equals(POSTGRES)) {
+
+             query = String.format(
+                     queryForSmartSampleWithThresholdOnTablePostgres,
+                     listAttributes, tablename, indexes[i]);
+
+         }
+
+         AnalysisLogger.getLogger().debug(
+                 "Sampler->executing the query: " + query);
+
+         resultSet = connection.executeQuery(query, dbSession);
+
+         if (resultSet != null) {
+
+             int numrows = resultSet.size();
+
+             AnalysisLogger.getLogger().debug(
+                     "Sampler->rows number: " + numrows);
+
+             AnalysisLogger
+                     .getLogger()
+                     .debug("Sampler->computing the score and sorting the row list in a reverse natural order");
+
+             // build the list with 200 rows
+             for (int j = 0; j < numrows; j++) {
+
+                 Object element = resultSet.get(j);
+
+                 ArrayList listvalues = new ArrayList(
+                         ((LinkedHashMap) element).values());
+
+                 columnArray = listvalues.toArray();
+
+                 // compute the score for each row of the table
+                 int score = computeColumnScore(columnArray);
+
+                 RowScore rs = new RowScore(element, score);
+
+                 // insert the row in the list
+                 listRows.add(rs);
+
+                 // sort by reverse natural order
+                 Collections.sort(listRows, Collections.reverseOrder());
+
+                 // After each insertion, check the 100th row: if its score
+                 // reaches the 80% threshold, then the preceding rows have
+                 // enough not-null column values
+
+                 if (listRows.size() >= 100) {
+                     int value = listRows.get(99).getScore();
+
+                     AnalysisLogger.getLogger().debug(
+                             "Sampler-> column array dimension: "
+                                     + columnArray.length);
+
+                     double thresholdRank = ((columnArray.length) * 80);
+                     thresholdRank = thresholdRank / 100;
+
+                     double valCeil = Math.round(thresholdRank);
+
+                     AnalysisLogger.getLogger().debug(
+                             "Sampler-> threshold: " + thresholdRank
+                                     + " rounded value: " + valCeil);
+
+                     if (value >= (int) valCeil) {
+
+                         removal = true;
+
+                         AnalysisLogger.getLogger().debug(
+                                 "Sampler->row 100 with score: " + value);
+
+                         AnalysisLogger.getLogger().debug(
+                                 "Sampler->starting the removal operation");
+
+                         // remove the elements from index 100 if the list's
+                         // size is greater than 100
+                         if (listRows.size() > 100) {
+
+                             int numElemToDelete = listRows.size() - 100;
+
+                             AnalysisLogger.getLogger().debug(
+                                     "Sampler->number of rows to delete: "
+                                             + numElemToDelete);
+
+                             while (numElemToDelete != 0) {
+
+                                 listRows.remove(100);
+
+                                 numElemToDelete = numElemToDelete - 1;
+
+                             }
+
+                         }
+
+                         break extractionRows;
+
+                     }
+                 }
+
+             }
+
+         }
+
+         else {
+
+             return null;
+
+         }
+     }
+
+     // Remove the elements from index 100 if the list's size is
+     // greater than 100 and if this operation has not been done previously.
+
+     if ((listRows.size() > 100) && (removal == false)) {
+
+         AnalysisLogger.getLogger().debug(
+                 "Sampler->starting the removal operation");
+
+         int numElemToDelete = listRows.size() - 100;
+
+         AnalysisLogger.getLogger().debug(
+                 "Sampler->number of rows to delete: " + numElemToDelete);
+
+         // cut the list of rows in order to keep only 100 rows
+         while (numElemToDelete != 0) {
+
+             RowScore row = listRows.remove(100);
+
+             AnalysisLogger.getLogger().debug(
+                     "Sampler->removing row with score: " + row.getScore());
+
+             numElemToDelete = numElemToDelete - 1;
+
+         }
+
+     }
+
+     // return the list of 100 rows
+     List rows = new ArrayList();
+
+     AnalysisLogger.getLogger().debug(
+             "Sampler->preparing the result (the row list): ");
+
+     for (int i = 0; i < listRows.size(); i++) {
+
+         rows.add(listRows.get(i).getRow());
+
+     }
+
+     return rows;
+
+ }
+
+ // compute the score of a row (the score is the number of table
+ // columns whose value is not null and not empty)
+ private int computeColumnScore(Object[] columnArray) {
+
+     int score = 0;
+
+     for (int i = 0; i < columnArray.length; i++) {
+
+         if (columnArray[i] != null) {
+
+             if (!(columnArray[i].toString().equals(""))) {
+
+                 score++;
+
+             }
+
+         }
+
+     }
+
+     return score;
+
+ }
+
+ // retrieve 100 rows of a table randomly
+ public List randomSampleOnTable(ConnectionManager connection,
+         SessionFactory dbSession, String DBType, String tableName,
+         String schemaName, long NumRows, List DataTypeColumns)
+         throws Exception {
+
+     AnalysisLogger.getLogger().debug(
+             "Sampler->starting the Random Sample on table operation");
+
+     AnalysisLogger.getLogger().debug("Sampler->retrieving the 100 rows");
+
+     // preparing the query to get the first 100 rows of a table
+
+     List resultSet = null;
+
+     String querySampleOnTable = null;
+
+     // get a formatted list of columns
+
+     String listAttributes = null;
+
+     listAttributes = getQuery(connection, dbSession, DBType, tableName,
+             schemaName, DataTypeColumns);
+
+     // preparing the query
+
+     // if the rows number is <= 700000 the pure random sample procedure is
+     // performed; otherwise a variant is used in order to work around a
+     // bug of the random function in Postgres on large tables
+
+     if ((NumRows <= 700000) && (DBType.equals(POSTGRES))) { // Postgres
+
+         querySampleOnTable = String.format(
+                 queryForRandomSampleOnTablePostgres, listAttributes,
+                 tableName);
+
+     }
+
+     if ((NumRows > 700000) && (DBType.equals(POSTGRES))) { // Postgres
+
+         // generate an index randomly to execute the query
+
+         // Define threshold
+         int threshold = 700000;
+         int X;
+
+         // generate an index used to execute the query
+         Random rn = new Random();
+
+         if ((threshold + 100) <= NumRows) {
+
+             X = rn.nextInt(threshold + 1) + 100; // generate a number in
+                                                  // range [100, 700100]
+             AnalysisLogger.getLogger().debug("Sampler->X index: " + X);
+
+         }
+
+         else {
+
+             AnalysisLogger.getLogger().debug(
+                     "Sampler-> 700000+100 > rows number");
+
+             int offset = ((int) NumRows - threshold);
+             int valForUpperIndex = 100 - offset;
+             int UpperIndex = threshold - valForUpperIndex;
+
+             // Generate an X index
+             X = rn.nextInt(UpperIndex + 1) + 100; // generate a number in
+                                                   // range [100, UpperIndex+100]
+
+             AnalysisLogger.getLogger().debug("Sampler->X index: " + X);
+
+         }
+
+         querySampleOnTable = String.format(
+                 queryForRandomSampleWithThresholdOnTablePostgres,
+                 listAttributes, tableName, X);
+
+     }
+
+     if (DBType.equals(MYSQL)) { // MySQL
+
+         querySampleOnTable = String
+                 .format(queryForRandomSampleOnTableMysql, listAttributes,
+                         tableName);
+
+     }
+
+     AnalysisLogger.getLogger()
+             .debug("Sampler->preparing to submit the query: "
+                     + querySampleOnTable);
+
+     resultSet = connection.executeQuery(querySampleOnTable, dbSession);
+
+     AnalysisLogger.getLogger().debug(
+             "Sampler->query submitted successfully");
+
+     if (resultSet == null) {
+         AnalysisLogger
+                 .getLogger()
+                 .debug("Sampler->Error: The resulting table has no rows. Sample operation not possible");
+
+         throw new Exception(
+                 "The resulting table has no rows. 
Sample operation not possible"); + + } + + // return the first 100 rows + return resultSet; + + } + + // to retrieve the columns names of a table + public List getListColumns() { + + return listColumns; + + } + +} diff --git a/src/main/java/org/gcube/dataanalysis/databases/structure/AbstractTableStructure.java b/src/main/java/org/gcube/dataanalysis/databases/structure/AbstractTableStructure.java new file mode 100644 index 0000000..07a56bb --- /dev/null +++ b/src/main/java/org/gcube/dataanalysis/databases/structure/AbstractTableStructure.java @@ -0,0 +1,168 @@ +package org.gcube.dataanalysis.databases.structure; + +import java.util.ArrayList; +import java.util.List; + +import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; +import org.hibernate.SessionFactory; + +/** Class that allows to create a table for a database */ +public abstract class AbstractTableStructure { + + protected List ColumnNames; + protected List TypesList; + protected List TypesLengths; + protected List DefaultValues; + protected List CompleteTypes; + protected List ColumnKeys; + protected List UniqueKeys; + protected List ForeignKeys; + protected List Indexes; + protected List IsNullables; + protected String databaseName; + protected String charset; + protected String tableName; + + + // create table query + protected static String createTableQueryElement = "\"%1$s\" %2$s %3$s %4$s"; + protected static String defaultTableQueryElement = "DEFAULT %1$s"; + protected static String createTableQuery = "CREATE TABLE %1$s ( %2$s );"; + + protected static String primaryKeyStatement = "PRIMARY KEY"; + protected static String uniqueKeyStatement = "UNIQUE"; + protected static String foreignKeyStatement = "FOREIGN KEY"; + + + //Abstracts methods + protected abstract void buildStructure(SessionFactory dbSession) + throws Exception; + + protected abstract String getQueryForTableStructure(SessionFactory dbSession) + throws Exception; + + protected abstract String getQueryForIndexes(SessionFactory dbSession) + throws Exception; + + public AbstractTableStructure(String Databasename, String TableName, + SessionFactory dbSession, boolean buildStructure) throws Exception { + + try { + ColumnNames = new ArrayList(); + TypesList = new ArrayList(); + TypesLengths = new ArrayList(); + DefaultValues = new ArrayList(); + CompleteTypes = new ArrayList(); + ColumnKeys = new ArrayList(); + UniqueKeys = new ArrayList(); + ForeignKeys = new ArrayList(); + Indexes = new ArrayList(); + IsNullables = new ArrayList(); + tableName= TableName; + databaseName = Databasename; + + + if (buildStructure) + buildStructure(dbSession); + + } catch (Exception e) { + + throw e; + +// String error = e.getCause().toString(); +// +// if ((error.contains("Table")) && (error.contains("doesn't exist"))) { +// +// System.out.println("Table " + TableName + " doesn't exist"); +// +// } + } + + } + + public AbstractTableStructure(String Databasename, String TableName, + SessionFactory dbSession) throws Exception { + + this(Databasename, TableName, dbSession, true); + } + + // builds a table by merging information in data structure + public String buildUpCreateTable() { + int numOfElements = ColumnNames.size(); + StringBuffer elementsBuffer = new StringBuffer(); + + // build up create statement elements + for (int i = 0; i < numOfElements; i++) { + String nullable = ""; + if (!IsNullables.get(i).booleanValue()) + nullable = "NOT NULL"; + + String defaultvalue = ""; + + if (DefaultValues.size()!=0){ + + if ((DefaultValues.get(i) != null) + && 
(DefaultValues.get(i).trim().length() > 0) + && (nullable.equals("NOT NULL"))) { + defaultvalue = DefaultValues.get(i); + + defaultvalue = String.format(defaultTableQueryElement, + defaultvalue); + } + } + + + String createStatementElement = String.format( + createTableQueryElement, ColumnNames.get(i), + TypesList.get(i), nullable, defaultvalue); + + elementsBuffer.append(createStatementElement); + + if (i < numOfElements - 1) + elementsBuffer.append(","); + } + + // build up primary keys statements + elementsBuffer + .append(buildUPConstraint(primaryKeyStatement, ColumnKeys)); + elementsBuffer + .append(buildUPConstraint(uniqueKeyStatement, UniqueKeys)); + elementsBuffer.append(buildUPConstraint(foreignKeyStatement, + ForeignKeys)); + + // build up create statement + String createStatement = String.format(createTableQuery, tableName, + elementsBuffer.toString(), charset); + + AnalysisLogger.getLogger().debug( + "AbstractTableStructure->Create Table Query: " + + createStatement); + + return createStatement; + } + + private String buildUPConstraint(String statement, List Keys) { + + // build up primary keys statements + StringBuffer elementsBuffer = new StringBuffer(); + int numKeys = Keys.size(); + if (numKeys > 0) { + elementsBuffer.append(", " + statement + "("); + for (int i = 0; i < numKeys; i++) { + String columnKey = Keys.get(i); + if (columnKey != null) { + elementsBuffer.append("\"" + columnKey + "\""); + + if (i < numKeys - 1) + elementsBuffer.append(","); + } + + } + elementsBuffer.append(")"); + } + + return elementsBuffer.toString(); + + } + +} diff --git a/src/main/java/org/gcube/dataanalysis/databases/structure/MySQLTableStructure.java b/src/main/java/org/gcube/dataanalysis/databases/structure/MySQLTableStructure.java new file mode 100644 index 0000000..8af783b --- /dev/null +++ b/src/main/java/org/gcube/dataanalysis/databases/structure/MySQLTableStructure.java @@ -0,0 +1,194 @@ +package org.gcube.dataanalysis.databases.structure; + +import java.util.List; + +import org.gcube.dataanalysis.databases.utils.ConnectionManager; +import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; +import org.hibernate.SessionFactory; + +/** Class that allows to manage the MySQL database. */ +public class MySQLTableStructure extends AbstractTableStructure { + + public MySQLTableStructure(String Databasename, String TableName, + SessionFactory dbSession) throws Exception { + super(Databasename, TableName, dbSession, false); + } + + // Method that executes the query "show create table" in order to retrieve + // the "create table" statement + public String showCreateTable(ConnectionManager connection, SessionFactory dbSession) throws Exception { + + // Retrieve the query + String queryForIndexes = getQueryForIndexes(dbSession); + + try { + +// List indexSet = DatabaseFactory.executeSQLQuery( +// String.format(queryForIndexes, tableName), dbSession); + + List indexSet = connection.executeQuery(String.format(queryForIndexes, tableName), dbSession); + + String createTableStatement = (String) (((Object[]) indexSet.get(0))[1]); + + AnalysisLogger.getLogger().debug( + "MySQLTableStructure->'Create Table' statement: " + + createTableStatement); + + return createTableStatement; + + } catch (Exception e) { + + throw e; + + } + + } + + // Method that returns the query to build the table's structure. This method + // is not useful for mysql. 
+ @Override
+ protected String getQueryForTableStructure(SessionFactory dbSession)
+         throws Exception {
+
+     String queryForStructure = "SELECT table_schema,table_name,column_name,column_default,is_nullable,data_type,character_maximum_length,character_set_name,column_type,column_key FROM information_schema.COLUMNS WHERE table_name ='%1$s' and table_schema='%2$s';";
+
+     return queryForStructure;
+
+ }
+
+ // Method that returns the query to show the create statement
+ @Override
+ protected String getQueryForIndexes(SessionFactory dbSession)
+         throws Exception {
+
+     String queryForIndexes = "SHOW CREATE TABLE `%1$s`;";
+
+     return queryForIndexes;
+
+ }
+
+ // This method is not useful for the database mysql because the SQL query
+ // "show create" already makes the create table statement available.
+ @Override
+ protected void buildStructure(SessionFactory dbSession) throws Exception {
+
+ }
+
+}
diff --git a/src/main/java/org/gcube/dataanalysis/databases/structure/PostgresTableStructure.java b/src/main/java/org/gcube/dataanalysis/databases/structure/PostgresTableStructure.java new file mode 100644 --- /dev/null +++ b/src/main/java/org/gcube/dataanalysis/databases/structure/PostgresTableStructure.java
+     List resultSet = DatabaseFactory.executeSQLQuery(
+             queryStructure, dbSession);
+
+     // manage an error that postgres does not signal: the
+     // "queryForStructure" query is case sensitive, so the field
+     // "table_name" must be set to the well-formatted table name.
+     if (resultSet == null) {
+         AnalysisLogger
+                 .getLogger()
+                 .debug("PostgresTableStructure->Error: Results not available. Check that the database and schema names are correct: "
+                         + queryStructure);
+
+         throw new Exception(
+                 "Results not available. Check that the database and schema names are correct");
+
+     }
+
+     AnalysisLogger.getLogger().debug(
+             "PostgresTableStructure->Building Structure with query: "
+                     + queryStructure);
+
+     int resultsNumber = resultSet.size();
+
+     for (int i = 0; i < resultsNumber; i++) {
+
+         try {
+             Object result = resultSet.get(i);
+             Object[] resultArray = (Object[]) result;
+
+             // retrieve the column name
+             String columnname = ((String) resultArray[0]).toLowerCase();
+             if (columnname.equalsIgnoreCase("class"))
+                 columnname = "classcolumn";
+
+             ColumnNames.add(columnname);
+
+             // retrieve the nullable value
+             String yesno = (String) resultArray[2];
+             if (yesno.equalsIgnoreCase("YES"))
+                 IsNullables.add(true);
+             else
+                 IsNullables.add(false);
+
+             // retrieve the data type
+             String type = (String) resultArray[3];
+
+             if ((resultArray[1] != null)
+                     && (resultArray[3].toString().equals("integer"))) {
+
+                 if ((resultArray[1]).toString().startsWith("nextval('")) {
+
+                     type = "serial";
+
+                 }
+
+                 if (resultArray[4] != null) {
+                     // append the character maximum length to the type
+                     charset = resultArray[4].toString();
+
+                     TypesList.add(type + "(" + charset + ")");
+
+                 } else {
+                     TypesList.add(type);
+                 }
+
+             } else if (type.equals("USER-DEFINED")) {
+
+                 type = (String) resultArray[5];
+
+                 // retrieve the character maximum length
+                 if (resultArray[4] != null) {
+                     charset = resultArray[4].toString();
+
+                     TypesList.add(type + "(" + charset + ")");
+
+                 } else {
+                     TypesList.add(type);
+                 }
+
+             } else {
+
+                 // retrieve the character maximum length
+                 if (resultArray[4] != null) {
+                     charset = resultArray[4].toString();
+
+                     TypesList.add(type + "(" + charset + ")");
+
+                 } else {
+                     TypesList.add(type);
+                 }
+
+             }
+
+             // recover the default value
+             if ((resultArray[1] == null)
+                     || ((resultArray[1]).toString().startsWith("nextval('"))) {
+
+                 DefaultValues.add(null);
+
+             } else {
+
+                 DefaultValues.add((String) (resultArray[1]));
+             }
+
+         } catch (Exception e) {
+             throw e;
+         }
+     }
+
+     parseIndexes(dbSession);
+
+ }
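+ // Editor's note: a self-contained sketch (hypothetical names) of the type
+ // mapping rule implemented above: an "integer" column whose default starts
+ // with "nextval('" is reported as "serial"; otherwise the data type is
+ // combined with character_maximum_length when present, e.g. varchar(255):
+ //
+ //     static String toTypeString(String dataType, String columnDefault, String maxLen) {
+ //         if ("integer".equals(dataType) && columnDefault != null
+ //                 && columnDefault.startsWith("nextval('"))
+ //             dataType = "serial";
+ //         return (maxLen != null) ? dataType + "(" + maxLen + ")" : dataType;
+ //     }

+ // Method that recovers the keys of a table.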
+ private void parseIndexes(SessionFactory dbSession) throws Exception { + + // Query that retrieves keys + + String queryForIndexes = getQueryForIndexes(dbSession); + + String queryStructure = String.format(queryForIndexes, tableName); + List resultSet = DatabaseFactory.executeSQLQuery( + queryStructure, dbSession); + + AnalysisLogger.getLogger().debug( + "PostgresTableStructure->Building Structure with query adding keys: " + + queryStructure); + + if (resultSet != null) { + int resultsNumber = resultSet.size(); + + for (int i = 0; i < resultsNumber; i++) { + + Object result = resultSet.get(i); + Object[] resultArray = (Object[]) result; + + String columnKey = (String) resultArray[1]; + + if (columnKey.equals("PRIMARY KEY")) + ColumnKeys.add((String) resultArray[3]); + else if (columnKey.equals("UNIQUE KEY")) + UniqueKeys.add((String) resultArray[3]); + else if (columnKey.equals("FOREIGN KEY")) + UniqueKeys.add((String) resultArray[3]); + + } + + } + + } + + // Method that returns the query to build the table's structure. + @Override + protected String getQueryForTableStructure(SessionFactory dbSession) + throws Exception { + // TODO Auto-generated method stub + + // String queryForStructure = + // "SELECT table_schema,table_name,column_name,column_default,is_nullable,data_type,character_maximum_length,character_set_name FROM information_schema.COLUMNS WHERE table_name ='%1$s' and table_schema='%2$s';"; + String queryForStructure = "SELECT column_name,column_default,is_nullable,data_type,character_maximum_length,udt_name FROM information_schema.COLUMNS WHERE table_name ='%1$s' and table_schema='%2$s';"; + + return queryForStructure; + + } + + // Method that returns the query to show the create statement + @Override + protected String getQueryForIndexes(SessionFactory dbSession) + throws Exception { + // TODO Auto-generated method stub + + String queryForIndexes = "SELECT tc.constraint_name," + + "tc.constraint_type,tc.table_name,kcu.column_name,tc.is_deferrable,tc.initially_deferred,rc.match_option AS match_type,rc.update_rule AS on_update," + + "rc.delete_rule AS on_delete,ccu.table_name AS references_table,ccu.column_name AS references_field FROM information_schema.table_constraints tc " + + "LEFT JOIN information_schema.key_column_usage kcu ON tc.constraint_catalog = kcu.constraint_catalog AND tc.constraint_schema = kcu.constraint_schema AND tc.constraint_name = kcu.constraint_name " + + "LEFT JOIN information_schema.referential_constraints rc ON tc.constraint_catalog = rc.constraint_catalog AND tc.constraint_schema = rc.constraint_schema AND tc.constraint_name = rc.constraint_name " + + "LEFT JOIN information_schema.constraint_column_usage ccu ON rc.unique_constraint_catalog = ccu.constraint_catalog AND rc.unique_constraint_schema = ccu.constraint_schema AND rc.unique_constraint_name = ccu.constraint_name " + + "where tc.table_name='%1$s' and tc.constraint_type<>'CHECK'"; + + return queryForIndexes; + + } + +} diff --git a/src/main/java/org/gcube/dataanalysis/databases/utils/AliasToEntityOrderedMapResultTransformer.java b/src/main/java/org/gcube/dataanalysis/databases/utils/AliasToEntityOrderedMapResultTransformer.java new file mode 100644 index 0000000..edb1a96 --- /dev/null +++ b/src/main/java/org/gcube/dataanalysis/databases/utils/AliasToEntityOrderedMapResultTransformer.java @@ -0,0 +1,62 @@ +package org.gcube.dataanalysis.databases.utils; + +import java.util.LinkedHashMap; +//import java.util.List; +import java.util.Map; + +//import 
org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
+//import org.hibernate.Query;
+//import org.hibernate.Session;
+//import org.hibernate.SessionFactory;
+import org.hibernate.transform.BasicTransformerAdapter;
+
+/**
+ * Class that recovers data from a database through the Hibernate class
+ * BasicTransformerAdapter. It retrieves both column names and
+ * values.
+ */
+public class AliasToEntityOrderedMapResultTransformer extends
+        BasicTransformerAdapter {
+
+    public static final AliasToEntityOrderedMapResultTransformer INSTANCE = new AliasToEntityOrderedMapResultTransformer();
+
+    /**
+     * Disallow instantiation of AliasToEntityOrderedMapResultTransformer.
+     */
+    private AliasToEntityOrderedMapResultTransformer() {
+        super();
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    public Object transformTuple(Object[] tuple, String[] aliases) {
+        // a LinkedHashMap preserves the table's column order
+        Map result = new LinkedHashMap(tuple.length);
+        for (int i = 0; i < tuple.length; i++) {
+            String alias = aliases[i];
+            if (alias != null) {
+                result.put(alias, tuple[i]);
+            }
+        }
+        return result;
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    public boolean isTransformedValueATupleElement(String[] aliases,
+            int tupleLength) {
+        return false;
+    }
+
+    /**
+     * Serialization hook for ensuring singleton uniquing.
+     *
+     * @return The singleton instance : {@link #INSTANCE}
+     */
+    private Object readResolve() {
+        return INSTANCE;
+    }
+
+}
diff --git a/src/main/java/org/gcube/dataanalysis/databases/utils/ConnectionManager.java b/src/main/java/org/gcube/dataanalysis/databases/utils/ConnectionManager.java new file mode 100644 index 0000000..a383207 --- /dev/null +++ b/src/main/java/org/gcube/dataanalysis/databases/utils/ConnectionManager.java @@ -0,0 +1,250 @@
+package org.gcube.dataanalysis.databases.utils;
+
+import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
+import org.gcube.contentmanagement.lexicalmatcher.utils.FileTools;
+import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
+import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
+import org.hibernate.Query;
+import org.hibernate.Session;
+import org.hibernate.SessionFactory;
+import org.hibernate.cfg.Configuration;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+
+import javax.xml.parsers.DocumentBuilderFactory;
+
+/**
+ * Class that manages a database selected by a user: it sets the database
+ * configuration, connects to the database, and
+ * executes queries.
+ */ +public class ConnectionManager { + + // private org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory df; + + // Constructor + public ConnectionManager() { + + // org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory df = new + // org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory(); + // df = new org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory(); + + } + + // create the database's connection without using the configuration file but + // using the data input + public SessionFactory initDBConnection(AlgorithmConfiguration config) { + + SessionFactory dbconnection = DatabaseUtils.initDBSession(config); + + return dbconnection; + + } + + // create the database's connection using the configuration file + public SessionFactory initDBConnection(String configurationFile) + throws Exception { + String xml = FileTools.readXMLDoc(configurationFile); + SessionFactory DBSessionFactory = null; + Configuration cfg = new Configuration(); + cfg = cfg.configure(DocumentBuilderFactory.newInstance() + .newDocumentBuilder() + .parse(new ByteArrayInputStream(xml.getBytes()))); + DBSessionFactory = cfg.buildSessionFactory(); + return DBSessionFactory; + } + + /** Method that allows to set the configuration */ + public AlgorithmConfiguration setconfiguration(String ConfigPath, + String DatabaseUserName, String DatabasePassword, + String DatabaseDriver, String DatabaseDialect, String DatabaseURL, + String DatabaseName) throws IOException { + + AlgorithmConfiguration config = new AlgorithmConfiguration(); + + if (DatabaseName.equals("")) { + + throw new MalformedURLException( + "Invalid Url: the database's name is not present"); + // return null; + } + + if (!ConfigPath.equals("")) + config.setConfigPath(ConfigPath); + + if (!DatabaseUserName.equals("")) { + config.setParam("DatabaseUserName", DatabaseUserName); + } + + if (!DatabasePassword.equals("")) + config.setParam("DatabasePassword", DatabasePassword); + + if (!DatabaseDriver.equals("")) + config.setParam("DatabaseDriver", DatabaseDriver); + + if (!DatabaseDialect.equals("")) + config.setParam("DatabaseDialect", DatabaseDialect); + + if (!DatabaseURL.equals("")) + config.setParam("DatabaseURL", DatabaseURL); + + return config; + + } + + /** Method that execute a query */ + public List executeQuery(String query, + SessionFactory DBSessionFactory) throws Exception { + + List obj = null; + Session ss = null; + + try { + ss = DBSessionFactory.getCurrentSession(); + + ss.beginTransaction(); + + Query qr = null; + + // statement to check if the query is a "show create table" + String keyword = "show create table"; + + String regex = ".*\\b" + keyword.replaceAll(" +", "[ ]\\+") + + "\\b.*"; + + if ((!(query.toLowerCase().contains("explain"))) + && (!(query.toLowerCase().matches(regex)))) { // it does not + // work if the + // query + // performs an + // explain + // operation + + // Wrapper for a query. 
It wraps the
+                // statement in a subquery so that results are always
+                // returned in a uniform way
+
+                // check the query in order to remove the character ";" if
+                // the query contains it
+
+                query = query.trim();
+
+                if (query.endsWith(";")) {
+
+                    int endIndex = query.indexOf(";");
+
+                    query = query.substring(0, endIndex);
+
+                }
+
+                query = "select * from (" + query + ") as query";
+
+            }
+
+            AnalysisLogger.getLogger().debug(
+                    "In ConnectionManager-> executing query: " + query);
+
+            qr = ss.createSQLQuery(query);
+
+            qr.setResultTransformer(AliasToEntityOrderedMapResultTransformer.INSTANCE);
+
+            List result = qr.list();
+
+            AnalysisLogger.getLogger().debug(
+                    "In ConnectionManager-> result's size: " + result.size());
+
+            ss.getTransaction().commit();
+
+            if (result != null && result.size() != 0) {
+
+                obj = result;
+
+            } else {
+
+                AnalysisLogger.getLogger().debug(
+                        "ConnectionManager->Error: Result not available");
+
+                throw new Exception("Result not available");
+
+            }
+
+        } catch (Exception e) {
+
+            if (e.getClass().toString()
+                    .contains("org.hibernate.MappingException")) {
+
+                AnalysisLogger
+                        .getLogger()
+                        .debug("In ConnectionManager-> ERROR The query could not be executed: Error in retrieving a user defined type. Try to use a stored procedure to convert the type");
+
+                throw new Exception(
+                        "The query could not be executed: Error in retrieving a user defined type. Try to use a stored procedure to convert the type");
+            }
+
+            if (e.getClass().toString()
+                    .contains("org.hibernate.exception.SQLGrammarException")) {
+
+                AnalysisLogger.getLogger().debug(
+                        "In ConnectionManager-> "
+                                + e.getCause().getLocalizedMessage());
+                throw new Exception(e.getCause().getMessage());
+
+            }
+
+            else {
+
+                throw e;
+
+            }
+
+        }
+
+        return obj;
+
+    }
+
+}
diff --git a/src/main/java/org/gcube/dataanalysis/databases/utils/DatabaseFactory.java b/src/main/java/org/gcube/dataanalysis/databases/utils/DatabaseFactory.java new file mode 100644 index 0000000..cb17bef --- /dev/null +++ b/src/main/java/org/gcube/dataanalysis/databases/utils/DatabaseFactory.java @@ -0,0 +1,189 @@
+package org.gcube.dataanalysis.databases.utils;
+
+import java.io.ByteArrayInputStream;
+import java.util.List;
+
+import javax.xml.parsers.DocumentBuilderFactory;
+
+import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
+import org.gcube.contentmanagement.lexicalmatcher.utils.FileTools;
+import org.hibernate.MappingException;
+import org.hibernate.Query;
+import org.hibernate.Session;
+import org.hibernate.SessionFactory;
+import org.hibernate.cfg.Configuration;
+
+/** Class that connects to a database and executes queries */
+public class DatabaseFactory {
+
+    // Method that establishes a connection with the database
+    public static SessionFactory initDBConnection(String configurationFile)
+            throws Exception {
+        String xml = FileTools.readXMLDoc(configurationFile);
+        SessionFactory DBSessionFactory = null;
+        Configuration cfg = new Configuration();
+        cfg = cfg.configure(DocumentBuilderFactory.newInstance()
+                .newDocumentBuilder()
+                .parse(new ByteArrayInputStream(xml.getBytes())));
+        DBSessionFactory = cfg.buildSessionFactory();
+        return DBSessionFactory;
+    }
+
+    // Method that executes the query
+    public static List executeSQLQuery(String query,
+            SessionFactory DBSessionFactory) throws Exception {
+        try {
+            return executeHQLQuery(query, DBSessionFactory, true);
+
+        } catch (Exception e) {
+            throw e;
+        }
+
+    }
+
+    public static List executeHQLQuery(String query,
+            SessionFactory DBSessionFactory, boolean useSQL) throws Exception,
+            MappingException {
+        Session ss = null;
+        List obj = null;
+
+        try {
+
+            ss = DBSessionFactory.getCurrentSession();
+
+            ss.beginTransaction();
+
+            Query qr = null;
+
+            if (useSQL)
+                qr = ss.createSQLQuery(query);
+            else
+                qr = ss.createQuery(query);
+
+            List result = null;
+
+            AnalysisLogger.getLogger().debug(
+                    "In DatabaseFactory->" + qr.getQueryString());
+            try {
+                result = qr.list();
+                ss.getTransaction().commit();
+
+                if (result == null)
+                    System.out
+                            .println("Hibernate doesn't return a valid object when org.gcube.contentmanagement.lexicalmatcher retrieve UserState Object");
+
+                if (result != null && 
result.size() != 0) { + obj = result; + } + + rollback(ss); + + return obj; + + } catch (Exception e) { + + + if (e.getClass() + .toString() + .contains("org.hibernate.exception.SQLGrammarException")) { + + // System.out.println(e.getCause().getMessage()); + + // AnalysisLogger.getLogger().debug("In ConnectionManager-> ERROR The query could not be executed: SQL grammar error in the query"); + + // throw new + // Exception("The query could not be executed: SQL grammar error in the query"); + + AnalysisLogger.getLogger().debug( + "In DatabaseFactory-> " + + e.getCause().getLocalizedMessage()); + throw new Exception(e.getCause().getMessage()); + + } + + // if (e.getClass() + // .toString() + // .contains( + // "com.mysql.jdbc.exceptions.jdbc4.MySQLSyntaxErrorException")) + // { + // + // // System.out.println(e.getClass().toString()); + // + // AnalysisLogger.getLogger().debug( + // "In DatabaseFactory-> " + // + e.getCause().getLocalizedMessage()); + // + // throw new Exception(e.getCause().getMessage()); + // } + + else { + throw e; + + } + + } + + } catch (Exception e) { + + throw e; + } + + } + + public static void rollback(Session ss) { + + try { + if (ss != null && ss.getTransaction() != null) + ss.getTransaction().rollback(); + } catch (Exception ex) { + // throw ex; + + } finally { + try { + ss.close(); + } catch (Exception ee) { + + // throw ee; + } + } + } + + // public static void executeSQLUpdate(String query, SessionFactory + // DBSessionFactory) throws Exception { + // executeHQLUpdate(query, DBSessionFactory, true); + // } + + // public static void executeHQLUpdate(String query, SessionFactory + // DBSessionFactory, boolean useSQL) throws Exception{ + // // System.out.println("executing query: " + query); + // Session ss = null; + // + // try { + // + // ss = DBSessionFactory.getCurrentSession(); + // // System.out.println("executing query"); + // ss.beginTransaction(); + // Query qr = null; + // + // if (useSQL) + // qr = ss.createSQLQuery(query); + // else + // qr = ss.createQuery(query); + // + // qr.executeUpdate(); + // ss.getTransaction().commit(); + // + // } catch (Exception e) { + // AnalysisLogger.getLogger().debug(query); + // rollback(ss); + // // e.printStackTrace(); + // throw e; + // } + // } + +} diff --git a/src/main/java/org/gcube/dataanalysis/databases/utils/DatabaseManagement.java b/src/main/java/org/gcube/dataanalysis/databases/utils/DatabaseManagement.java new file mode 100644 index 0000000..e815b95 --- /dev/null +++ b/src/main/java/org/gcube/dataanalysis/databases/utils/DatabaseManagement.java @@ -0,0 +1,1850 @@ +package org.gcube.dataanalysis.databases.utils; + +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStreamWriter; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; + +import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; +import org.gcube.dataanalysis.databases.converter.SqlDialectConverter; +import org.gcube.dataanalysis.databases.sampler.Sampler; +import org.gcube.dataanalysis.databases.structure.AbstractTableStructure; +import org.gcube.dataanalysis.databases.structure.MySQLTableStructure; +import org.gcube.dataanalysis.databases.structure.PostgresTableStructure; +import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; +import org.hibernate.SessionFactory; + +import com.adventnet.swissqlapi.sql.exception.ConvertException; +import 
com.adventnet.swissqlapi.sql.parser.ParseException; + +/** Class that allows to manage a database offering several functionalities */ +public class DatabaseManagement { + + // AbstractTableStructure crossTableStructure; + // private List tablesname = new ArrayList(); + private List tablesname = null; + // private String configPath = "./cfg/"; + private String configPath = ""; + private String sourceSchemaName = null; + private SessionFactory sourceDBSession; + private String DBType; + private AbstractTableStructure crossTableStructure; + // private DBAdapter typesMap; + private DatabaseOperations op = new DatabaseOperations(); + // private String destinationDBType; + // private String sourceDBType; + private MySQLTableStructure mysqlobj; + + private ConnectionManager connection; + // private Integer estimatedRows = null; + private long estimatedRows = 0; + + // file in which the result is stored when sample and query submit + // operations are executed + private File file = null; + + // file that will contain result + private BufferedWriter out; + + // file in which the table result is stored when sample + // operations are executed + // private File fileSample = null; + + // map which contains the rows of the sample and query submit operations + // private LinkedHashMap mapResult = new + // LinkedHashMap(); + // private HashMap mapResult = new HashMap(); + private HashMap mapResult = new HashMap(); + + // map which contains the rows that constitute the table result + // private LinkedHashMap mapSampleTableResult = new + // LinkedHashMap(); + + // file in which the result is stored when the query is executed + // private File fileQueryResult = null; + + // map that contains the rows generated by the query + // private LinkedHashMap mapQueryResult = new + // LinkedHashMap(); + + private static final String MYSQL = "MySQL"; + private static final String POSTGRES = "Postgres"; + private static final String selectTablesQuery = "SELECT distinct table_name FROM information_schema.COLUMNS where table_schema='%1$s'"; + // private static final String listSchemaNameQuery = + // "select schema_name from information_schema.schemata where schema_name <> 'information_schema' and schema_name !~ E'^pg_'"; + private static final String listSchemaNameQuery = "select nspname from pg_namespace where nspname <> 'information_schema' and nspname !~ E'^pg_'"; + + // query to retrieve datatype columns of a database table + private static final String queryForDataTypeColumnsPostgres = "SELECT data_type FROM information_schema.COLUMNS WHERE table_name ='%1$s' and table_schema='%2$s'"; + private static final String queryForDataTypeColumnsMysql = "SELECT data_type FROM information_schema.COLUMNS WHERE table_name ='%1$s' and table_schema='%2$s'"; + + // query to get columns' name + private static final String queryForColumnsPostgres = "SELECT column_name FROM information_schema.COLUMNS WHERE table_name ='%1$s' and table_schema='%2$s'"; + private static final String queryForColumnsMysql = "SELECT column_name FROM information_schema.COLUMNS WHERE table_name ='%1$s' and table_schema='%2$s'"; + + // Header Table that contains the column names of a table + private String header = ""; + + // list that contains the columns names of a table + List listColumnNamesTable = null; + + // variable to set the language for translation + public static final int POSTGRESQLDialect = 4; + public static final int MYSQLDialect = 5; + + // Constructor + public DatabaseManagement(String configPath) { + this.configPath = configPath; + 
connection = new ConnectionManager(); + + } + + // for the exact parsing of the obtained results with the values of + // a database, a check is needed against the data type columns + + // to convert from postgres and mysql datatypes to Java datatypes + private String convertToJavaType(String type, String val) throws Exception { + + type = type.toLowerCase(); + String valConverted = val; + + try { + + // parse to Long + valConverted = "" + Long.parseLong(valConverted); + + // AnalysisLogger.getLogger() + // .debug("In DatabaseManagement->parsed value Long: " + // + valConverted); + + } + + catch (Exception e) { + + try { + + // check to fix a problem for the database. + // Indeed if the string is + // an hexadecimal some strings as (6F or 6D that + // are double and float values) are + // casted to Double and the value returned is + // 6.0 altering the original value. If the string is + // an hexadecimal the cast is not performed. + + if ((type != null)) { + + // // check data type value + // AnalysisLogger.getLogger().debug( + // "In DatabaseManagement->check data type value: " + // + type); + + if ((type.contains("decimal")) || (type.contains("double")) + || (type.contains("numeric")) + || (type.contains("float"))) { + + valConverted = "" + Double.parseDouble(valConverted); + + // // check the parsed value + // AnalysisLogger.getLogger().debug( + // "In DatabaseManagement->parsed value Double: " + // + valConverted); + + } + + if (type.contains("real")) { + + valConverted = "" + Float.parseFloat(valConverted); + + // // check the parsed value + // AnalysisLogger.getLogger().debug( + // "In DatabaseManagement->parsed value Float: " + // + valConverted); + + } + + } + + } catch (Exception excp) { + throw excp; + } + + } + + return valConverted; + + } + + // create the database's connection without using the configuration file but + // using the data input. 
+    // Note that in this case the variable sourceSchemaName is set to the
+    // database name for MySQL, while for Postgres it is set to the schema
+    // name in the createConnection(String cfgDir, String SourceFile) method
+    public SessionFactory createConnection(String DatabaseUserName,
+            String DatabasePassword, String DatabaseDriver,
+            String DatabaseDialect, String DatabaseURL, String DatabaseName)
+            throws IOException {
+
+        AlgorithmConfiguration config = new AlgorithmConfiguration();
+
+        AnalysisLogger.getLogger().debug(
+                "In DatabaseManagement->DatabaseName: " + DatabaseName);
+        AnalysisLogger.getLogger().debug(
+                "In DatabaseManagement->DatabaseURL: " + DatabaseURL);
+        AnalysisLogger.getLogger().debug(
+                "In DatabaseManagement->DatabaseUserName: " + DatabaseUserName);
+        AnalysisLogger.getLogger().debug(
+                "In DatabaseManagement->configPath: " + configPath);
+        AnalysisLogger.getLogger().debug(
+                "In DatabaseManagement->DatabaseDriver: " + DatabaseDriver);
+        AnalysisLogger.getLogger().debug(
+                "In DatabaseManagement->DatabaseDialect: " + DatabaseDialect);
+
+        config = connection.setconfiguration(configPath, DatabaseUserName,
+                DatabasePassword, DatabaseDriver, DatabaseDialect, DatabaseURL,
+                DatabaseName);
+
+        sourceDBSession = connection.initDBConnection(config);
+
+        if (DatabaseDriver.toLowerCase().contains("postgres")) {
+            DBType = POSTGRES;
+        }
+        if (DatabaseDriver.toLowerCase().contains("mysql")) {
+            DBType = MYSQL;
+        }
+
+        return sourceDBSession;
+    }
+
+    // creates the database connection using the configuration file.
+    // Note that for Postgres the variable sourceSchemaName is set to the
+    // schema name.
+    public SessionFactory createConnection(String cfgDir, String SourceFile)
+            throws Exception {
+
+        configPath = cfgDir;
+        if (!configPath.endsWith("/"))
+            configPath += "/";
+
+        // the variable sourceSchemaName is recovered from the configuration
+        // file: for Postgres it is the schema name, for MySQL the database
+        // name
+        sourceSchemaName = op.getDBSchema(configPath + SourceFile);
+
+        sourceDBSession = connection.initDBConnection(configPath + SourceFile);
+
+        // recover the database type from the configuration file
+        DBType = op.getDBType();
+
+        return sourceDBSession;
+    }
+
+    // closes the connection
+    public void closeConnection() {
+
+        sourceDBSession.close();
+    }
+
+    /**
+     * Submits a query and writes the result to a CSV file.
+     *
+     * @param query
+     *            : the SQL query to execute.
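+     * @param session
+     *            : the session factory of the connection on which the query
+     *            is executed.
+     * @param pathFile
+     *            : the directory in which the file "QueryResult.csv" is
+     *            written.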
+     * @return the query result.
+     * @throws Exception
+     */
+    public List submitQuery(String query, SessionFactory session,
+            String pathFile) throws Exception {
+
+        List results = connection.executeQuery(query, session);
+
+        if (results != null) {
+            AnalysisLogger.getLogger().debug(
+                    "DatabaseManagement-> Query result retrieved");
+        }
+
+        String FileName = pathFile + "QueryResult.csv";
+
+        // the data types of the columns are unknown for an arbitrary query
+        List DataTypeColumns = null;
+
+        // write the result into the file and into the map
+        AnalysisLogger.getLogger().debug(
+                "In DatabaseManagement->writing the result in the file: "
+                        + FileName);
+
+        file = new File(FileName);
+        out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(
+                file), "UTF-8"));
+
+        writeTableIntoFile(results, DataTypeColumns);
+
+        return results;
+    }
+
+    /**
+     * Gets the table names of a database.
+     *
+     * @param schemaName
+     *            : the schema name for a Postgres database; for MySQL this
+     *            parameter is null.
+     * @return a list of table names.
+     * @throws Exception
+     */
+    public List getTables(String databaseName, String schemaName)
+            throws Exception {
+
+        String query = null;
+
+        if (DBType.equals(POSTGRES)) {
+            query = String.format(selectTablesQuery, schemaName);
+            AnalysisLogger.getLogger().debug(
+                    "DatabaseManagement->retrieving tables names with query: "
+                            + query);
+            // Postgres manages the schema concept, so every operation on the
+            // database must specify the schema name
+            sourceSchemaName = schemaName;
+        }
+
+        if (DBType.equals(MYSQL)) {
+            query = String.format(selectTablesQuery, databaseName);
+            AnalysisLogger.getLogger().debug(
+                    "DatabaseManagement->retrieving tables names with query: "
+                            + query);
+            // MySQL does not manage the schema concept, so the schema name is
+            // the database name
+            sourceSchemaName = databaseName;
+        }
+
+        List resultSet = connection.executeQuery(query, sourceDBSession);
+
+        if (resultSet != null) {
+            AnalysisLogger.getLogger().debug(
+                    "DatabaseManagement->table's list retrieved");
+
+            tablesname = new ArrayList();
+            for (Object result : resultSet) {
+                tablesname.add((String) result);
+            }
+        }
+
+        return tablesname;
+    }
+
+    /**
+     * Gets the schema names of a Postgres database.
+     *
+     * @return the list of schema names.
+     * @throws Exception
+     */
+    public List getSchemas() throws Exception {
+
+        List list = new ArrayList();
+
+        if (DBType.equals(POSTGRES)) {
+            List resultSet = connection.executeQuery(listSchemaNameQuery,
+                    sourceDBSession);
+
+            if (resultSet != null) {
+                for (Object result : resultSet) {
+                    list.add((String) result);
+                }
+            }
+        }
+
+        return list;
+    }
+
+    /**
+     * Gets the "Create Table" statement of a table.
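+     * For a hypothetical Postgres table the returned string looks like
+     * "CREATE TABLE species (id integer, name character varying)" (an
+     * illustrative sketch: the actual text is produced by
+     * AbstractTableStructure.buildUpCreateTable(), while for MySQL it comes
+     * from the native "show create table" statement).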
+ * + * @param schemaName + * : the schema's name of the database postgres. For database + * mysql this parameter is null. + * @return String: the create statement . + * @throws Exception + */ + + // Get the "Create Table" statement + public String getCreateTable(String tablename, String schemaName) + throws Exception { + + String createstatement = ""; + + if (DBType.equals(POSTGRES)) { + + crossTableStructure = getSourceTableObject(tablename, schemaName); + + String tableBuildQuery = crossTableStructure.buildUpCreateTable(); + + createstatement = tableBuildQuery; + + AnalysisLogger.getLogger().debug( + "DatabaseManagement->'Create Table' statement: " + + tableBuildQuery); + + } + + if (DBType.equals(MYSQL)) { + + crossTableStructure = getSourceTableObject(tablename, schemaName); + + try { + + String createtablestatement = mysqlobj.showCreateTable( + connection, sourceDBSession); + + createstatement = createtablestatement; + + AnalysisLogger.getLogger().debug( + "DatabaseManagement->'Create Table' statement: " + + createtablestatement); + + } catch (Exception e) { + + AnalysisLogger.getLogger().debug( + "DatabaseManagement->Exception: " + e.getMessage()); + throw e; + + } + + } + + // to recover the column names of the table + getColumnNamesTable(tablename, schemaName); + + return createstatement; + + } + + // Method that creates the table object for a database + private AbstractTableStructure getSourceTableObject(String tableName, + String schemaName) throws Exception { + + sourceSchemaName = schemaName; + + if (DBType.equals(MYSQL)) { + + mysqlobj = new MySQLTableStructure(sourceSchemaName, tableName, + sourceDBSession); + + return mysqlobj; + + } + + else if (DBType.equals(POSTGRES)) { + + PostgresTableStructure postobj = new PostgresTableStructure( + sourceSchemaName, tableName, sourceDBSession); + + return postobj; + + } else { + return null; + } + + } + + // Method that returns the estimated number of rows + public long getNumberOfRows(String tablename) throws Exception { + + long rows; + + rows = op.calculateElements(connection, DBType, tablename, + sourceDBSession); + AnalysisLogger.getLogger().debug( + "DatabaseManagement->rows' number calculated: " + rows); + + estimatedRows = rows; + + return rows; + + } + + /** + * retrieve 100 rows of a table randomly that have the maximum number of + * columns not null + * + * @param tableName + * : the table's name + * @param schemaName + * : the schema's name of the database postgres. For database + * mysql this parameter is the database name. + * @return a rows' list. 
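+ *         (the sampled rows are also written to the file "SampleResult.csv"
+ *         under pathFile and cached in the result map)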
+ * @throws Exception + */ + // retrieve 100 rows of a table randomly that have the maximum number of + // columns not null + public void smartSampleOnTable(String tableName, String schemaName, + String pathFile) throws Exception { + + List resultSet = null; + + AnalysisLogger + .getLogger() + .debug("DatabaseManagement->starting the Smart Sample on table operation"); + + AnalysisLogger.getLogger().debug( + "DatabaseManagement->retrieving the 100 rows"); + + if (estimatedRows == 0) { + // estimatedRows = Integer.valueOf(getNumberOfRows(tableName)); + + estimatedRows = getNumberOfRows(tableName); + } + + // to retrieve datatype columns of a table + List DataTypeColumns = getDataTypeColumns(tableName, schemaName); + + Sampler sampler = new Sampler(); + resultSet = sampler.smartSampleOnTable(connection, sourceDBSession, + DBType, tableName, schemaName, estimatedRows, DataTypeColumns); + + if (resultSet != null) { + AnalysisLogger.getLogger().debug( + "DatabaseManagement-> rows retrieved"); + } + + // store table in a file + AnalysisLogger.getLogger().debug( + "In DatabaseManagement->store table in a file"); + + // store table in a file + + // writeSampleTableIntoFile(resultSet, tableName, schemaName); + + String FileName = pathFile + "SampleResult.csv"; + + // to recover columns names list + + List listColumns = sampler.getListColumns(); + // String header = ""; + + // //print check + // AnalysisLogger.getLogger().debug( + // "In DatabaseManagement->list columns size: " +listColumns.size()); + + // to recover columns names list + for (int i = 0; i < listColumns.size(); i++) { + + if (i != listColumns.size() - 1) { + + header = header + listColumns.get(i) + ","; + + } else { + + header = header + listColumns.get(i); + + } + } + + // //print check + // AnalysisLogger.getLogger().debug( + // "In DatabaseManagement->header: " + header); + + // write the result in the file and in the map + + AnalysisLogger.getLogger().debug( + "In DatabaseManagement->writing the result in the file: " + + FileName); + + file = new File(FileName); + + out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream( + file), "UTF-8")); + + // mapResult.put("HEADERS", header); + // + // out.write(header); + // out.newLine(); + + // writeTableIntoFile(resultSet, DataTypeColumns, header, tableName, + // schemaName, + // FileName); + + // writeTableIntoFile(resultSet, DataTypeColumns, tableName, + // schemaName, + // FileName); + + writeTableIntoFile(resultSet, DataTypeColumns); + + // return resultSet; + + } + + /** + * Retrieve the first 100 rows of a table. + * + * @param tableName + * : the table's name + * @param schemaName + * : the schema's name of the database postgres. For database + * mysql this parameter is the database name. + * @return a rows' list. 
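+ *         (the retrieved rows are also written to the file
+ *         "SampleResult.csv" under pathFile and cached in the result map)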
+ * @throws Exception + */ + + // retrieve the first 100 rows of a table + + public void sampleOnTable(String tableName, String schemaName, + String pathFile) throws Exception { + + List resultSet = null; + + AnalysisLogger.getLogger().debug( + "DatabaseManagement->starting the Sample on table operation"); + + AnalysisLogger.getLogger().debug( + "DatabaseManagement->retrieving the first 100 rows"); + + // to retrieve datatype columns of a table + List DataTypeColumns = getDataTypeColumns(tableName, schemaName); + + Sampler sampler = new Sampler(); + resultSet = sampler.sampleOnTable(connection, sourceDBSession, DBType, + tableName, schemaName, DataTypeColumns); + + if (resultSet != null) { + AnalysisLogger.getLogger().debug( + "DatabaseManagement-> rows retrieved"); + } + + // store table in a file + AnalysisLogger.getLogger().debug( + "In DatabaseManagement->store table in a file"); + + // store table in a file + + // writeSampleTableIntoFile(resultSet, tableName, schemaName); + + String FileName = pathFile + "SampleResult.csv"; + + // to recover columns names list + List listColumns = sampler.getListColumns(); + // String header = ""; + + for (int i = 0; i < listColumns.size(); i++) { + + if (i != listColumns.size() - 1) { + + header = header + listColumns.get(i) + ", "; + + } else { + + header = header + listColumns.get(i); + + } + } + + // write the result in the file and in the map + + AnalysisLogger.getLogger().debug( + "In DatabaseManagement->writing the result in the file: " + + FileName); + + file = new File(FileName); + + out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream( + file), "UTF-8")); + + // mapResult.put("HEADERS", header); + // + // out.write(header); + // out.newLine(); + + // System.out.println("HEADER:" + header); + + // writeTableIntoFile(resultSet, DataTypeColumns, header, tableName, + // schemaName, + // FileName); + // writeTableIntoFile(resultSet, DataTypeColumns, tableName, schemaName, + // FileName); + + writeTableIntoFile(resultSet, DataTypeColumns); + + // return the first 100 rows + // return resultSet; + + } + + /** + * Retrieve 100 rows of a table in a random manner. + * + * @param tableName + * : the table's name + * @param schemaName + * : the schema's name of the database postgres. For database + * mysql this parameter is the database name. + * @return a rows' list. 
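+ *         (the rows are also written to the file "SampleResult.csv" under
+ *         pathFile and cached in the result map)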
+ * @throws Exception + */ + + // to retrieve 100 rows of a table in a random manner + public void randomSampleOnTable(String tableName, String schemaName, + String pathFile) throws Exception { + + List resultSet = null; + + AnalysisLogger + .getLogger() + .debug("DatabaseManagement->starting the Random Sample on table operation"); + + AnalysisLogger.getLogger().debug( + "DatabaseManagement->retrieving 100 rows"); + + // to retrieve datatype columns of a table + List DataTypeColumns = getDataTypeColumns(tableName, schemaName); + + if (estimatedRows == 0) { + + estimatedRows = getNumberOfRows(tableName); + } + + Sampler sampler = new Sampler(); + resultSet = sampler.randomSampleOnTable(connection, sourceDBSession, + DBType, tableName, schemaName, estimatedRows, DataTypeColumns); + + if (resultSet != null) { + AnalysisLogger.getLogger().debug( + "DatabaseManagement-> rows retrieved"); + } + + // store table in a file + AnalysisLogger.getLogger().debug( + "In DatabaseManagement->store table in a file"); + + // store table in a file + + // writeSampleTableIntoFile(resultSet, tableName, schemaName); + + String FileName = pathFile + "SampleResult.csv"; + + // to recover columns names list + + List listColumns = sampler.getListColumns(); + + // //print check + // for (int i = 0; i < listColumns.size(); i++) { + // AnalysisLogger.getLogger() + // .debug("In DatabaseManagement->listcolumns: " + // + listColumns.get(i)); + // } + // String header = ""; + + for (int i = 0; i < listColumns.size(); i++) { + + if (i != listColumns.size() - 1) { + + header = header + listColumns.get(i) + ", "; + + } else { + + header = header + listColumns.get(i); + + } + } + + // write the result in the file and in the map + + AnalysisLogger.getLogger().debug( + "In DatabaseManagement->writing the result in the file: " + + FileName); + + file = new File(FileName); + + out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream( + file), "UTF-8")); + + // mapResult.put("HEADERS", header); + // + // out.write(header); + // out.newLine(); + + // writeTableIntoFile(resultSet, DataTypeColumns, header, tableName, + // schemaName, + // FileName); + + // writeTableIntoFile(resultSet, DataTypeColumns, tableName, schemaName, + // FileName); + + writeTableIntoFile(resultSet, DataTypeColumns); + + // return resultSet; + + } + + // // write the table result in the file and build the map of results + // private void writeTableIntoFile(List result, + // List DataTypeColumns, String tableName, String schemaName, + // String FileName) throws Exception { + + // write the table result in the file and build the map of results + private void writeTableIntoFile(List result, + List DataTypeColumns) throws Exception { + + // // file that will contain result + // BufferedWriter out; + // + // // String fileName; + // + // // fileName = "./cfg/" + "table.txt"; + // // fileName = "./files/" + "table.txt"; + // + // // fileName = + // // + // "/home/loredana/workspace/DatabasesResourcesManagerAlgorithms/cfg/" + // // + "SampleOnTable.txt"; + // + // // fileName = "./files/" + "SmartSampleOnTable.txt"; + // // File file = new File(fileName); + // + // AnalysisLogger.getLogger().debug( + // "In DatabaseManagement->writing the result in the file: " + // + FileName); + // + // file = new File(FileName); + // + // // FileWriter fw = new FileWriter(file.getAbsoluteFile()); + // // out = new BufferedWriter(fw); + // + // out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream( + // file), "UTF-8")); + // + // // 
mapResult.put("HEADER", header); + // + // out.write(header); + // out.newLine(); + + // to get columns names and result + + // write headers in the file + + // to recover columns names + + if (header.equals("")) { + ArrayList listKeys = new ArrayList( + ((LinkedHashMap) (result.get(0))).keySet()); + + for (int i = 0; i < listKeys.size(); i++) { + + if (i != listKeys.size() - 1) { + + header = header + listKeys.get(i) + ", "; + + } else { + + header = header + listKeys.get(i); + + } + } + + } + + // // print check + // AnalysisLogger.getLogger().debug( + // "DatabaseManagement->HEADERS: " + header); + + out.write(header); + out.newLine(); + + mapResult.put("HEADERS", header); + + // //print check values + // AnalysisLogger.getLogger().debug( + // "DatabaseManagement->columns names: " + listKeys); + + if (result != null && result.size() != 0) { + + + + // // write operation in the file + for (int i = 0; i < result.size(); i++) { + + String RowString = ""; + + Object element = result.get(i); + + ArrayList listvalues = new ArrayList( + ((LinkedHashMap) element).values()); + +// // print check +// AnalysisLogger.getLogger().debug( +// "DatabaseManagement->values: " + listvalues); + + Object[] row = listvalues.toArray(); + + if (row.length > 1) { + + for (int j = 0; j < row.length; j++) { + + if (row[j] == null) { + row[j] = ""; + } + + // to parse the obtained results in order to align + // number + // values with those of postgres + String original = row[j].toString(); + +// // check value +// AnalysisLogger.getLogger().debug( +// "In DatabaseManagement->original value: " +// + original); + + String parsed = "" + row[j]; + + if (original != "") { + // convert database datatypes to Java datatypes + if (DataTypeColumns == null + || DataTypeColumns.size() == 0) + parsed = convertToJavaType(row[j] + .getClass().getName(), parsed); + else + parsed = convertToJavaType( + DataTypeColumns.get(j), parsed); + } + + // // check value + // AnalysisLogger.getLogger().debug( + // "In DatabaseManagement->row: " + (i + 1) + // + " column: " + (j + 1) + " value= " + // + parsed); + + // write in a file + + if (j != row.length - 1) { + + out.write("\"" + parsed + "\""); + + out.write(","); + // System.out.println("write column : " + j); + // RowString = RowString + parsed + " "; + if (j == 0) { + RowString = parsed; + } else { + + RowString = RowString + "," + parsed; + } + + } + if (j == row.length - 1) { + + out.write("\"" + parsed + "\""); + out.newLine(); + + // to add a row to the map + RowString = RowString + "," + parsed; + // mapSampleTableResult.put(String.valueOf(i), + // RowString); + + // // check value row + // + // AnalysisLogger.getLogger().debug( + // "writing the value: " + RowString + " key: " + // + String.valueOf(i)); + + // mapResult.put(Integer.valueOf(i), RowString); + mapResult.put(String.valueOf(i), RowString); + + } + + } + }else if (result.size()==1){ + +// Object RowElement = (Object) result.get(0); + + if (row[0] == null) { + row[0] = ""; + } + + // to parse the obtained results in order to align + // number + // values with those of postgres + String original = row[0].toString(); + +// // check value +// AnalysisLogger.getLogger().debug( +// "In DatabaseManagement->original value: " +// + original); + + String parsed = "" + row[0]; + + if (original != "") { + // convert database datatypes to Java datatypes + if (DataTypeColumns == null + || DataTypeColumns.size() == 0) + parsed = convertToJavaType(row[0] + .getClass().getName(), parsed); + else + parsed = convertToJavaType( + 
DataTypeColumns.get(0), parsed); + } + + out.write(row[0].toString()); + out.newLine(); + + // to add a row to the map + mapResult.put(String.valueOf(i), row[0].toString()); + + } + + } + + + + } + + // close the file + out.close(); + + } + + // to retrieve datatype columns of a table + private List getDataTypeColumns(String tableName, String schemaName) + throws Exception { + + AnalysisLogger.getLogger().debug( + "In DatabaseManagement->retrieving data type columns"); + + String query; + List resultDataTypeColumns = null; + + List DataTypeColumns = new ArrayList(); + + // query to get data type columns + if (DBType.equals(POSTGRES)) { + + query = String.format(queryForDataTypeColumnsPostgres, tableName, + schemaName); + + resultDataTypeColumns = connection.executeQuery(query, + sourceDBSession); + + if (resultDataTypeColumns != null) { + + for (int i = 0; i < resultDataTypeColumns.size(); i++) { + + // // check data type column + + // AnalysisLogger.getLogger().debug( + // "In DatabaseManagement->column: " + i + // + " data type: " + // + (String) resultDataTypeColumns.get(i)); + + Object element = resultDataTypeColumns.get(i); + + ArrayList listvalues = new ArrayList( + ((LinkedHashMap) element).values()); + + // //print check + // AnalysisLogger.getLogger().debug( + // "DatabaseManagement->datatype values: " + // + listvalues); + + DataTypeColumns.add(i, (String) listvalues.get(0)); + + } + + } + + } + + if (DBType.equals(MYSQL)) { + + query = String.format(queryForDataTypeColumnsMysql, tableName, + schemaName); + + // System.out.println("query: " + query); + + resultDataTypeColumns = connection.executeQuery(query, + sourceDBSession); + + if (resultDataTypeColumns != null) { + + for (int i = 0; i < resultDataTypeColumns.size(); i++) { + + Object element = resultDataTypeColumns.get(i); + + ArrayList listvalues = new ArrayList( + ((LinkedHashMap) element).values()); + + // //print check + // AnalysisLogger.getLogger().debug("VALUES:" + listvalues); + + // //print check + // AnalysisLogger.getLogger().debug( + // "In DatabaseManagement->column: " + i + // + " data type: " + listvalues); + + // to add the data types columns + // DataTypeColumns.add(i, + // (String) resultDataTypeColumns.get(i)); + + DataTypeColumns.add(i, (String) listvalues.get(0)); + + } + + } + + } + + return DataTypeColumns; + + } + + private List getColumnNamesTable(String tableName, String schemaName) + throws Exception { + + // List ColumnNames = new ArrayList(); + + AnalysisLogger.getLogger().debug( + "DatabaseManagement->retrieving column names"); + + // preparing the query to get columns' names + String queryColumns = null; + + // build the query for database postgres. The parameter "schemaName" is + // the schema name. + if (DBType.equals(POSTGRES)) { + + queryColumns = String.format(queryForColumnsPostgres, tableName, + schemaName); + + } + + // build the query for database mysql. The parameter "schemaName" is the + // database name. 
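+        // (for a hypothetical table "species" in database "testdb" the
+        // generated query would be:
+        // SELECT column_name FROM information_schema.COLUMNS
+        //   WHERE table_name ='species' and table_schema='testdb')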
+        if (DBType.equals(MYSQL)) {
+            queryColumns = String.format(queryForColumnsMysql, tableName,
+                    schemaName);
+        }
+
+        List columnsSet = connection.executeQuery(queryColumns,
+                sourceDBSession);
+
+        AnalysisLogger.getLogger().debug(
+                "DatabaseManagement->query submitted successfully: "
+                        + queryColumns);
+
+        if (columnsSet != null) {
+
+            listColumnNamesTable = new ArrayList();
+
+            for (Object column : columnsSet) {
+                AnalysisLogger.getLogger().debug(
+                        "DatabaseManagement->column name: " + column);
+                listColumnNamesTable.add((String) column);
+            }
+        }
+
+        return listColumnNamesTable;
+    }
+
+    // returns the column names of a table
+    public List getListColumnNamesTable() {
+
+        return listColumnNamesTable;
+    }
+
+    // returns the file in which the table result (originated from a submit
+    // query or a sample operation) is stored
+    public File getFileSampleTableResult() {
+
+        return file;
+    }
+
+    // returns the map that contains the rows constituting the table result
+    public HashMap getMapSampleTableResult() {
+
+        return mapResult;
+    }
+
+    // returns the file in which the query result (originated from a submit
+    // query) is stored
+    public File getFileQueryResult() {
+
+        return file;
+    }
+
+    // returns the map that contains the rows of the query result
+    public HashMap getMapQueryResult() {
+
+        return mapResult;
+    }
+
+    // translates the query into another SQL dialect
+    public String smartCorrectionOnQuery(String OriginalQuery, int dialect)
+            throws ParseException, ConvertException {
+
+        String queryCorrected = "";
+
+        SqlDialectConverter obj = new SqlDialectConverter(OriginalQuery);
+
+        queryCorrected = obj.convert(dialect);
+
+        AnalysisLogger.getLogger().debug(
+                "In DatabaseManagement-> query converted: " + queryCorrected);
+
+        return queryCorrected;
+    }
+
+}
diff --git a/src/main/java/org/gcube/dataanalysis/databases/utils/DatabaseOperations.java b/src/main/java/org/gcube/dataanalysis/databases/utils/DatabaseOperations.java
new file mode 100644
index 0000000..a2a9166
--- /dev/null
+++ b/src/main/java/org/gcube/dataanalysis/databases/utils/DatabaseOperations.java
@@ -0,0 +1,247 @@
+package org.gcube.dataanalysis.databases.utils;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.math.BigInteger;
+import java.util.Iterator;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+
+import org.dom4j.Document;
+import org.dom4j.Node;
+import org.dom4j.io.SAXReader;
+import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
+import org.hibernate.SessionFactory;
+
+/** Class that performs some operations on a database. */
+public class DatabaseOperations {
+
+    private String DBType = ""; // database type
+
+    // count queries (the inner "limit 1" keeps the probe cheap)
+    private static final String QueryPostgres = "select count(*) from (select * from \"%1$s\" limit 1) as a";
+    private static final String QueryMysql = "select count(*) from (select * from `%1$s` limit 1) as a";
+
+    // queries used to estimate the number of rows
+    private static final String explainQueryPostgres = "explain select * from \"%1$s\"";
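+    // (MySQL counterpart of the row-estimation EXPLAIN query above)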
+    private static final String explainQueryMysql = "explain select * from `%1$s`";
+
+    private static final String MYSQL = "MySQL";
+    private static final String POSTGRES = "Postgres";
+
+    // Recovers the schema name (or, for MySQL, the database name) from the
+    // configuration file.
+    public String getDBSchema(String configurationFile) throws Exception {
+
+        File fl = new File(configurationFile);
+        FileInputStream stream = new FileInputStream(fl);
+
+        SAXReader saxReader = new SAXReader();
+        Document document = saxReader.read(stream);
+
+        List nodes = document
+                .selectNodes("//hibernate-configuration/session-factory/property");
+
+        Iterator nodesIterator = nodes.iterator();
+
+        String dbschema = "";
+        while (nodesIterator.hasNext()) {
+
+            Node currentnode = nodesIterator.next();
+            String element = currentnode.valueOf("@name");
+
+            if (element.equals("connection.url")) {
+                String url = currentnode.getText();
+                dbschema = url.substring(url.lastIndexOf("/") + 1);
+                if (dbschema.indexOf('?') > 0)
+                    dbschema = dbschema.substring(0, dbschema.indexOf('?'));
+                AnalysisLogger.getLogger().debug(
+                        "DatabaseOperations->recovering the database's name: "
+                                + dbschema);
+            }
+
+            if (element.equals("connection.schemaname")) {
+                String url = currentnode.getText();
+                dbschema = url.substring(url.lastIndexOf("/") + 1);
+                if (dbschema.indexOf('?') > 0)
+                    dbschema = dbschema.substring(0, dbschema.indexOf('?'));
+                AnalysisLogger.getLogger().debug(
+                        "DatabaseOperations->recovering the schema's name: "
+                                + dbschema);
+                DBType = POSTGRES;
+            }
+
+            // default to MySQL unless a schema name property (present only
+            // for Postgres) is found
+            if (DBType.equals("")) {
+                DBType = MYSQL;
+            }
+        }
+
+        // close the stream
+        stream.close();
+
+        return dbschema;
+    }
+
+    // Returns the database type
+    public String getDBType() {
+
+        return DBType;
+    }
+
+    // Calculates the estimated number of rows of a table
+    public long calculateElements(ConnectionManager connection,
+            String dbType, String tablename, SessionFactory session)
+            throws Exception {
+
+        long count = 0;
+
+        String countingQuery = null;
+
+        if (dbType.equals(POSTGRES)) {
+            countingQuery = String.format(QueryPostgres, tablename);
+        }
+        if (dbType.equals(MYSQL)) {
+            countingQuery = String.format(QueryMysql, tablename);
+        }
+
+        AnalysisLogger.getLogger().debug(
+                "DatabaseOperations->calculating rows' number with the query: "
+                        + countingQuery);
+
+        List result;
+        result = connection.executeQuery(countingQuery, session);
+
+        if (result != null) {
+
+            Object element = result.get(0);
+            ArrayList listvalues = new ArrayList(
+                    ((LinkedHashMap) element).values());
+
+            Long numElemvalue = Long.valueOf(listvalues.get(0).toString());
+            long numElem = numElemvalue.longValue();
+
+            if (numElem > 0) {
+
+                AnalysisLogger
+                        .getLogger()
+                        .debug("DatabaseOperations->the table has at least one row. Calculating the number of rows through an estimation");
+
+                String explain = null;
+
+                if (dbType.equals(POSTGRES)) {
+                    explain = String.format(explainQueryPostgres, tablename);
+                }
+                if (dbType.equals(MYSQL)) {
+                    explain = String.format(explainQueryMysql, tablename);
+                }
+
+                // call the query with the explain function
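+                // (On Postgres the returned plan line typically looks like
+                // "Seq Scan on t  (cost=0.00..35.50 rows=2550 width=4)": the
+                // code below extracts the value between "rows" and "width".
+                // On MySQL the estimate is read from the 9th column of the
+                // EXPLAIN output.)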
AnalysisLogger.getLogger().debug( + "DatabaseOperations->calculating rows' number with the query: " + + explain); + + List resultinfo; + + // resultinfo = DatabaseFactory.executeSQLQuery(explain, + // session); + + resultinfo = connection.executeQuery(explain, session); + + // recovery result + + if (dbType.equals(MYSQL)) { + +// Object[] resultArray = (Object[]) (resultinfo.get(0)); + + Object elem = resultinfo.get(0); + + ArrayList values = new ArrayList( + ((LinkedHashMap) elem).values()); + +// //print check +// AnalysisLogger.getLogger().debug( +// "DatabaseOperations->VALUE: " + values); + + BigInteger value = (BigInteger) values.get(8); + +// BigInteger value = (BigInteger) resultArray[8]; + + count = value.longValue(); + + + + } + + if (dbType.equals(POSTGRES)) { + + String var = resultinfo.get(0).toString(); + + int beginindex = var.indexOf("rows"); + + int lastindex = var.indexOf("width"); + + var = var.substring(beginindex + 5, lastindex - 1); + + Long value = Long.valueOf(var); + + count = value.longValue(); + + } + + } + + } + + // } catch (Exception e) { + // TODO Auto-generated catch block + // e.printStackTrace(); + // } + + AnalysisLogger.getLogger().debug( + "DatabaseOperations->rows' number calculated: " + count); + + return count; + + } + +} diff --git a/src/main/java/org/gcube/dataanalysis/databases/utilsold/ConnectionManager.java b/src/main/java/org/gcube/dataanalysis/databases/utilsold/ConnectionManager.java new file mode 100644 index 0000000..e402dab --- /dev/null +++ b/src/main/java/org/gcube/dataanalysis/databases/utilsold/ConnectionManager.java @@ -0,0 +1,143 @@ +//package org.gcube.dataanalysis.databases.utilsold; +// +////import java.awt.List; +// +//import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; +//import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils; +//import org.hibernate.Query; +//import org.hibernate.Session; +//import org.hibernate.SessionFactory; +// +//import java.io.IOException; +//import java.net.MalformedURLException; +//import java.util.List; +// +///** +// * Class that allows to manage a database selected from a user. It performs to +// * set the database configuration, to connect to the database and finally to +// * execute a query. 
+// */ +//public class ConnectionManager { +// +// private org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory df; +// +// +// public ConnectionManager() { +// +//// org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory df = new org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory(); +// df = new org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory(); +// +// +// } +// +// /** Method that allows to set the configuration */ +// public AlgorithmConfiguration setconfiguration(String ConfigPath, +// String DatabaseUserName, String DatabasePassword, +// String DatabaseDriver, String DatabaseDialect, String DatabaseURL, +// String DatabaseName) throws IOException { +// +// AlgorithmConfiguration config = new AlgorithmConfiguration(); +// +// if (DatabaseName.equals("")) { +// +// throw new MalformedURLException( +// "Invalid Url: the database's name is not present"); +// // return null; +// } +// +// if (!ConfigPath.equals("")) +// config.setConfigPath(ConfigPath); +// +// if (!DatabaseUserName.equals("")) { +// config.setParam("DatabaseUserName", DatabaseUserName); +// } +// +// if (!DatabasePassword.equals("")) +// config.setParam("DatabasePassword", DatabasePassword); +// +// if (!DatabaseDriver.equals("")) +// config.setParam("DatabaseDriver", DatabaseDriver); +// +// if (!DatabaseDialect.equals("")) +// config.setParam("DatabaseDialect", DatabaseDialect); +// +// if (!DatabaseURL.equals("")) +// config.setParam("DatabaseURL", DatabaseURL); +// +// return config; +// +// } +// +// /** Method that creates the connection */ +// public SessionFactory createConnection(AlgorithmConfiguration config) { +// +// SessionFactory dbconnection = DatabaseUtils.initDBSession(config); +// +// return dbconnection; +// +// } +// +// +// +// // public List executeQuery(String query, SessionFactory +// // DBSessionFactory){ +// // +// // List obj = null; +// // +// // +// // +// // return obj; +// // } +// // +// +// /** Method that execute a query */ +// public List executeQuery(String query, +// SessionFactory DBSessionFactory) throws Exception { +// +// List obj = null; +// Session ss = null; +// +// try { +// ss = DBSessionFactory.getCurrentSession(); +// +// ss.beginTransaction(); +// +// Query qr = null; +// +// // Wrapper for a query. 
It allows the query to operate in a proper +// // way +// query = "select * from (" + query + ") as query"; +// +// qr = ss.createSQLQuery(query); +// +// List result = qr.list(); +// +// ss.getTransaction().commit(); +// +// /* +// * if (result == null) System.out.println( +// * "Hibernate doesn't return a valid object when org.gcube.contentmanagement.lexicalmatcher retrieve UserState Object" +// * ); +// * +// * if (result != null && result.size() == 0) +// * System.out.println(String.format("found nothing in database")); +// */ +// if (result != null && result.size() != 0) { +// obj = result; +// } +// +// } catch (Exception e) { +// +// // System.out.println(String.format("Error while executing query: %1$s %2$s", +// // query, e.getMessage())); +// // e.printStackTrace(); +// // System.out.println(String.format("Error while executing query: %1$s %2$s", +// // query, e.getMessage())); +// throw e; +// } +// +// return obj; +// +// } +// +//} diff --git a/src/main/java/org/gcube/dataanalysis/databases/utilsold/DatabaseFactory.java b/src/main/java/org/gcube/dataanalysis/databases/utilsold/DatabaseFactory.java new file mode 100644 index 0000000..279c51a --- /dev/null +++ b/src/main/java/org/gcube/dataanalysis/databases/utilsold/DatabaseFactory.java @@ -0,0 +1,160 @@ +//package org.gcube.dataanalysis.databases.utilsold; +// +//import java.io.ByteArrayInputStream; +//import java.util.List; +// +//import javax.xml.parsers.DocumentBuilderFactory; +// +//import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; +//import org.gcube.contentmanagement.lexicalmatcher.utils.FileTools; +//import org.hibernate.Query; +//import org.hibernate.Session; +//import org.hibernate.SessionFactory; +//import org.hibernate.cfg.Configuration; +// +// +///** Class that allows to connect to a database and to execute a query */ +//public class DatabaseFactory { +// +// +// //Method that establish a connection with the database +// public static SessionFactory initDBConnection(String configurationFile) throws Exception { +// String xml = FileTools.readXMLDoc(configurationFile); +// SessionFactory DBSessionFactory = null; +// Configuration cfg = new Configuration(); +// cfg = cfg.configure(DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(new ByteArrayInputStream(xml.getBytes()))); +// DBSessionFactory = cfg.buildSessionFactory(); +// return DBSessionFactory; +// } +// +// +// +// //Method that execute the query +// public static List executeSQLQuery(String query, SessionFactory DBSessionFactory) throws Exception { +//// System.out.println("QUERY: "+query); +// try { +// return executeHQLQuery(query, DBSessionFactory, true); +// +// } catch (Exception e) { +// // TODO: handle exception +// throw e; +// } +// +// } +// +// public static List executeHQLQuery(String query, SessionFactory DBSessionFactory, boolean useSQL) throws Exception{ +// Session ss = null; +// List obj = null; +// +// +// try { +// +// ss = DBSessionFactory.getCurrentSession(); +// +// ss.beginTransaction(); +// +// Query qr = null; +// +// if (useSQL) +// qr = ss.createSQLQuery(query); +// else +// qr = ss.createQuery(query); +// +// List result = null; +// +// AnalysisLogger.getLogger().debug("DatabaseFactory->"+qr.getQueryString()); +// try { +// result = qr.list(); +// ss.getTransaction().commit(); +// +// if (result == null) +// System.out.println("Hibernate doesn't return a valid object when org.gcube.contentmanagement.lexicalmatcher retrieve UserState Object"); +// +//// if (result != null && result.size() == 0) 
+//// System.out.println(String.format("found nothing in database for query: "+query)); +// +// if (result != null && result.size() != 0) { +// obj = result; +// } +// +// rollback(ss); +// +// return obj; +// +// } catch (Exception e) { +// // TODO: handle exception +// throw e; +// } +// +// +// +// +// +// +// +// +// } catch (Exception e) { +// // TODO: handle exception +// +// throw e; +// } +// +// +// +// +// +// +// } +// +// +// public static void rollback(Session ss) { +// +// try { +// if (ss != null && ss.getTransaction() != null) +// ss.getTransaction().rollback(); +// } catch (Exception ex) { +// +// } finally { +// try { +// ss.close(); +// } catch (Exception ee) { +// } +// } +// } +// +// +// +//// public static void executeSQLUpdate(String query, SessionFactory DBSessionFactory) throws Exception { +//// executeHQLUpdate(query, DBSessionFactory, true); +//// } +// +// +//// public static void executeHQLUpdate(String query, SessionFactory DBSessionFactory, boolean useSQL) throws Exception{ +////// System.out.println("executing query: " + query); +//// Session ss = null; +//// +//// try { +//// +//// ss = DBSessionFactory.getCurrentSession(); +////// System.out.println("executing query"); +//// ss.beginTransaction(); +//// Query qr = null; +//// +//// if (useSQL) +//// qr = ss.createSQLQuery(query); +//// else +//// qr = ss.createQuery(query); +//// +//// qr.executeUpdate(); +//// ss.getTransaction().commit(); +//// +//// } catch (Exception e) { +//// AnalysisLogger.getLogger().debug(query); +//// rollback(ss); +////// e.printStackTrace(); +//// throw e; +//// } +//// } +// +// +//} diff --git a/src/main/java/org/gcube/dataanalysis/databases/utilsold/DatabaseManagement.java b/src/main/java/org/gcube/dataanalysis/databases/utilsold/DatabaseManagement.java new file mode 100644 index 0000000..e39e276 --- /dev/null +++ b/src/main/java/org/gcube/dataanalysis/databases/utilsold/DatabaseManagement.java @@ -0,0 +1,207 @@ +//package org.gcube.dataanalysis.databases.utilsold; +// +//import java.math.BigInteger; +//import java.util.ArrayList; +//import java.util.List; +// +//import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; +////import org.gcube.databasemanagement.DBAdapter; +//import org.gcube.dataanalysis.databases.structure.MySQLTableStructure; +//import org.gcube.dataanalysis.databases.structure.AbstractTableStructure; +//import org.gcube.dataanalysis.databases.structure.PostgresTableStructure; +////import org.gcube.contentmanagement.databases.structure.MySQLTableStructure; +//import org.hibernate.SessionFactory; +// +// +///** Class that allows manage a database offering several functionalities */ +//public class DatabaseManagement { +// +// // AbstractTableStructure crossTableStructure; +// private List tablesname = new ArrayList(); +// private String configPath; +// private String sourceSchemaName; +// private SessionFactory sourceDBSession; +// private String DBType; +// private AbstractTableStructure crossTableStructure; +// // private DBAdapter typesMap; +// private DatabaseOperations op = new DatabaseOperations(); +//// private String destinationDBType; +//// private String sourceDBType; +// MySQLTableStructure mysqlobj; +// +// private static final String MYSQL = "MySQL"; +// private static final String POSTGRES = "Postgres"; +// private static final String selectTablesQuery = "SELECT distinct table_name FROM information_schema.COLUMNS where table_schema='%1$s';"; +// private static final String listSchemaNameQuery="select schema_name from 
information_schema.schemata where schema_name <> 'information_schema' and schema_name !~ E'^pg_'"; +// +// public DatabaseManagement(String cfgDir, String SourceFile) +// throws Exception { +// +// configPath = cfgDir; +// if (!configPath.endsWith("/")) +// configPath += "/"; +// +// sourceSchemaName = op.getDBSchema(configPath + SourceFile); +// +// sourceDBSession = DatabaseFactory.initDBConnection(configPath +// + SourceFile); +// +// +//// destinationDBType = POSTGRES; +//// sourceDBType = MYSQL; +//// +//// // typesMap = new DBAdapter(configPath + "/" + sourceDBType + "2" +//// // + destinationDBType + ".properties"); +// +// } +// +// // Get the table's names +// public List getTables() throws Exception { +// +// String query = String.format(selectTablesQuery, sourceSchemaName); +// +// List resultSet = DatabaseFactory.executeSQLQuery(query, +// sourceDBSession); +// +// for (Object result : resultSet) { +// tablesname.add((String) result); +// } +// +// // Get the Database's type +// DBType = op.getDBType(); +// +// return tablesname; +// +// } +// +// +// //Get the schema's name for the database Postgres +// public List getSchemas() throws Exception{ +// +// // Get the Database's type +// DBType = op.getDBType(); +// +// List list= new ArrayList(); +// +// +// +// if (DBType.equals(POSTGRES)) { +// +// +// List resultSet = DatabaseFactory.executeSQLQuery(listSchemaNameQuery, +// sourceDBSession); +// +// for (Object result : resultSet) { +// list.add((String) result); +// } +// } +// +// if (DBType.equals(MYSQL)){ +// +// list=null; +// +// +// } +// +// +// +// return list; +// +// } +// +// // Get the "Create Table" statement +// public String getCreateTable(String tablename) throws Exception { +// +// String createstatement = ""; +// +// if (DBType.equals(POSTGRES)) { +// +// // for (String table : tablesname) { +// +// crossTableStructure = getSourceTableObject(tablename); +// +// String tableBuildQuery = crossTableStructure.buildUpCreateTable(); +// +// AnalysisLogger.getLogger().debug( +// "DatabaseManagement->'Create Table' statement: " +// + tableBuildQuery); +// +// // } +// +// } +// +// if (DBType.equals(MYSQL)) { +// +// // for (String table : tablesname) { +// +// crossTableStructure = getSourceTableObject(tablename); +// +// try { +// +// String createtablestatement = mysqlobj +// .showCreateTable(sourceDBSession); +// +// AnalysisLogger.getLogger().debug( +// "DatabaseManagement->'Create Table' statement: " +// + createtablestatement); +// +// } catch (Exception e) { +// // TODO: handle exception +// +// AnalysisLogger.getLogger().debug( +// "DatabaseManagement->Exception: " + e.getMessage()); +// } +// +// // } +// +// } +// +// return createstatement; +// +// } +// +// // Method that create the database object +// private AbstractTableStructure getSourceTableObject(String tablename) +// throws Exception { +// +// if (DBType.equals(MYSQL)) { +// +// mysqlobj = new MySQLTableStructure(sourceSchemaName, tablename, +// sourceDBSession); +// +// // mysqlobj = new MySQLTableStructure(sourceSchemaName, tablename, +// // typesMap, sourceDBSession); +// +// // return new MySQLTableStructure(sourceSchemaName, tablename, +// // typesMap, sourceDBSession); +// return mysqlobj; +// +// } +// +// else if (DBType.equals(POSTGRES)) { +// +// PostgresTableStructure postobj = new PostgresTableStructure( +// sourceSchemaName, tablename, sourceDBSession); +// +// // PostgresTableStructure postobj = new PostgresTableStructure( +// // sourceSchemaName, tablename, typesMap, 
diff --git a/src/main/java/org/gcube/dataanalysis/databases/utilsold/DatabaseOperations.java b/src/main/java/org/gcube/dataanalysis/databases/utilsold/DatabaseOperations.java new file mode 100644 index 0000000..67ce6c3 --- /dev/null +++ b/src/main/java/org/gcube/dataanalysis/databases/utilsold/DatabaseOperations.java @@ -0,0 +1,156 @@ +//package org.gcube.dataanalysis.databases.utilsold; +// +//import java.io.File; +//import java.io.FileInputStream; +//import java.math.BigInteger; +//import java.util.Iterator; +//import java.util.List; +// +//import org.dom4j.Document; +//import org.dom4j.Node; +//import org.dom4j.io.SAXReader; +//import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; +//import org.hibernate.SessionFactory; +// +///** Class that performs some operations on a database. */ +//public class DatabaseOperations { +// +// private String DBType = ""; // database type +// +// private static final String Query = "select * from %1$s limit 1"; +// // private static final String countQuery = "select count(*) from %1$s"; +// private static final String explainQuery = "explain select * from %1$s"; +// +// private static final String MYSQL = "MySQL"; +// private static final String POSTGRES = "Postgres"; +// +// +// // Method that recovers the schema name of the database. +// public String getDBSchema(String configurationFile) throws Exception { +// +// File fl = new File(configurationFile); +// FileInputStream stream = new FileInputStream(fl); +// +// SAXReader saxReader = new SAXReader(); +// Document document = saxReader.read(stream); +// +// List nodes = document +// .selectNodes("//hibernate-configuration/session-factory/property"); +// +// Iterator nodesIterator = nodes.iterator(); +// +// String dbschema = ""; +// while (nodesIterator.hasNext()) { +// +// Node currentnode = nodesIterator.next(); +// String element = currentnode.valueOf("@name"); +// if (element.equals("connection.url")) { +// String url = currentnode.getText(); +// dbschema = url.substring(url.lastIndexOf("/") + 1); +// if (dbschema.indexOf('?') > 0) +// dbschema = dbschema.substring(0, dbschema.indexOf('?')); +// AnalysisLogger.getLogger().debug( +// "DatabaseOperations-> recovering the database's name: " + dbschema); +// +// +// // DBType="MySQL"; +// +// // break; +// } +// +// if (element.equals("connection.schemaname")) { +// String url = currentnode.getText(); +// dbschema = url.substring(url.lastIndexOf("/") + 1); +// if (dbschema.indexOf('?') > 0) +// dbschema = dbschema.substring(0, dbschema.indexOf('?')); +// AnalysisLogger.getLogger().debug( +// "DatabaseOperations-> recovering the schema's name: " + dbschema); +// DBType = POSTGRES; +// // break; +// +// } +// +// if (DBType.equals("")) { +// +// DBType = MYSQL; +// +// } +// +// } +// +// // close stream +// stream.close(); +// +// return dbschema; +// } +// +// // Method that returns the database type +// public String getDBType() { +// +// return DBType; +// +// } +// +// // Method that calculates the estimated number of rows +// public BigInteger calculateElements(String tablename, SessionFactory session) throws Exception{ +// +// BigInteger count = BigInteger.ZERO; +// +// String countingQuery = String.format(Query, tablename); +// +// AnalysisLogger.getLogger().debug( +// "DatabaseOperations-> checking that the table is not empty with the query: " + countingQuery); +// +// List result; +// +//// try { +// result = DatabaseFactory.executeSQLQuery(countingQuery, session); +// +// if ((result != null) && (result.size() > 0)) { +// +// // call query with explain function +// +// String explain = String.format(explainQuery, tablename); +// AnalysisLogger.getLogger().debug( +// "DatabaseOperations-> estimating the rows' number with the query: " + explain); +// +// List resultinfo; +// +// resultinfo = DatabaseFactory.executeSQLQuery(explain, session); +// +// // recover the result +// +// if (DBType.equals(MYSQL)) { +// +// Object[] resultArray = (Object[]) (resultinfo.get(0)); +// +// count = (BigInteger) resultArray[8]; +// +// } +// +// if (DBType.equals(POSTGRES)) { +// +// String var = resultinfo.get(0).toString(); +// +// int beginindex = var.indexOf("rows"); +// +// int lastindex = var.indexOf("width"); +// +// var = var.substring(beginindex + 5, lastindex - 1); +// +// count = new BigInteger(var); +// +// } +// +// } +// +//// } catch (Exception e) { +//// TODO Auto-generated catch block +//// e.printStackTrace(); +//// } +// +// return count; +// +// } +// +//}
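DatabaseOperations.calculateElements estimates row counts from EXPLAIN output rather than running a full count(*): on MySQL the estimate is read from the ninth column of the explain row, on Postgres it is parsed out of the plan text. A minimal sketch of the Postgres parsing step (the sample plan line is illustrative):

import java.math.BigInteger;

public class ExplainParseSketch {
    // Extracts the estimate from a plan line such as
    // "Seq Scan on mytable  (cost=0.00..155.00 rows=10000 width=64)".
    public static BigInteger parseEstimatedRows(String planLine) {
        int begin = planLine.indexOf("rows=");
        int end = planLine.indexOf(" width");
        // the substring between "rows=" and " width" holds the estimate
        return new BigInteger(planLine.substring(begin + 5, end));
    }

    public static void main(String[] args) {
        String line = "Seq Scan on mytable  (cost=0.00..155.00 rows=10000 width=64)";
        System.out.println(parseEstimatedRows(line)); // prints 10000
    }
}

The estimate is only as fresh as the table's statistics, which is the usual trade-off of this technique against an exact but expensive count(*).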
diff --git a/src/main/resources/symm.key b/src/main/resources/symm.key new file mode 100644 index 0000000..b113201 --- /dev/null +++ b/src/main/resources/symm.key @@ -0,0 +1 @@ +< ¬@Qaj¤F€g¸ðQ \ No newline at end of file diff --git a/src/test/java/org/gcube/dataanalysis/test/Prova.java b/src/test/java/org/gcube/dataanalysis/test/Prova.java new file mode 100644 index 0000000..7f50408 --- /dev/null +++ b/src/test/java/org/gcube/dataanalysis/test/Prova.java @@ -0,0 +1,36 @@ +package org.gcube.dataanalysis.test; + +import org.gcube.common.encryption.StringEncrypter; +import org.gcube.common.scope.api.ScopeProvider; +import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; +import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; +import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm; + +public class Prova { + + /** + * @param args + */ + public static void main(String[] args) { + // TODO Auto-generated method stub +// AnalysisLogger.setLogger("./cfg/"+AlgorithmConfiguration.defaultLoggerFile); + +// AnalysisLogger.getLogger().debug("hello"); + + + ScopeProvider.instance.set("/gcube/devsec"); + + try { + String password = StringEncrypter.getEncrypter().decrypt("UwNMZOK7FlIjGPR+NZCV6w=="); + + System.out.println(password); + } catch (Exception e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + StandardLocalExternalAlgorithm sle = null; +// System.out.println("HELLO"); + + } + +} diff --git a/src/test/java/org/gcube/dataanalysis/test/RegressionRandomSampleOnTable.java b/src/test/java/org/gcube/dataanalysis/test/RegressionRandomSampleOnTable.java new file mode 100644 index 0000000..fe74ec3 --- /dev/null +++ b/src/test/java/org/gcube/dataanalysis/test/RegressionRandomSampleOnTable.java @@ -0,0 +1,163 @@ +//package org.gcube.dataanalysis.test; +// +//import java.io.IOException; +// +//import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; +//import org.gcube.dataanalysis.databases.utils.DatabaseManagement; +// +//public class RegressionRandomSampleOnTable { +// +// /** +// * @param args +// */ +// public static void main(String[] args) { +// // TODO Auto-generated method stub +// AnalysisLogger.getLogger().debug("Executing: " +
"Postgres"); +// testPostgres(); +// +// AnalysisLogger.getLogger().debug("Executing: " + "Mysql1"); +//// testMysql1(); +// +// AnalysisLogger.getLogger().debug("Executing: " + "Mysql2"); +//// testMysql2(); +// +// AnalysisLogger.getLogger().debug("Executing: " + "Mysql3"); +//// testMysql3(); +// } +// +// // Postgres database +// private static void testPostgres() { +// +// // connection to database +// DatabaseManagement mgt = new DatabaseManagement(""); +// +// try { +// mgt.createConnection( +// "postgres", +// "d4science2", +// "org.postgresql.Driver", +// "org.hibernate.dialect.PostgreSQLDialect", +// "jdbc:postgresql://geoserver-dev.d4science-ii.research-infrastructures.eu:5432/aquamapsdb", +// "aquamapsdb"); +// } catch (IOException e) { +// // TODO Auto-generated catch block +// e.printStackTrace(); +// } +// +// // SmartSampleOnTable operation +// +// try { +// // for database postgres, if a table is not in lower case format, it +// // is necessary to include the table name in quotes "" +// mgt.randomSampleOnTable("Divisions", "public"); +// } catch (Exception e) { +// // TODO Auto-generated catch block +// e.printStackTrace(); +// +// AnalysisLogger.getLogger().debug( +// "In RegressionRandomSampleOnTable->EXCEPTION: " + e); +// } +// +// } +// +// // Mysql database +// private static void testMysql1() { +// +// // connection to database +// DatabaseManagement mgt = new DatabaseManagement(""); +// +// try { +// mgt.createConnection("root", "test", "com.mysql.jdbc.Driver", +// "org.hibernate.dialect.MySQLDialect", +// // "jdbc:mysql://146.48.87.169:3306/col2oct2010", +// "jdbc:mysql://146.48.87.169:3306/aquamaps", "hcaf_d"); +// } catch (IOException e) { +// // TODO Auto-generated catch block +// e.printStackTrace(); +// } +// +// // SmartSampleOnTable operation +// +// try { +// // for database postgres, if a table is not in lower case format, it +// // is necessary to include the table name in quotes "" +// mgt.randomSampleOnTable("hcaf_d", "aquamaps"); +// } catch (Exception e) { +// // TODO Auto-generated catch block +// e.printStackTrace(); +// +// AnalysisLogger.getLogger().debug( +// "In RegressionRandomSampleOnTable->EXCEPTION: " + e); +// } +// +// } +// +// // Mysql database +// private static void testMysql2() { +// +// // connection to database +// DatabaseManagement mgt = new DatabaseManagement(""); +// +// try { +// mgt.createConnection("root", "test", "com.mysql.jdbc.Driver", +// "org.hibernate.dialect.MySQLDialect", +// "jdbc:mysql://146.48.87.169:3306/col2oct2010", +//// "jdbc:mysql://146.48.87.169:3306/aquamaps", +// "example"); +// } catch (IOException e) { +// // TODO Auto-generated catch block +// e.printStackTrace(); +// } +// +// // SmartSampleOnTable operation +// +// try { +// // for database postgres, if a table is not in lower case format, it +// // is necessary to include the table name in quotes "" +// mgt.randomSampleOnTable("example", "col2oct2010"); +// } catch (Exception e) { +// // TODO Auto-generated catch block +// e.printStackTrace(); +// +// AnalysisLogger.getLogger().debug( +// "In RegressionRandomSampleOnTable->EXCEPTION: " + e); +// } +// +// +// } +// +// //Mysql database +// private static void testMysql3() { +// +// // connection to database +// DatabaseManagement mgt = new DatabaseManagement(""); +// +// try { +// mgt.createConnection("root", "test", "com.mysql.jdbc.Driver", +// "org.hibernate.dialect.MySQLDialect", +// "jdbc:mysql://146.48.87.169:3306/col2oct2010", +//// "jdbc:mysql://146.48.87.169:3306/aquamaps", +// 
"common_names"); +// } catch (IOException e) { +// // TODO Auto-generated catch block +// e.printStackTrace(); +// } +// +// // SmartSampleOnTable operation +// +// try { +// // for database postgres, if a table is not in lower case format, it +// // is necessary to include the table name in quotes "" +// mgt.randomSampleOnTable("common_names", "col2oct2010"); +// } catch (Exception e) { +// // TODO Auto-generated catch block +// e.printStackTrace(); +// +// AnalysisLogger.getLogger().debug( +// "In RegressionRandomSampleOnTable->EXCEPTION: " + e); +// } +// +// +// } +// +//} diff --git a/src/test/java/org/gcube/dataanalysis/test/RegressionSmartSampleOnTable.java b/src/test/java/org/gcube/dataanalysis/test/RegressionSmartSampleOnTable.java new file mode 100644 index 0000000..8bcdd27 --- /dev/null +++ b/src/test/java/org/gcube/dataanalysis/test/RegressionSmartSampleOnTable.java @@ -0,0 +1,91 @@ +//package org.gcube.dataanalysis.test; +// +//import java.io.IOException; +// +//import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; +//import org.gcube.dataanalysis.databases.utils.DatabaseManagement; +//import org.junit.rules.TestName; +// +//public class RegressionSmartSampleOnTable { +// +// // String [] testName = {"Postgres", "Mysql"}; +// +// public static void main(String[] args) { +// // TODO Auto-generated method stub +// +// AnalysisLogger.getLogger().debug("Executing: " + "Postgres"); +// testPostgres(); +// +// AnalysisLogger.getLogger().debug("Executing: " + "Mysql"); +//// testMysql(); +// +// } +// +// // Postgres database +// private static void testPostgres() { +// +// // connection to database +// DatabaseManagement mgt = new DatabaseManagement(""); +// +// try { +// mgt.createConnection( +// "postgres", +// "d4science2", +// "org.postgresql.Driver", +// "org.hibernate.dialect.PostgreSQLDialect", +// "jdbc:postgresql://geoserver-dev.d4science-ii.research-infrastructures.eu:5432/aquamapsdb", +// "aquamapsdb"); +// } catch (IOException e) { +// // TODO Auto-generated catch block +// e.printStackTrace(); +// } +// +// // SmartSampleOnTable operation +// +// try { +// // for database postgres, if a table is not in lower case format, it +// // is necessary to include the table name in quotes "" +// mgt.smartSampleOnTable("Divisions", "public"); +// } catch (Exception e) { +// // TODO Auto-generated catch block +// e.printStackTrace(); +// +// AnalysisLogger.getLogger().debug( +// "In TestSmartSampleOnTable->EXCEPTION: " + e); +// } +// +// } +// +// // Mysql database +// private static void testMysql() { +// +// // connection to database +// DatabaseManagement mgt = new DatabaseManagement(""); +// +// try { +// mgt.createConnection("root", "test", "com.mysql.jdbc.Driver", +// "org.hibernate.dialect.MySQLDialect", +// // "jdbc:mysql://146.48.87.169:3306/col2oct2010", +// "jdbc:mysql://146.48.87.169:3306/aquamaps", "hcaf_d"); +// } catch (IOException e) { +// // TODO Auto-generated catch block +// e.printStackTrace(); +// } +// +// // SmartSampleOnTable operation +// +// try { +// // for database postgres, if a table is not in lower case format, it +// // is necessary to include the table name in quotes "" +// mgt.smartSampleOnTable("hcaf_d", "aquamaps"); +// } catch (Exception e) { +// // TODO Auto-generated catch block +// e.printStackTrace(); +// +// AnalysisLogger.getLogger().debug( +// "In TestSampleOnTable->EXCEPTION: " + e); +// } +// +// } +// +//} diff --git a/src/test/java/org/gcube/dataanalysis/test/TestApp.java 
b/src/test/java/org/gcube/dataanalysis/test/TestApp.java new file mode 100644 index 0000000..54f8ac8 --- /dev/null +++ b/src/test/java/org/gcube/dataanalysis/test/TestApp.java @@ -0,0 +1,433 @@ +//package org.gcube.dataanalysis.test; +//import static org.gcube.resources.discovery.icclient.ICFactory.clientFor; +//import static org.gcube.resources.discovery.icclient.ICFactory.queryFor; +// +//import java.io.BufferedReader; +//import java.io.IOException; +//import java.io.InputStreamReader; +//import java.util.List; +//import java.util.Scanner; +// +//import org.gcube.common.encryption.StringEncrypter; +//import org.gcube.common.resources.gcore.ServiceEndpoint; +//import org.gcube.common.scope.api.ScopeProvider; +//import org.gcube.dataanalysis.databases.resources.DBResource; +//import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; +//import org.gcube.resources.discovery.client.api.DiscoveryClient; +//import org.gcube.resources.discovery.client.queries.impl.XQuery; +//import org.hibernate.SessionFactory; +// +//public class TestApp { +// +// /** +// * @param args +// */ +// public static void main(String[] args) { +// // TODO Auto-generated method stub +// +// +//// ScopeProvider.instance.set("/gcube/devsec/devVRE"); +// ScopeProvider.instance.set("/gcube/devsec"); +// +// +// +// XQuery query = queryFor(ServiceEndpoint.class); +// query.addCondition("$resource/Profile/Category/text() eq 'Database'"); +// +// DiscoveryClient prova=clientFor(DBResource.class); +// List access = prova.submit(query); +// +// +// System.out.println("number of resources: "+access.size()); +// +// +// +// +// +// for(int i=0;i rawnames = df.executeQuery("select * from (select a.field1 as aa, b.field1_id as bb, b.field1 as cc from a7f768710_c7b0_11df_b2bc_e0f36cf4c8cd as a "+ +////// "left join cur_00d4e2d0_ecbd_11df_87fa_de008e0850ff as b on a.field3_id=b.field1_id) as cd", sf); +//// +//// /*List rawnames = df.executeQuery("select * from (select a.field1, b.field1_id, b.field1 from a7f768710_c7b0_11df_b2bc_e0f36cf4c8cd as a "+ +//// "left join cur_00d4e2d0_ecbd_11df_87fa_de008e0850ff as b on a.field3_id=b.field1_id) as cd", sf);*/ +//// +////// List rawnames = df.executeQuery("select a.field1 as aa, b.field1_id as bb, b.field1 as cc from a7f768710_c7b0_11df_b2bc_e0f36cf4c8cd as a "+ +////// "left join cur_00d4e2d0_ecbd_11df_87fa_de008e0850ff as b on a.field3_id=b.field1_id)", sf); +//// +//// +//// +//// +//// +//// // Working query +//// +////// List rawnames = df.executeQuery("select a.field1, b.field1_id from a7f768710_c7b0_11df_b2bc_e0f36cf4c8cd as a "+ +////// "left join cur_00d4e2d0_ecbd_11df_87fa_de008e0850ff as b on a.field3_id=b.field1_id", sf); +//// +//// +//// // Non-working query +////// List rawnames = df.executeQuery("select a.field1, b.field1_id, b.field1 from a7f768710_c7b0_11df_b2bc_e0f36cf4c8cd as a "+ +////// "left join cur_00d4e2d0_ecbd_11df_87fa_de008e0850ff as b on a.field3_id=b.field1_id", sf); +//// +//// // Working query with aliases +//// +////// List rawnames = df.executeQuery("select a.field1 as aa, b.field1_id as bb, b.field1 as cc from a7f768710_c7b0_11df_b2bc_e0f36cf4c8cd as a "+ +////// "left join cur_00d4e2d0_ecbd_11df_87fa_de008e0850ff as b on a.field3_id=b.field1_id", sf); +//// +//// // Working query +////// List rawnames = df.executeQuery("select a.source_data as sourceA, b.source_data as sourceB, a.target_data_scientific_name as targetA, b.target_data_scientific_name as targetB " + +////// "from bionymoutsimplefaked1csvpreprcsv as a join bionymoutfaked1csvpreprcsv as b on a.source_data=b.source_data limit 10",sf); +//// +//// List rawnames = df.executeQuery(q,sf); +//// +//// +//// System.out.println("***************************************************************"); +//// System.out.println(); +//// +//// System.out.println("Size: "+rawnames.size()); +// // +//// for (int i = 0; i < rawnames.size(); i++) { +// // +//// Object[] row = (Object[]) rawnames.get(i); +// // +//// for (int j = 0; j < row.length; j++) { +// // +//// System.out.print("\"" + row[j] + "\"; "); +// // +//// } +//// System.out.println(); +//// //System.out.println("End "); +// // +//// } +//// +//// +//// +//// } catch (Exception e) { +//// // TODO Auto-generated catch block +//// //e.printStackTrace(); +//// +//// System.out.println("***************************************************************"); +//// System.out.println(); +//// +//// String error=e.getCause().toString(); +//// +//// if (error.contains("MySQLSyntaxErrorException")) +//// { +//// +//// System.out.println("ERROR "+e.getMessage()+" "+"because an error is present: "+e.getCause().getMessage()); +//// System.out.println("Suggestion: insert an alias name for the columns"); +//// +//// +//// } +//// +//// +//// } +// +// +// +//// }else { +//// System.out.println("ERROR: the connection cannot be performed because the address is incomplete: the database name is not available"); +//// } +// +// +// +//// } catch (IOException e1) { +//// // TODO Auto-generated catch block +//// e1.printStackTrace(); +//// } +// +// +// +// +// +// +// } +//}
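TestApp above discovers Database service endpoints through the gCube IS; the stripped generics make the commented code hard to follow, so here is the same discovery call as a compact sketch (type parameters restored as an assumption; the scope and the query condition are the ones used above):

import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;

import java.util.List;

import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.dataanalysis.databases.resources.DBResource;
import org.gcube.resources.discovery.client.api.DiscoveryClient;
import org.gcube.resources.discovery.client.queries.impl.XQuery;

public class DiscoverySketch {
    public static void main(String[] args) {
        ScopeProvider.instance.set("/gcube/devsec"); // scope used by the tests in this patch

        // query the IS for ServiceEndpoint resources of category 'Database'
        XQuery query = queryFor(ServiceEndpoint.class);
        query.addCondition("$resource/Profile/Category/text() eq 'Database'");

        DiscoveryClient<DBResource> client = clientFor(DBResource.class);
        List<DBResource> resources = client.submit(query);

        System.out.println("number of resources: " + resources.size());
    }
}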
diff --git a/src/test/java/org/gcube/dataanalysis/test/TestDatabasesResourcesManager.java b/src/test/java/org/gcube/dataanalysis/test/TestDatabasesResourcesManager.java new file mode 100644 index 0000000..9cce310 --- /dev/null +++ b/src/test/java/org/gcube/dataanalysis/test/TestDatabasesResourcesManager.java @@ -0,0 +1,34 @@ +package org.gcube.dataanalysis.test; + +/** Class that interacts with the IS in order to recover information about the available databases and the data they contain. + * Once the data of interest have been recovered, the user can import them into the database used by the SM. + * */ + +public class TestDatabasesResourcesManager { + + /** + * @param args + */ + public static void main(String[] args) { + // TODO Auto-generated method stub + + + // the user sets the scope + + + // the user views the available databases in the fixed scope + + // the user selects a database + + // the user wants to view some characteristics of the chosen database, such as its tables, their number of rows and their create statements + + // the user wants to recover data from a certain table, so they submit a 'select' query on the database + + // ?? the user creates a table in the SM database and imports the recovered data (a sketch of this workflow follows this class) + + + + + } + +}
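The steps listed in TestDatabasesResourcesManager can be exercised with the DatabaseManagement class from org.gcube.dataanalysis.databases.utils, whose API is visible in the regression tests of this patch. A hypothetical end-to-end sketch, reusing the illustrative Postgres credentials that those tests already contain:

import org.gcube.dataanalysis.databases.utils.DatabaseManagement;

public class WorkflowSketch {
    public static void main(String[] args) throws Exception {
        DatabaseManagement mgt = new DatabaseManagement("");

        // connect to the selected database (parameters as in the regression tests above)
        mgt.createConnection(
                "postgres",
                "d4science2",
                "org.postgresql.Driver",
                "org.hibernate.dialect.PostgreSQLDialect",
                "jdbc:postgresql://geoserver-dev.d4science-ii.research-infrastructures.eu:5432/aquamapsdb",
                "aquamapsdb");

        // inspect the data: non-lower-case Postgres table names must stay quoted
        mgt.smartSampleOnTable("Divisions", "public");
    }
}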
diff --git a/src/test/java/org/gcube/dataanalysis/test/TestGetTables.java b/src/test/java/org/gcube/dataanalysis/test/TestGetTables.java new file mode 100644 index 0000000..5200077 --- /dev/null +++ b/src/test/java/org/gcube/dataanalysis/test/TestGetTables.java @@ -0,0 +1,114 @@ +package org.gcube.dataanalysis.test; +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.List; +import java.util.Scanner; + +import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; +import org.gcube.dataanalysis.databases.utils.DatabaseManagement; + + + +/** Class that tests the code for the recovery of the tables */ +public class TestGetTables { + + + public static void main(String[] args) { + // TODO Auto-generated method stub + + + + + Scanner sc = new Scanner(System.in); + + System.out.println("Specify the name of the configuration file"); + + String configurationfile=sc.next(); + + // TODO: uncomment for the correct execution of the class +// try { +// DatabaseManagement obj=new DatabaseManagement("./cfg/", configurationfile); +// +// +// //Retrieve the schema for the postgres database +// +// List schemas=new ArrayList(); +// +// schemas=obj.getSchemas(); +// +// if (schemas!=null){ +// +// //test print +// for (int i=0;i tables=new ArrayList(); +// +// +// // Retrieve the table names of the database +// tables=obj.getTables(); +// +// +// //test print +// for (int i=0;i=,", +// // false); +// // +// // ArrayList tokenslist=new ArrayList(); +// // +// // //StringTokenizer +// // AnalysisLogger.getLogger().debug("------------ Tokenizer ----------- "); +// // +// // +// // int count = string.countTokens(); +// // +// // for (int i=0; i< count; i++){ +// // +// // String token=string.nextToken(); +// // +// // +// // tokenslist.add(token); +// // +// // AnalysisLogger.getLogger().debug("TestParsing->: "+ token); +// // +// // } +// +// AnalysisLogger.getLogger().debug("TestParsing->: Query " + query); +// // System.out.println(); +// +// boolean AllowedQuery = false; +// +// // LexicalAnalyzer lexer=new LexicalAnalyzer(); +// // AllowedQuery=lexer.analyze(query); +// +// System.out.println("Specify the type of platform"); +// +// Scanner scanIn = new Scanner(System.in); +// String platform = scanIn.nextLine(); +// scanIn.close(); +// +// if (platform.toLowerCase().contains("postgres")) { +// +// PostgresLexicalAnalyzer obj = new PostgresLexicalAnalyzer(); +// +// AllowedQuery = obj.analyze(query); +// +// } +// +// if (platform.toLowerCase().contains("mysql")) { +// +// MySQLLexicalAnalyzer obj = new MySQLLexicalAnalyzer(); +// +// AllowedQuery = obj.analyze(query); +// +// } +// +// if (AllowedQuery == true) { +// +// AnalysisLogger.getLogger().debug("TestParsing->: filtered Query"); +// +// } else { +// AnalysisLogger.getLogger().debug( +// "TestParsing->: not filtered query"); +// } +// +// } +// +//} diff --git a/src/test/java/org/gcube/dataanalysis/test/TestPostgres.java b/src/test/java/org/gcube/dataanalysis/test/TestPostgres.java new file mode 100644 index 0000000..974366c --- /dev/null +++ b/src/test/java/org/gcube/dataanalysis/test/TestPostgres.java @@ -0,0 +1,284 @@ +package org.gcube.dataanalysis.test; +import java.util.List; + +import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; +import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils; +import org.hibernate.Query; +import org.hibernate.Session; +import
org.hibernate.SessionFactory; + + + +public class TestPostgres { + + /** + * @param args + */ + + @SuppressWarnings({"unchecked"}) + public static List executeHQLQuery(String query, SessionFactory DBSessionFactory, boolean useSQL) throws Exception{ + + List obj = null; + Session ss = null; + try { + ss = DBSessionFactory.getCurrentSession(); + + ss.beginTransaction(); + + Query qr = null; + + if (useSQL) + qr = ss.createSQLQuery(query); + else + qr = ss.createQuery(query); + + List result = qr.list(); + + ss.getTransaction().commit(); + + /* + if (result == null) + System.out.println("Hibernate doesn't return a valid object when org.gcube.contentmanagement.lexicalmatcher retrieves a UserState object"); + + if (result != null && result.size() == 0) + System.out.println(String.format("found nothing in database")); +*/ + if (result != null && result.size() != 0) { + obj = result; + } + + } catch (Exception e) { + +// System.out.println(String.format("Error while executing query: %1$s %2$s", query, e.getMessage())); +// e.printStackTrace(); + System.out.println(String.format("Error while executing query: %1$s %2$s", query, e.getMessage())); + throw e; + } + + return obj; + + } + + public static void main(String[] args) throws Exception{ + // TODO Auto-generated method stub + + /*AlgorithmConfiguration config = new AlgorithmConfiguration(); + config.setConfigPath("./cfg/"); + config.setParam("DatabaseUserName", "utente"); + config.setParam("DatabasePassword", "d4science"); + config.setParam("DatabaseDriver","org.postgresql.Driver"); + //config.setParam("DatabaseDialect", "org.hibernate.dialect.PostgresPlusDialect"); + config.setParam( + "DatabaseURL", + "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb"); + + SessionFactory dbconnection = DatabaseUtils.initDBSession(config); + List rawnames = DatabaseFactory + .executeSQLQuery( + "select a.source_data as sourceA, b.source_data as sourceB, a.target_data_scientific_name as targetA, b.target_data_scientific_name as targetB " + + "from bionymoutsimplefaked1csvpreprcsv as a join bionymoutfaked1csvpreprcsv as b on a.source_data=b.source_data limit 10", + dbconnection); + + + List rawnames = DatabaseFactory + .executeSQLQuery( + "select a.source_data as sourceA, a.target_data_scientific_name as targetA, b.source_data sourceB " + + "from bion_id_a1f27126_df23_4980_8e2b_4afc8aaa404f as a " + + "left join bion_id_ab251ee0_7cc6_49b2_8956_330f4716650f as b " + + "on a.source_data=b.source_data", + dbconnection); + + + //List rawnames = DatabaseFactory.executeHQLQuery(query, DBSessionFactory, useSQL); + + + + + + + System.out.println("***************************************************************"); + System.out.println(); + + for (int i = 0; i < rawnames.size(); i++) { + + Object[] row = (Object[]) rawnames.get(i); + + for (int j = 0; j < row.length; j++) { + + System.out.print("\"" + row[j] + "\"; "); + + } + System.out.println(); + //System.out.println("End "); + + }*/ + + + AlgorithmConfiguration config = new AlgorithmConfiguration(); + config.setConfigPath("./cfg/"); + config.setParam("DatabaseUserName", "root"); + config.setParam("DatabasePassword", "test"); + config.setParam("DatabaseDriver","com.mysql.jdbc.Driver"); + config.setParam("DatabaseDialect", "org.hibernate.dialect.MySQLDialect"); + config.setParam( + "DatabaseURL", + "jdbc:mysql://146.48.87.169/timeseries"); + + SessionFactory dbconnection = DatabaseUtils.initDBSession(config); + + + /*List rawnames = DatabaseFactory + .executeSQLQuery(
"select a.source_data as sourceA, b.source_data as sourceB, a.target_data_scientific_name as targetA, b.target_data_scientific_name as tagertB " + + "from bionymoutsimplefaked1csvpreprcsv as a join bionymoutfaked1csvpreprcsv as b on a.source_data=b.source_data limit 10", + dbconnection);*/ + + //try{ + + + + +// dbconnection.getCurrentSession().; +// +// +// ss.getCause() + + // } + /*catch(JDBCExceptionReporter e) + { + Throwable t = e.getCause(); + SQLException ex = (SQLException) t.getCause(); + while(ex != null){ + while(t != null) { + t = t.getCause(); + } + // Logger logger=new Logger(); + // logger.warn("SQLException="+ex.getLocalizedMessage()); + + System.out.println("sono qui"); + + ex = ex.getNextException(); + } + } + */ + System.out.println("***************************************************************"); + System.out.println(); + + //List rawnames =((javax.persistence.Query) query).getResultList(); + + + + try{ + + /*List rawnames = executeHQLQuery( + "select * from (select a.field1, b.field1_id as bb, b.field1 from a7f768710_c7b0_11df_b2bc_e0f36cf4c8cd as a "+ + "left join cur_00d4e2d0_ecbd_11df_87fa_de008e0850ff as b on a.field3_id=b.field1_id) as cd",*/ +// "select * "+ +// "from a7f768710_c7b0_11df_b2bc_e0f36cf4c8cd as a, cur_00d4e2d0_ecbd_11df_87fa_de008e0850ff as b limit 10", +// dbconnection, true); + + + List rawnames = executeHQLQuery( + "select a.field1, b.field1_id from a7f768710_c7b0_11df_b2bc_e0f36cf4c8cd as a "+ + "left join cur_00d4e2d0_ecbd_11df_87fa_de008e0850ff as b on a.field3_id=b.field1_id",dbconnection, true); +// "select * "+ +// "from a7f768710_c7b0_11df_b2bc_e0f36cf4c8cd as a, cur_00d4e2d0_ecbd_11df_87fa_de008e0850ff as b limit 10", +// dbconnection, true); + + + + System.out.println("Size: "+rawnames.size()); + + for (int i = 0; i < rawnames.size(); i++) { + + Object[] row = (Object[]) rawnames.get(i); + + for (int j = 0; j < row.length; j++) { + + System.out.print("\"" + row[j] + "\"; "); + + } + System.out.println(); + //System.out.println("Fine "); + + } + + }catch(Exception e){ + e.printStackTrace(); + System.out.println("message: "+e.getMessage()); + //System.out.println(e.getLocalizedMessage()); + + StackTraceElement [] elem=e.getStackTrace(); + + + System.out.println("localized: "+e.getCause().toString()); + String error=e.getCause().toString(); + if (error.contains("MySQLSyntaxErrorException")) + { + + System.out.println("ERROR "+e.getMessage()+" "+"because an error is present: "+e.getCause().getMessage()); + + + } + +// System.out.println("cause: "+e.getCause().getMessage()); + +// for (int i=0;i addresses=new ArrayList(); + + obj.setScope("/gcube/devsec"); + + addresses=obj.retrieveAddress("Database"); + + //Stampa + for (int i=0; iEXCEPTION: "+ e); +// } +// +// // SmartSampleOnTable operation +// +// // mgt.smartSampleOnTable(tableName); +// +// +// +// +// +// //MYSQL Database +// +//// try { +//// mgt.createConnection( +//// "root", +//// "test", +//// "com.mysql.jdbc.Driver", +//// "org.hibernate.dialect.MySQLDialect", +////// "jdbc:mysql://146.48.87.169:3306/col2oct2010", +//// "jdbc:mysql://146.48.87.169:3306/aquamaps", +//// "hcaf_d"); +//// } catch (IOException e) { +//// // TODO Auto-generated catch block +//// e.printStackTrace(); +//// } +//// +//// // SampleOnTable operation +//// +//// try { +//// // for database postgres, if a table is not in lower case format, it +//// // is necessary to include the table name in quotes "" +//// mgt.sampleOnTable("hcaf_d", "aquamaps"); +//// } catch (Exception e) { +//// // TODO 
Auto-generated catch block +//// e.printStackTrace(); +//// +//// AnalysisLogger +//// .getLogger() +//// .debug("In TestSampleOnTable->EXCEPTION: "+ e); +//// } +// +// } +// +//} diff --git a/src/test/java/org/gcube/dataanalysis/test/TestSmartSampleOnTable.java b/src/test/java/org/gcube/dataanalysis/test/TestSmartSampleOnTable.java new file mode 100644 index 0000000..7320330 --- /dev/null +++ b/src/test/java/org/gcube/dataanalysis/test/TestSmartSampleOnTable.java @@ -0,0 +1,109 @@ +//package org.gcube.dataanalysis.test; +// +//import java.io.IOException; +// +//import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger; +//import org.gcube.dataanalysis.databases.utils.DatabaseManagement; +// +//public class TestSmartSampleOnTable { +// +// public static void main(String[] args) { +// +// // connection to database +// DatabaseManagement mgt = new DatabaseManagement(""); +// +//// // Postgres Database +//// try { +//// mgt.createConnection( +//// "postgres", +//// "d4science2", +//// "org.postgresql.Driver", +//// "org.hibernate.dialect.PostgreSQLDialect", +//// "jdbc:postgresql://geoserver-dev.d4science-ii.research-infrastructures.eu:5432/aquamapsdb", +//// "aquamapsdb"); +//// } catch (IOException e) { +//// // TODO Auto-generated catch block +//// e.printStackTrace(); +//// } +//// +//// // SampleOnTable operation +//// +//// try { +//// // for the Postgres database, if a table name is not in lower case format, it +//// // is necessary to include the table name in double quotes "" +//// mgt.smartSampleOnTable("Divisions", "public"); +//// } catch (Exception e) { +//// // TODO Auto-generated catch block +//// e.printStackTrace(); +//// +//// AnalysisLogger.getLogger().debug( +//// "In TestSmartSampleOnTable->EXCEPTION: " + e); +//// } +// +// +// +// //Mysql database +// +// try { +// mgt.createConnection( +// "root", +// "test", +// "com.mysql.jdbc.Driver", +// "org.hibernate.dialect.MySQLDialect", +//// "jdbc:mysql://146.48.87.169:3306/col2oct2010", +// "jdbc:mysql://146.48.87.169:3306/aquamaps", +// "hcaf_d"); +// } catch (IOException e) { +// // TODO Auto-generated catch block +// e.printStackTrace(); +// } +// +// // SampleOnTable operation +// +// try { +// // run the sampling on the table +// mgt.smartSampleOnTable("hcaf_d", "aquamaps"); +// } catch (Exception e) { +// // TODO Auto-generated catch block +// e.printStackTrace(); +// +// AnalysisLogger +// .getLogger() +// .debug("In TestSmartSampleOnTable->EXCEPTION: "+ e); +// } +// +// //Mysql database +// +//// try { +//// mgt.createConnection( +//// "root", +//// "test", +//// "com.mysql.jdbc.Driver", +//// "org.hibernate.dialect.MySQLDialect", +////// "jdbc:mysql://146.48.87.169:3306/col2oct2010", +//// "jdbc:mysql://146.48.87.169:3306/col2oct2010", +//// "common_names"); +//// } catch (IOException e) { +//// // TODO Auto-generated catch block +//// e.printStackTrace(); +//// } +//// +//// // SampleOnTable operation +//// +//// try { +//// // run the sampling on the table +//// mgt.smartSampleOnTable("common_names", "col2oct2010"); +//// } catch (Exception e) { +//// // TODO Auto-generated catch block +//// e.printStackTrace(); +//// +//// AnalysisLogger +//// .getLogger() +//// .debug("In TestSmartSampleOnTable->EXCEPTION: "+ e); +//// } +// +// } +// +//}
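A closing note on the quoting rule that these tests repeat: Postgres folds unquoted identifiers to lower case, so a mixed-case table name only resolves when it is double-quoted. A tiny illustration (the table name is hypothetical):

public class QuotingSketch {
    public static void main(String[] args) {
        String table = "Divisions"; // hypothetical mixed-case Postgres table
        // unquoted: Postgres looks up the table "divisions"
        System.out.println("select * from " + table + " limit 10");
        // quoted: Postgres looks up the table "Divisions"
        System.out.println("select * from \"" + table + "\" limit 10");
    }
}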