// 2014-06-04 16:13:44 +02:00 (blame timestamp left by export; not code)
package org.gcube.dataaccess.databases.utils ;
// 2014-06-04 15:56:35 +02:00 (blame timestamp left by export; not code)
import java.io.BufferedWriter ;
import java.io.File ;
import java.io.FileOutputStream ;
import java.io.IOException ;
import java.io.OutputStreamWriter ;
// 2014-09-02 15:14:58 +02:00 (blame timestamp left by export; not code)
import java.sql.Connection ;
// 2014-06-04 15:56:35 +02:00 (blame timestamp left by export; not code)
import java.util.ArrayList ;
import java.util.HashMap ;
import java.util.LinkedHashMap ;
import java.util.List ;
// 2014-11-19 15:38:18 +01:00 (blame timestamp left by export; not code)
import java.util.UUID ;
// 2014-06-04 15:56:35 +02:00 (blame timestamp left by export; not code)
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger ;
// 2014-06-04 16:13:44 +02:00 (blame timestamp left by export; not code)
import org.gcube.dataaccess.databases.converter.SqlDialectConverter ;
import org.gcube.dataaccess.databases.sampler.Sampler ;
import org.gcube.dataaccess.databases.structure.AbstractTableStructure ;
import org.gcube.dataaccess.databases.structure.MySQLTableStructure ;
import org.gcube.dataaccess.databases.structure.PostgresTableStructure ;
// 2014-06-04 15:56:35 +02:00 (blame timestamp left by export; not code)
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration ;
import org.hibernate.SessionFactory ;
import com.adventnet.swissqlapi.sql.exception.ConvertException ;
import com.adventnet.swissqlapi.sql.parser.ParseException ;
/** Class that allows to manage a database offering several functionalities */
public class DatabaseManagement {
// AbstractTableStructure crossTableStructure;
// private List<String> tablesname = new ArrayList<String>();
private List < String > tablesname = null ;
// private String configPath = "./cfg/";
private String configPath = " " ;
private String sourceSchemaName = null ;
private SessionFactory sourceDBSession ;
private String DBType ;
private AbstractTableStructure crossTableStructure ;
// private DBAdapter typesMap;
private DatabaseOperations op = new DatabaseOperations ( ) ;
// private String destinationDBType;
// private String sourceDBType;
private MySQLTableStructure mysqlobj ;
private ConnectionManager connection ;
// private Integer estimatedRows = null;
private long estimatedRows = 0 ;
// file in which the result is stored when sample and query submit
// operations are executed
private File file = null ;
// file that will contain result
private BufferedWriter out ;
2014-12-05 16:01:07 +01:00
//total rows for a result of a submit query operation
private int submitQueryTotalRows ;
2014-06-04 15:56:35 +02:00
// file in which the table result is stored when sample
// operations are executed
// private File fileSample = null;
// map which contains the rows of the sample and query submit operations
// private LinkedHashMap<Integer, String> mapResult = new
// LinkedHashMap<Integer, String>();
// private HashMap<Integer, String> mapResult = new HashMap<Integer,
// String>();
private HashMap < String , String > mapResult = new HashMap < String , String > ( ) ;
// map which contains the rows that constitute the table result
// private LinkedHashMap<String, String> mapSampleTableResult = new
// LinkedHashMap<String, String>();
// file in which the result is stored when the query is executed
// private File fileQueryResult = null;
// map that contains the rows generated by the query
// private LinkedHashMap<String, String> mapQueryResult = new
// LinkedHashMap<String, String>();
private static final String MYSQL = " MySQL " ;
private static final String POSTGRES = " Postgres " ;
private static final String selectTablesQuery = " SELECT distinct table_name FROM information_schema.COLUMNS where table_schema='%1$s' " ;
// private static final String listSchemaNameQuery =
// "select schema_name from information_schema.schemata where schema_name <> 'information_schema' and schema_name !~ E'^pg_'";
private static final String listSchemaNameQuery = " select nspname from pg_namespace where nspname <> 'information_schema' and nspname !~ E'^pg_' " ;
// query to retrieve datatype columns of a database table
2014-11-28 12:54:54 +01:00
private static final String queryForDataTypeColumnsPostgres = " SELECT data_type, udt_name FROM information_schema.COLUMNS WHERE table_name ='%1$s' and table_schema='%2$s' order by ordinal_position asc " ;
2014-07-21 17:19:55 +02:00
private static final String queryForDataTypeColumnsMysql = " SELECT data_type FROM information_schema.COLUMNS WHERE table_name ='%1$s' and table_schema='%2$s' order by ordinal_position asc " ;
2014-06-04 15:56:35 +02:00
// query to get columns' name
2014-07-21 17:19:55 +02:00
private static final String queryForColumnsPostgres = " SELECT column_name FROM information_schema.COLUMNS WHERE table_name ='%1$s' and table_schema='%2$s' order by ordinal_position asc " ;
private static final String queryForColumnsMysql = " SELECT column_name FROM information_schema.COLUMNS WHERE table_name ='%1$s' and table_schema='%2$s' order by ordinal_position asc " ;
2014-06-04 15:56:35 +02:00
// Header Table that contains the column names of a table
private String header = " " ;
// list that contains the columns names of a table
List < String > listColumnNamesTable = null ;
// variable to set the language for translation
public static final int POSTGRESQLDialect = 4 ;
public static final int MYSQLDialect = 5 ;
/**
 * Creates a manager whose connection configuration is read from the given
 * directory.
 *
 * @param configPath path of the configuration directory
 */
public DatabaseManagement(String configPath) {
    this.configPath = configPath;
    connection = new ConnectionManager();
}
// for the exact parsing of the obtained results with the values of
// a database, a check is needed against the data type columns
// to convert from postgres and mysql datatypes to Java datatypes
private String convertToJavaType ( String type , String val ) throws Exception {
type = type . toLowerCase ( ) ;
2014-07-29 10:30:42 +02:00
// AnalysisLogger.getLogger()
// .debug("In DatabaseManagement->type: "
// + type);
2014-06-04 15:56:35 +02:00
String valConverted = val ;
2014-07-29 10:30:42 +02:00
// AnalysisLogger.getLogger()
// .debug("In DatabaseManagement->val: "
// + valConverted);
2014-06-04 15:56:35 +02:00
try {
// parse to Long
valConverted = " " + Long . parseLong ( valConverted ) ;
2014-07-29 10:30:42 +02:00
// AnalysisLogger.getLogger()
// .debug("In DatabaseManagement->parsed value Long: "
// + valConverted);
2014-06-04 15:56:35 +02:00
}
catch ( Exception e ) {
try {
// check to fix a problem for the database.
// Indeed if the string is
// an hexadecimal some strings as (6F or 6D that
// are double and float values) are
// casted to Double and the value returned is
// 6.0 altering the original value. If the string is
// an hexadecimal the cast is not performed.
if ( ( type ! = null ) ) {
// // check data type value
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->check data type value: "
// + type);
if ( ( type . contains ( " decimal " ) ) | | ( type . contains ( " double " ) )
| | ( type . contains ( " numeric " ) )
| | ( type . contains ( " float " ) ) ) {
valConverted = " " + Double . parseDouble ( valConverted ) ;
// // check the parsed value
2014-07-29 10:30:42 +02:00
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->parsed value Double: "
// + valConverted);
2014-06-04 15:56:35 +02:00
}
if ( type . contains ( " real " ) ) {
valConverted = " " + Float . parseFloat ( valConverted ) ;
// // check the parsed value
2014-07-29 10:30:42 +02:00
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->parsed value Float: "
// + valConverted);
2014-07-21 12:57:49 +02:00
}
2014-07-29 10:30:42 +02:00
if ( ( ( type . contains ( " string " ) ) )
| | ( type . contains ( " varchar " ) )
| | ( type . contains ( " char " ) )
| | ( type . contains ( " text " ) )
| | ( type . contains ( " character varying " ) ) ) {
valConverted = " \" " + valConverted + " \" " ;
2014-07-21 12:57:49 +02:00
// // check the parsed value
2014-07-29 10:30:42 +02:00
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->parsed value string: "
// + valConverted);
2014-06-04 15:56:35 +02:00
}
2014-11-28 12:54:54 +01:00
if ( ( type . contains ( " geometry " ) ) | | ( type . contains ( " geography " ) ) ) {
valConverted = " \" " + valConverted + " \" " ;
}
2014-06-04 15:56:35 +02:00
}
} catch ( Exception excp ) {
throw excp ;
}
}
2014-11-28 12:54:54 +01:00
// //to truncate value to 255 characters if it exceeds 255 characters
// if(valConverted.length()>255){
// valConverted = valConverted.substring(0, 255);
//// System.out.println("val truncated: " + valConverted);
//// System.out.println("elem geometry truncated");
//
// //add quote if it has been removed with truncation
// if((valConverted.startsWith("\""))&&(!valConverted.endsWith("\""))){
// valConverted = valConverted+"\"";
//// System.out.println("adding quote: " + valConverted);
// }
// }
2014-06-04 15:56:35 +02:00
return valConverted ;
}
/**
 * Creates the database connection from explicit input data (no configuration
 * file). For a MySQL database the {@code sourceSchemaName} variable will be
 * the database name, while for Postgres it is set to the schema name
 * elsewhere (see {@link #createConnection(String, String)} and getTables).
 *
 * @param DatabaseUserName the database user
 * @param DatabasePassword the database password
 * @param DatabaseDriver   the JDBC driver class name (used to detect the DB type)
 * @param DatabaseDialect  the Hibernate dialect
 * @param DatabaseURL      the JDBC URL
 * @param DatabaseName     the database name
 * @return the created session factory
 * @throws IOException if the configuration cannot be built
 */
public SessionFactory createConnection(String DatabaseUserName,
        String DatabasePassword, String DatabaseDriver,
        String DatabaseDialect, String DatabaseURL, String DatabaseName)
        throws IOException {
    AlgorithmConfiguration config = new AlgorithmConfiguration();
    AnalysisLogger.getLogger().debug(
            "In DatabaseManagement->DatabaseName: " + DatabaseName);
    AnalysisLogger.getLogger().debug(
            "In DatabaseManagement->DatabaseURL: " + DatabaseURL);
    AnalysisLogger.getLogger().debug(
            "In DatabaseManagement->DatabaseUserName: " + DatabaseUserName);
    // security fix: never log the clear-text password
    AnalysisLogger.getLogger().debug(
            "In DatabaseManagement->DatabasePassword: ***");
    AnalysisLogger.getLogger().debug(
            "In DatabaseManagement->configPath: " + configPath);
    AnalysisLogger.getLogger().debug(
            "In DatabaseManagement->DatabaseDriver: " + DatabaseDriver);
    AnalysisLogger.getLogger().debug(
            "In DatabaseManagement->DatabaseDialect: " + DatabaseDialect);
    config = connection.setconfiguration(configPath, DatabaseUserName,
            DatabasePassword, DatabaseDriver, DatabaseDialect, DatabaseURL,
            DatabaseName);
    sourceDBSession = connection.initDBConnection(config);
    // detect the database type from the driver class name; note that DBType
    // stays null when the driver is neither Postgres nor MySQL
    if (DatabaseDriver.toLowerCase().contains("postgres")) {
        DBType = POSTGRES;
    }
    if (DatabaseDriver.toLowerCase().contains("mysql")) {
        DBType = MYSQL;
    }
    return sourceDBSession;
}
// create the database's connection using the configuration file.
// note that for database postgres the variable sourceSchemaName is set to
// the schema's name.
2014-09-10 09:57:17 +02:00
// Note that this method is not called actually
2014-06-04 15:56:35 +02:00
public SessionFactory createConnection ( String cfgDir , String SourceFile )
throws Exception {
configPath = cfgDir ;
if ( ! configPath . endsWith ( " / " ) )
configPath + = " / " ;
sourceSchemaName = op . getDBSchema ( configPath + SourceFile ) ; // the
// variable
// "sourceschemaname"
// is
// retrieved
// through
// this
// method
// that
// recovers
// the value
// by means
// of the
// configuration
// file.
// for a database postgresql it is the schema's name while for a
// database mysql it is the database's name.
sourceDBSession = connection . initDBConnection ( configPath + SourceFile ) ;
// destinationDBType = POSTGRES;
// sourceDBType = MYSQL;
//
// // typesMap = new DBAdapter(configPath + "/" + sourceDBType + "2"
// // + destinationDBType + ".properties");
DBType = op . getDBType ( ) ; // Recover the database's type by means of the
// configuration file
return sourceDBSession ;
}
// close the connection
public void closeConnection ( ) {
2014-10-24 18:44:30 +02:00
if ( sourceDBSession ! = null ) {
sourceDBSession . close ( ) ;
AnalysisLogger . getLogger ( ) . debug (
" In DatabaseManagement->Connection closed " ) ;
sourceDBSession = null ;
}
2014-06-04 15:56:35 +02:00
}
/ * *
* method that allows to submit a query .
*
* @param schemaName
* : the schema ' s name of the database postgres . For database
* mysql this parameter is null .
* @return query result
* @throws Exception
* /
// method that allows to submit a query
2014-11-19 15:38:18 +01:00
public List < Object > submitQuery ( String query , Connection conn , String pathFile ) throws Exception {
2014-06-04 15:56:35 +02:00
List < Object > results = new ArrayList < Object > ( ) ;
2014-09-02 15:14:58 +02:00
results = connection . executeQueryJDBC ( query , conn ) ;
2014-11-19 15:38:18 +01:00
2014-06-04 15:56:35 +02:00
if ( results ! = null ) {
AnalysisLogger . getLogger ( ) . debug (
" DatabaseManagement-> Query result retrieved " ) ;
2014-12-05 16:01:07 +01:00
submitQueryTotalRows = results . size ( ) ;
2014-06-04 15:56:35 +02:00
}
2014-07-29 12:09:49 +02:00
// // store table in a file
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->store table in a file");
2014-06-04 15:56:35 +02:00
// List<String> listColumnNames = getColumnNamesTable(tableName,
// schemaName);
// to retrieve datatype columns of a table
List < String > DataTypeColumns = null ;
2014-11-19 15:38:18 +01:00
// store table in a file and build the result Map
2014-11-28 12:54:54 +01:00
String FileName = pathFile + " SubmitQueryResult_ " + UUID . randomUUID ( ) + " .csv " ;
2014-11-19 15:38:18 +01:00
// write the result in the file and in the map
AnalysisLogger . getLogger ( ) . debug (
" In DatabaseManagement-> writing the result in the file: "
+ FileName ) ;
try {
file = new File ( FileName ) ;
out = new BufferedWriter ( new OutputStreamWriter ( new FileOutputStream (
file ) , " UTF-8 " ) ) ;
2014-11-28 12:54:54 +01:00
writeSubmitResultIntoFile ( results , DataTypeColumns ) ;
2014-11-19 15:38:18 +01:00
// //truncate the result list to 10000 rows
// if ((mapResult!=null)&&(mapResult.size()!=0)){
// //result size without header
// int mapSize = mapResult.size() - 1;
// AnalysisLogger.getLogger().debug(
// "DatabaseManagement-> size of the map without header obtained from the submit operation: "
// + mapSize);
// if (mapSize>10000){
// int numElemToDelete = mapSize - 10000;
// AnalysisLogger.getLogger().debug(
// "DatabaseManagement-> The result size is greater than 10000 rows. Rows number to delete: "
// + numElemToDelete);
// AnalysisLogger.getLogger().debug(
// "DatabaseManagement-> Starting to remove elements");
// int i=10000;
// if (numElemToDelete != 0){
// while ((i < mapSize)) {
// mapResult.remove(String.valueOf(i));
// i++;
//
// }
//
// AnalysisLogger.getLogger().debug(
// "DatabaseManagement-> size of the truncated map without header: "
// + (mapResult.size()-1));
// }
//
// }
// }
2019-11-27 16:58:52 +01:00
} catch ( Throwable e ) {
throw new Exception ( e . getLocalizedMessage ( ) ) ;
2014-11-19 15:38:18 +01:00
} finally {
//close the file
if ( out ! = null ) {
out . close ( ) ;
out = null ;
AnalysisLogger . getLogger ( ) . debug (
" In DatabaseManagement-> File closed " ) ;
}
}
2014-06-04 15:56:35 +02:00
2014-07-29 12:09:49 +02:00
// build the Map of Result
2014-11-19 15:38:18 +01:00
// buildMapResult(results, DataTypeColumns);
2014-07-29 12:09:49 +02:00
2014-06-04 15:56:35 +02:00
return results ;
}
2014-12-05 16:01:07 +01:00
2014-06-04 15:56:35 +02:00
/ * *
* Get the table ' s names for a database .
*
* @param schemaName
* : the schema ' s name of the database postgres . For database
* mysql this parameter is null .
* @return List < String > listTables : a list tables ' names .
* @throws Exception
* /
public List < String > getTables ( String databaseName , String schemaName )
throws Exception {
String query = null ;
if ( DBType . equals ( POSTGRES ) ) {
query = String . format ( selectTablesQuery , schemaName ) ;
AnalysisLogger . getLogger ( ) . debug (
" DatabaseManagement->retrieving tables names with query: "
+ query ) ;
sourceSchemaName = schemaName ; // a database postgres manages schema
// concept so for every operation on
// the database it is important to
// specify the schema name
}
if ( DBType . equals ( MYSQL ) ) {
query = String . format ( selectTablesQuery , databaseName ) ;
AnalysisLogger . getLogger ( ) . debug (
" DatabaseManagement->retrieving tables names with query: "
+ query ) ;
sourceSchemaName = databaseName ; // for database mysql the schema
// name is the database name
// because the database mysql
// does not manage schema
// concept.
}
List < Object > resultSet = connection
. executeQuery ( query , sourceDBSession ) ;
if ( resultSet ! = null ) {
// AnalysisLogger.getLogger().debug(
// "DatabaseManagement->table's list: ");
AnalysisLogger . getLogger ( ) . debug (
" DatabaseManagement->table's list retrieved " ) ;
tablesname = new ArrayList < String > ( ) ;
2014-07-21 12:57:49 +02:00
2014-06-05 11:03:12 +02:00
for ( int i = 0 ; i < resultSet . size ( ) ; i + + ) {
2014-06-04 15:56:35 +02:00
2014-07-21 12:57:49 +02:00
Object element = resultSet . get ( i ) ;
ArrayList < Object > listvalues = new ArrayList < Object > (
( ( LinkedHashMap < String , Object > ) element ) . values ( ) ) ;
for ( int j = 0 ; j < listvalues . size ( ) ; j + + ) {
2014-06-04 15:56:35 +02:00
2014-07-21 12:57:49 +02:00
tablesname . add ( listvalues . get ( j ) . toString ( ) ) ;
// AnalysisLogger.getLogger().debug(
// "DatabaseManagement->" + listvalues.get(j).toString());
}
2014-06-04 15:56:35 +02:00
}
2014-06-05 11:03:12 +02:00
2014-07-21 12:57:49 +02:00
// for (Object result : resultSet) {
// tablesname.add((String) result);
//
// // AnalysisLogger.getLogger().debug(
// // "DatabaseManagement->" + (String) result);
//
// }
2014-06-04 15:56:35 +02:00
}
return tablesname ;
}
/ * *
* Get the schema ' s name for the database Postgresql .
*
* @return listSchemas : the list of the schemas ' names
* @throws Exception
* /
public List < String > getSchemas ( ) throws Exception {
List < String > list = new ArrayList < String > ( ) ;
if ( DBType . equals ( POSTGRES ) ) {
2014-09-01 16:45:00 +02:00
// AnalysisLogger.getLogger().debug("DatabaseManagement-> execute query");
2014-06-04 15:56:35 +02:00
List < Object > resultSet = connection . executeQuery (
listSchemaNameQuery , sourceDBSession ) ;
if ( resultSet ! = null ) {
2014-07-21 12:57:49 +02:00
// for (Object result : resultSet) {
// list.add((String) result);
// }
2014-06-05 11:03:12 +02:00
for ( int i = 0 ; i < resultSet . size ( ) ; i + + ) {
2014-07-21 12:57:49 +02:00
Object element = resultSet . get ( i ) ;
2014-06-05 11:03:12 +02:00
2014-07-21 12:57:49 +02:00
ArrayList < Object > listvalues = new ArrayList < Object > (
( ( LinkedHashMap < String , Object > ) element ) . values ( ) ) ;
for ( int j = 0 ; j < listvalues . size ( ) ; j + + ) {
list . add ( listvalues . get ( j ) . toString ( ) ) ;
// AnalysisLogger.getLogger().debug(
// "DatabaseManagement->" +
// listvalues.get(j).toString());
}
2014-06-05 11:03:12 +02:00
2014-06-04 15:56:35 +02:00
}
2014-07-21 12:57:49 +02:00
2014-06-04 15:56:35 +02:00
}
}
return list ;
}
/ * *
* Get the " Create Table " statement .
*
* @param schemaName
* : the schema ' s name of the database postgres . For database
* mysql this parameter is null .
* @return String : the create statement .
* @throws Exception
* /
// Get the "Create Table" statement
public String getCreateTable ( String tablename , String schemaName )
throws Exception {
String createstatement = " " ;
if ( DBType . equals ( POSTGRES ) ) {
crossTableStructure = getSourceTableObject ( tablename , schemaName ) ;
String tableBuildQuery = crossTableStructure . buildUpCreateTable ( ) ;
createstatement = tableBuildQuery ;
AnalysisLogger . getLogger ( ) . debug (
" DatabaseManagement->'Create Table' statement: "
+ tableBuildQuery ) ;
}
if ( DBType . equals ( MYSQL ) ) {
crossTableStructure = getSourceTableObject ( tablename , schemaName ) ;
try {
String createtablestatement = mysqlobj . showCreateTable (
connection , sourceDBSession ) ;
createstatement = createtablestatement ;
AnalysisLogger . getLogger ( ) . debug (
" DatabaseManagement->'Create Table' statement: "
+ createtablestatement ) ;
} catch ( Exception e ) {
AnalysisLogger . getLogger ( ) . debug (
" DatabaseManagement->Exception: " + e . getMessage ( ) ) ;
throw e ;
}
}
// to recover the column names of the table
getColumnNamesTable ( tablename , schemaName ) ;
return createstatement ;
}
/**
 * Creates the table-structure object matching the database type. As a side
 * effect sets {@code sourceSchemaName} and, for MySQL, caches the structure
 * in {@code mysqlobj} (reused by getCreateTable).
 *
 * @return the structure object, or null if the database type is unknown
 */
private AbstractTableStructure getSourceTableObject(String tableName,
        String schemaName) throws Exception {
    sourceSchemaName = schemaName;
    if (DBType.equals(MYSQL)) {
        mysqlobj = new MySQLTableStructure(sourceSchemaName, tableName,
                sourceDBSession);
        return mysqlobj;
    } else if (DBType.equals(POSTGRES)) {
        return new PostgresTableStructure(sourceSchemaName, tableName,
                sourceDBSession);
    } else {
        return null;
    }
}
// Method that returns the estimated number of rows
2014-07-29 10:30:42 +02:00
public long getNumberOfRows ( String tablename , String schemaName )
throws Exception {
2014-06-04 15:56:35 +02:00
long rows ;
2014-07-23 17:11:58 +02:00
rows = op . calculateElements ( connection , DBType , tablename , schemaName ,
2014-06-04 15:56:35 +02:00
sourceDBSession ) ;
2014-07-29 10:30:42 +02:00
2014-06-04 15:56:35 +02:00
AnalysisLogger . getLogger ( ) . debug (
" DatabaseManagement->rows' number calculated: " + rows ) ;
estimatedRows = rows ;
return rows ;
}
/ * *
* retrieve 100 rows of a table randomly that have the maximum number of
* columns not null
*
* @param tableName
* : the table ' s name
* @param schemaName
* : the schema ' s name of the database postgres . For database
* mysql this parameter is the database name .
* @return a rows ' list .
* @throws Exception
* /
// retrieve 100 rows of a table randomly that have the maximum number of
// columns not null
public void smartSampleOnTable ( String tableName , String schemaName ,
String pathFile ) throws Exception {
List < Object > resultSet = null ;
AnalysisLogger
. getLogger ( )
. debug ( " DatabaseManagement->starting the Smart Sample on table operation " ) ;
AnalysisLogger . getLogger ( ) . debug (
" DatabaseManagement->retrieving the 100 rows " ) ;
if ( estimatedRows = = 0 ) {
// estimatedRows = Integer.valueOf(getNumberOfRows(tableName));
2014-07-23 17:11:58 +02:00
estimatedRows = getNumberOfRows ( tableName , schemaName ) ;
2014-06-04 15:56:35 +02:00
}
// to retrieve datatype columns of a table
List < String > DataTypeColumns = getDataTypeColumns ( tableName , schemaName ) ;
Sampler sampler = new Sampler ( ) ;
resultSet = sampler . smartSampleOnTable ( connection , sourceDBSession ,
DBType , tableName , schemaName , estimatedRows , DataTypeColumns ) ;
if ( resultSet ! = null ) {
AnalysisLogger . getLogger ( ) . debug (
" DatabaseManagement-> rows retrieved " ) ;
}
2014-07-29 12:09:49 +02:00
// // store table in a file
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->store table in a file");
2014-06-04 15:56:35 +02:00
// store table in a file
// writeSampleTableIntoFile(resultSet, tableName, schemaName);
2014-11-28 12:54:54 +01:00
//TODO ** COMMENTED
// String FileName = pathFile + "SampleResult.csv";
//
// // to recover columns names list
//
2014-06-04 15:56:35 +02:00
List < String > listColumns = sampler . getListColumns ( ) ;
2014-11-28 12:54:54 +01:00
// // String header = "";
//
// // //print check
// // AnalysisLogger.getLogger().debug(
// // "In DatabaseManagement->list columns size: " +listColumns.size());
//
2014-06-04 15:56:35 +02:00
// to recover columns names list
for ( int i = 0 ; i < listColumns . size ( ) ; i + + ) {
if ( i ! = listColumns . size ( ) - 1 ) {
header = header + listColumns . get ( i ) + " , " ;
} else {
header = header + listColumns . get ( i ) ;
}
}
// //print check
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->header: " + header);
2014-07-29 12:09:49 +02:00
// // write the result in the file and in the map
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->writing the result in the file: "
// + FileName);
// file = new File(FileName);
// out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(
// file), "UTF-8"));
// // writeTableIntoFile(resultSet, DataTypeColumns);
2014-06-04 15:56:35 +02:00
2014-07-29 12:09:49 +02:00
// build the Map of Result
2014-11-28 12:54:54 +01:00
// buildMapResult(resultSet, DataTypeColumns);
// store table in a file and build the result Map
String FileName = pathFile + " SmartSampling_ " + UUID . randomUUID ( ) + " .csv " ;
// write the result in the file and in the map
AnalysisLogger . getLogger ( ) . debug (
" In DatabaseManagement-> writing the result in the file: "
+ FileName ) ;
try {
file = new File ( FileName ) ;
out = new BufferedWriter ( new OutputStreamWriter ( new FileOutputStream (
file ) , " UTF-8 " ) ) ;
writeSamplingResultIntoFile ( resultSet , DataTypeColumns ) ;
2019-11-27 16:58:52 +01:00
} catch ( Throwable e ) {
throw new Exception ( e . getLocalizedMessage ( ) ) ;
2014-11-28 12:54:54 +01:00
} finally {
//close the file
if ( out ! = null ) {
out . close ( ) ;
out = null ;
AnalysisLogger . getLogger ( ) . debug (
" In DatabaseManagement-> File closed " ) ;
}
}
2014-06-04 15:56:35 +02:00
}
/ * *
* Retrieve the first 100 rows of a table .
*
* @param tableName
* : the table ' s name
* @param schemaName
* : the schema ' s name of the database postgres . For database
* mysql this parameter is the database name .
* @return a rows ' list .
* @throws Exception
* /
// retrieve the first 100 rows of a table
public void sampleOnTable ( String tableName , String schemaName ,
String pathFile ) throws Exception {
List < Object > resultSet = null ;
AnalysisLogger . getLogger ( ) . debug (
" DatabaseManagement->starting the Sample on table operation " ) ;
AnalysisLogger . getLogger ( ) . debug (
" DatabaseManagement->retrieving the first 100 rows " ) ;
// to retrieve datatype columns of a table
List < String > DataTypeColumns = getDataTypeColumns ( tableName , schemaName ) ;
Sampler sampler = new Sampler ( ) ;
resultSet = sampler . sampleOnTable ( connection , sourceDBSession , DBType ,
tableName , schemaName , DataTypeColumns ) ;
if ( resultSet ! = null ) {
AnalysisLogger . getLogger ( ) . debug (
" DatabaseManagement-> rows retrieved " ) ;
}
2014-07-29 12:09:49 +02:00
// // store table in a file
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->store table in a file");
2014-06-04 15:56:35 +02:00
2014-11-28 12:54:54 +01:00
2014-06-04 15:56:35 +02:00
// to recover columns names list
List < String > listColumns = sampler . getListColumns ( ) ;
for ( int i = 0 ; i < listColumns . size ( ) ; i + + ) {
if ( i ! = listColumns . size ( ) - 1 ) {
header = header + listColumns . get ( i ) + " , " ;
} else {
header = header + listColumns . get ( i ) ;
}
}
2014-11-28 12:54:54 +01:00
//
// // // store table in a file
// // String FileName = pathFile + "SampleResult.csv";
// // // write the result in the file and in the map
// // AnalysisLogger.getLogger().debug(
// // "In DatabaseManagement->writing the result in the file: "
// // + FileName);
// // file = new File(FileName);
// // out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(
// // file), "UTF-8"));
// // writeTableIntoFile(resultSet, DataTypeColumns);
//
// // build the Map of Result
// buildMapResult(resultSet, DataTypeColumns);
// store table in a file and build the result Map
String FileName = pathFile + " Sampling_ " + UUID . randomUUID ( ) + " .csv " ;
// write the result in the file and in the map
AnalysisLogger . getLogger ( ) . debug (
" In DatabaseManagement-> writing the result in the file: "
+ FileName ) ;
try {
file = new File ( FileName ) ;
out = new BufferedWriter ( new OutputStreamWriter ( new FileOutputStream (
file ) , " UTF-8 " ) ) ;
writeSamplingResultIntoFile ( resultSet , DataTypeColumns ) ;
2019-11-27 16:58:52 +01:00
} catch ( Throwable e ) {
throw new Exception ( e . getLocalizedMessage ( ) ) ;
2014-11-28 12:54:54 +01:00
} finally {
//close the file
if ( out ! = null ) {
out . close ( ) ;
out = null ;
AnalysisLogger . getLogger ( ) . debug (
" In DatabaseManagement-> File closed " ) ;
}
}
}
2014-06-04 15:56:35 +02:00
/ * *
* Retrieve 100 rows of a table in a random manner .
*
* @param tableName
* : the table ' s name
* @param schemaName
* : the schema ' s name of the database postgres . For database
* mysql this parameter is the database name .
* @return a rows ' list .
* @throws Exception
* /
// to retrieve 100 rows of a table in a random manner
public void randomSampleOnTable ( String tableName , String schemaName ,
String pathFile ) throws Exception {
List < Object > resultSet = null ;
AnalysisLogger
. getLogger ( )
. debug ( " DatabaseManagement->starting the Random Sample on table operation " ) ;
AnalysisLogger . getLogger ( ) . debug (
" DatabaseManagement->retrieving 100 rows " ) ;
// to retrieve datatype columns of a table
List < String > DataTypeColumns = getDataTypeColumns ( tableName , schemaName ) ;
if ( estimatedRows = = 0 ) {
2014-07-23 17:11:58 +02:00
estimatedRows = getNumberOfRows ( tableName , schemaName ) ;
2014-06-04 15:56:35 +02:00
}
Sampler sampler = new Sampler ( ) ;
resultSet = sampler . randomSampleOnTable ( connection , sourceDBSession ,
DBType , tableName , schemaName , estimatedRows , DataTypeColumns ) ;
if ( resultSet ! = null ) {
AnalysisLogger . getLogger ( ) . debug (
" DatabaseManagement-> rows retrieved " ) ;
}
2014-07-29 12:09:49 +02:00
// // store table in a file
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->store table in a file");
2014-06-04 15:56:35 +02:00
2014-11-28 12:54:54 +01:00
//TODO ** COMMENTED
2014-06-04 15:56:35 +02:00
// to recover columns names list
List < String > listColumns = sampler . getListColumns ( ) ;
2014-11-28 12:54:54 +01:00
//
// // //print check
// // for (int i = 0; i < listColumns.size(); i++) {
// // AnalysisLogger.getLogger()
// // .debug("In DatabaseManagement->listcolumns: "
// // + listColumns.get(i));
// // }
// // String header = "";
//
2014-06-04 15:56:35 +02:00
for ( int i = 0 ; i < listColumns . size ( ) ; i + + ) {
if ( i ! = listColumns . size ( ) - 1 ) {
header = header + listColumns . get ( i ) + " , " ;
} else {
header = header + listColumns . get ( i ) ;
}
}
2014-07-29 12:09:49 +02:00
// // store table in a file
// String FileName = pathFile + "SampleResult.csv";
// // write the result in the file and in the map
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->writing the result in the file: "
// + FileName);
// file = new File(FileName);
// out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(
// file), "UTF-8"));
// writeTableIntoFile(resultSet, DataTypeColumns);
// build the Map of Result
2014-11-28 12:54:54 +01:00
// buildMapResult(resultSet, DataTypeColumns);
// store table in a file and build the result Map
String FileName = pathFile + " RandomSampling_ " + UUID . randomUUID ( ) + " .csv " ;
// write the result in the file and in the map
AnalysisLogger . getLogger ( ) . debug (
" In DatabaseManagement-> writing the result in the file: "
+ FileName ) ;
try {
file = new File ( FileName ) ;
out = new BufferedWriter ( new OutputStreamWriter ( new FileOutputStream (
file ) , " UTF-8 " ) ) ;
writeSamplingResultIntoFile ( resultSet , DataTypeColumns ) ;
2019-11-27 16:58:52 +01:00
} catch ( Throwable e ) {
throw new Exception ( e . getLocalizedMessage ( ) ) ;
2014-11-28 12:54:54 +01:00
} finally {
//close the file
if ( out ! = null ) {
out . close ( ) ;
out = null ;
AnalysisLogger . getLogger ( ) . debug (
" In DatabaseManagement-> File closed " ) ;
}
}
2014-06-04 15:56:35 +02:00
}
2014-07-29 12:09:49 +02:00
// build the map of results
private void buildMapResult ( List < Object > result ,
List < String > DataTypeColumns ) throws Exception {
// to get columns names and result
// to recover columns names
if ( header . equals ( " " ) ) {
ArrayList < String > listKeys = new ArrayList < String > (
( ( LinkedHashMap < String , Object > ) ( result . get ( 0 ) ) ) . keySet ( ) ) ;
for ( int i = 0 ; i < listKeys . size ( ) ; i + + ) {
if ( i ! = listKeys . size ( ) - 1 ) {
header = header + listKeys . get ( i ) + " , " ;
} else {
header = header + listKeys . get ( i ) ;
}
}
}
2014-09-02 15:14:58 +02:00
// // print check
// AnalysisLogger.getLogger().debug(
// "DatabaseManagement->HEADERS: " + header);
2014-07-29 12:09:49 +02:00
// add headers
mapResult . put ( " HEADERS " , header ) ;
// //print check values
// AnalysisLogger.getLogger().debug(
// "DatabaseManagement->columns names: " + listKeys);
if ( result ! = null & & result . size ( ) ! = 0 ) {
2014-11-19 15:38:18 +01:00
2014-07-29 12:09:49 +02:00
// // write operation in the file
for ( int i = 0 ; i < result . size ( ) ; i + + ) {
String RowString = " " ;
Object element = result . get ( i ) ;
// arraylist in which each element is a row result
ArrayList < Object > listvalues = new ArrayList < Object > (
( ( LinkedHashMap < String , Object > ) element ) . values ( ) ) ;
2014-09-02 15:14:58 +02:00
// // print check
2014-08-28 15:57:57 +02:00
// AnalysisLogger.getLogger().debug(
// "DatabaseManagement->values: " + listvalues);
2014-07-29 12:09:49 +02:00
// each row could have several column values
Object [ ] row = listvalues . toArray ( ) ;
if ( row . length > = 1 ) {
for ( int j = 0 ; j < row . length ; j + + ) {
if ( row [ j ] = = null ) {
row [ j ] = " " ;
}
// to parse the obtained results in order to align
// number
// values with those of postgres
String original = row [ j ] . toString ( ) ;
// // check value
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->original value: "
// + original);
String parsed = " " + row [ j ] ;
if ( original ! = " " ) {
// convert database datatypes to Java datatypes
if ( DataTypeColumns = = null
| | DataTypeColumns . size ( ) = = 0 )
parsed = convertToJavaType ( row [ j ] . getClass ( )
. getName ( ) , parsed ) ;
else
parsed = convertToJavaType (
DataTypeColumns . get ( j ) , parsed ) ;
}
// // check value
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->row: " + (i + 1)
// + " column: " + (j + 1) + " value= "
// + parsed);
// write in a file
if ( j ! = row . length - 1 ) {
// System.out.println("write column : " + j);
// RowString = RowString + parsed + " ";
if ( j = = 0 ) {
RowString = parsed ;
} else {
RowString = RowString + " , " + parsed ;
}
}
if ( j = = row . length - 1 ) {
// to add a row to the map
if ( row . length = = 1 ) {
RowString = parsed ;
} else {
RowString = RowString + " , " + parsed ;
}
// to add a row to the map
// RowString = RowString + "," + parsed;
// mapSampleTableResult.put(String.valueOf(i),
// RowString);
// check value row
2014-07-31 11:02:47 +02:00
// AnalysisLogger.getLogger().debug(
// "writing the value: " + RowString + " key: "
// + String.valueOf(i));
2014-09-02 15:14:58 +02:00
// AnalysisLogger.getLogger().debug(
// "row: " + RowString);
2014-07-29 12:09:49 +02:00
// mapResult.put(Integer.valueOf(i), RowString);
mapResult . put ( String . valueOf ( i ) , RowString ) ;
2014-11-19 15:38:18 +01:00
2014-07-29 12:09:49 +02:00
}
}
}
// else if (result.size() == 1) {
//
// // Object RowElement = (Object) result.get(0);
//
// if (row[0] == null) {
// row[0] = "";
// }
//
// // to parse the obtained results in order to align
// // number
// // values with those of postgres
// String original = row[0].toString();
//
// // // check value
// // AnalysisLogger.getLogger().debug(
// // "In DatabaseManagement->original value: "
// // + original);
//
// String parsed = "" + row[0];
//
// if (original != "") {
// // convert database datatypes to Java datatypes
// if (DataTypeColumns == null
// || DataTypeColumns.size() == 0)
// parsed = convertToJavaType(row[0].getClass()
// .getName(), parsed);
// else
// parsed = convertToJavaType(DataTypeColumns.get(0),
// parsed);
// }
//
// out.write(row[0].toString());
// out.newLine();
//
// // to add a row to the map
// mapResult.put(String.valueOf(i), row[0].toString());
//
// }
}
}
2014-11-19 15:38:18 +01:00
//print check
// AnalysisLogger.getLogger().debug(
// "mapResult size with header: " + mapResult.size());
2014-07-29 12:09:49 +02:00
}
2014-06-04 15:56:35 +02:00
2014-11-19 15:38:18 +01:00
// write the table result in the file and build the map of results
2014-11-28 12:54:54 +01:00
private void writeSubmitResultIntoFile ( List < Object > result ,
2014-11-19 15:38:18 +01:00
List < String > DataTypeColumns ) throws Exception {
// // file that will contain result
// BufferedWriter out;
// // String fileName;
// // fileName = "./cfg/" + "table.txt";
// // fileName = "./files/" + "table.txt";
// // fileName =
// //
// "/home/loredana/workspace/DatabasesResourcesManagerAlgorithms/cfg/"
// // + "SampleOnTable.txt";
// // fileName = "./files/" + "SmartSampleOnTable.txt";
// // File file = new File(fileName);
// file = new File(FileName);
// out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(
// file), "UTF-8"));
try {
// to get columns names and result
// write headers in the file
// to recover columns names
if ( header . equals ( " " ) ) {
ArrayList < String > listKeys = new ArrayList < String > (
( ( LinkedHashMap < String , Object > ) ( result . get ( 0 ) ) ) . keySet ( ) ) ;
for ( int i = 0 ; i < listKeys . size ( ) ; i + + ) {
if ( i ! = listKeys . size ( ) - 1 ) {
header = header + listKeys . get ( i ) + " , " ;
} else {
header = header + listKeys . get ( i ) ;
}
}
}
// // print check
// AnalysisLogger.getLogger().debug(
// "DatabaseManagement->HEADERS: " + header);
out . write ( header ) ;
out . newLine ( ) ;
mapResult . put ( " HEADERS " , header ) ;
// //print check values
// AnalysisLogger.getLogger().debug(
// "DatabaseManagement->columns names: " + listKeys);
if ( result ! = null & & result . size ( ) ! = 0 ) {
// // write operation in the file
for ( int i = 0 ; i < result . size ( ) ; i + + ) {
String RowString = " " ;
Object element = result . get ( i ) ;
// arraylist in which each element is a row result
ArrayList < Object > listvalues = new ArrayList < Object > (
( ( LinkedHashMap < String , Object > ) element ) . values ( ) ) ;
// // print check
// AnalysisLogger.getLogger().debug(
// "DatabaseManagement->values: " + listvalues);
// each row could have several column values
Object [ ] row = listvalues . toArray ( ) ;
if ( row . length > = 1 ) {
for ( int j = 0 ; j < row . length ; j + + ) {
if ( row [ j ] = = null ) {
row [ j ] = " " ;
}
// to parse the obtained results in order to align
// number
// values with those of postgres
String original = row [ j ] . toString ( ) ;
// // check value
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->original value: "
// + original);
String parsed = " " + row [ j ] ;
2014-12-04 17:30:22 +01:00
//remove some special character using regular expressions
String regex1 = " [ \" '`] " ;
String regex2 = " [ \\ t \\ n \\ r \\ f \\ v] " ;
parsed = parsed . replaceAll ( regex1 , " " ) . replaceAll ( regex2 , " " ) ;
2014-11-19 15:38:18 +01:00
if ( original ! = " " ) {
// convert database datatypes to Java datatypes
if ( DataTypeColumns = = null
| | DataTypeColumns . size ( ) = = 0 )
parsed = convertToJavaType ( row [ j ] . getClass ( )
. getName ( ) , parsed ) ;
else
parsed = convertToJavaType (
DataTypeColumns . get ( j ) , parsed ) ;
}
2014-11-28 12:54:54 +01:00
// // check value
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->row: " + (i + 1)
// + " column: " + (j + 1) + " value= "
// + parsed);
//write the whole column value in a file but a truncated value in a map
//to truncate value to 255 characters if it exceeds 255 characters
String truncVal = parsed ;
if ( truncVal . length ( ) > 255 ) {
truncVal = truncVal . substring ( 0 , 255 ) ;
// System.out.println("val truncated: " + valConverted);
// System.out.println("elem geometry truncated");
//add quote if it has been removed with truncation
if ( ( truncVal . startsWith ( " \" " ) ) & & ( ! truncVal . endsWith ( " \" " ) ) ) {
truncVal = truncVal + " \" " ;
// System.out.println("adding quote: " + valConverted);
}
}
2014-11-19 15:38:18 +01:00
// write in a file
if ( j ! = row . length - 1 ) {
// out.write("\"" + parsed + "\"");
// out.write(",");
out . write ( parsed ) ;
out . write ( " , " ) ;
// System.out.println("write column : " + j);
// RowString = RowString + parsed + " ";
if ( j = = 0 ) {
2014-11-28 12:54:54 +01:00
RowString = truncVal ;
2014-11-19 15:38:18 +01:00
} else {
2014-11-28 12:54:54 +01:00
RowString = RowString + " , " + truncVal ;
2014-11-19 15:38:18 +01:00
}
}
if ( j = = row . length - 1 ) {
// out.write("\"" + parsed + "\"");
// out.newLine();
out . write ( parsed ) ;
out . newLine ( ) ;
// to add a row to the map
if ( row . length = = 1 ) {
2014-11-28 12:54:54 +01:00
RowString = truncVal ;
2014-11-19 15:38:18 +01:00
} else {
2014-11-28 12:54:54 +01:00
RowString = RowString + " , " + truncVal ;
2014-11-19 15:38:18 +01:00
}
// to add a row to the map
// RowString = RowString + "," + parsed;
// mapSampleTableResult.put(String.valueOf(i),
// RowString);
2014-11-28 12:54:54 +01:00
// // check value row
// AnalysisLogger.getLogger().debug(
// "writing the value: " + RowString + " key: "
// + String.valueOf(i));
2014-11-19 15:38:18 +01:00
// mapResult.put(Integer.valueOf(i), RowString);
//add in the map only the first 1000 rows if the result list size is greater than 1000
if ( result . size ( ) > 1000 ) {
if ( i < 1000 ) {
mapResult . put ( String . valueOf ( i ) , RowString ) ;
}
} else {
mapResult . put ( String . valueOf ( i ) , RowString ) ;
}
}
}
}
// else if (result.size() == 1) {
//
// // Object RowElement = (Object) result.get(0);
//
// if (row[0] == null) {
// row[0] = "";
// }
//
// // to parse the obtained results in order to align
// // number
// // values with those of postgres
// String original = row[0].toString();
//
// // // check value
// // AnalysisLogger.getLogger().debug(
// // "In DatabaseManagement->original value: "
// // + original);
//
// String parsed = "" + row[0];
//
// if (original != "") {
// // convert database datatypes to Java datatypes
// if (DataTypeColumns == null
// || DataTypeColumns.size() == 0)
// parsed = convertToJavaType(row[0].getClass()
// .getName(), parsed);
// else
// parsed = convertToJavaType(DataTypeColumns.get(0),
// parsed);
// }
//
// out.write(row[0].toString());
// out.newLine();
//
// // to add a row to the map
// mapResult.put(String.valueOf(i), row[0].toString());
//
// }
}
}
AnalysisLogger . getLogger ( ) . debug (
" In DatabaseManagement-> map size without header: " + ( mapResult . size ( ) - 1 ) ) ;
AnalysisLogger . getLogger ( ) . debug (
" In DatabaseManagement-> Writing File and Result Map creation operations terminated " ) ;
2019-11-27 16:58:52 +01:00
} catch ( Throwable e ) {
throw new Exception ( e . getLocalizedMessage ( ) ) ;
2014-11-19 15:38:18 +01:00
} finally {
// close the file
if ( out ! = null ) {
out . close ( ) ;
out = null ;
AnalysisLogger . getLogger ( ) . debug (
" In DatabaseManagement-> File closed " ) ;
}
}
}
2014-11-28 12:54:54 +01:00
// write the table result in the file and build the map of results
private void writeSamplingResultIntoFile ( List < Object > result ,
List < String > DataTypeColumns ) throws Exception {
// // file that will contain result
// BufferedWriter out;
// // String fileName;
// // fileName = "./cfg/" + "table.txt";
// // fileName = "./files/" + "table.txt";
// // fileName =
// //
// "/home/loredana/workspace/DatabasesResourcesManagerAlgorithms/cfg/"
// // + "SampleOnTable.txt";
// // fileName = "./files/" + "SmartSampleOnTable.txt";
// // File file = new File(fileName);
// file = new File(FileName);
// out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(
// file), "UTF-8"));
try {
// to get columns names and result
// write headers in the file
// to recover columns names
if ( header . equals ( " " ) ) {
ArrayList < String > listKeys = new ArrayList < String > (
( ( LinkedHashMap < String , Object > ) ( result . get ( 0 ) ) ) . keySet ( ) ) ;
for ( int i = 0 ; i < listKeys . size ( ) ; i + + ) {
if ( i ! = listKeys . size ( ) - 1 ) {
header = header + listKeys . get ( i ) + " , " ;
} else {
header = header + listKeys . get ( i ) ;
}
}
}
// // print check
// AnalysisLogger.getLogger().debug(
// "DatabaseManagement->HEADERS: " + header);
out . write ( header ) ;
out . newLine ( ) ;
mapResult . put ( " HEADERS " , header ) ;
// //print check values
// AnalysisLogger.getLogger().debug(
// "DatabaseManagement->columns names: " + listKeys);
if ( result ! = null & & result . size ( ) ! = 0 ) {
// // write operation in the file
for ( int i = 0 ; i < result . size ( ) ; i + + ) {
String RowString = " " ;
Object element = result . get ( i ) ;
// arraylist in which each element is a row result
ArrayList < Object > listvalues = new ArrayList < Object > (
( ( LinkedHashMap < String , Object > ) element ) . values ( ) ) ;
// // print check
// AnalysisLogger.getLogger().debug(
// "DatabaseManagement->values: " + listvalues);
// each row could have several column values
Object [ ] row = listvalues . toArray ( ) ;
if ( row . length > = 1 ) {
for ( int j = 0 ; j < row . length ; j + + ) {
if ( row [ j ] = = null ) {
row [ j ] = " " ;
}
// to parse the obtained results in order to align
// number
// values with those of postgres
String original = row [ j ] . toString ( ) ;
// // check value
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->original value: "
// + original);
String parsed = " " + row [ j ] ;
if ( original ! = " " ) {
// convert database datatypes to Java datatypes
if ( DataTypeColumns = = null
| | DataTypeColumns . size ( ) = = 0 )
parsed = convertToJavaType ( row [ j ] . getClass ( )
. getName ( ) , parsed ) ;
else
parsed = convertToJavaType (
DataTypeColumns . get ( j ) , parsed ) ;
}
// // check value
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->row: " + (i + 1)
// + " column: " + (j + 1) + " value= "
// + parsed);
//write the whole column value in a file but a truncated value in a map
//to truncate value to 255 characters if it exceeds 255 characters
String truncVal = parsed ;
if ( truncVal . length ( ) > 255 ) {
truncVal = truncVal . substring ( 0 , 255 ) ;
// System.out.println("val truncated: " + valConverted);
// System.out.println("elem geometry truncated");
//add quote if it has been removed with truncation
if ( ( truncVal . startsWith ( " \" " ) ) & & ( ! truncVal . endsWith ( " \" " ) ) ) {
truncVal = truncVal + " \" " ;
// System.out.println("adding quote: " + valConverted);
}
}
// write in a file
if ( j ! = row . length - 1 ) {
// out.write("\"" + parsed + "\"");
// out.write(",");
out . write ( parsed ) ;
out . write ( " , " ) ;
// System.out.println("write column : " + j);
// RowString = RowString + parsed + " ";
if ( j = = 0 ) {
RowString = truncVal ;
} else {
RowString = RowString + " , " + truncVal ;
}
}
if ( j = = row . length - 1 ) {
// out.write("\"" + parsed + "\"");
// out.newLine();
out . write ( parsed ) ;
out . newLine ( ) ;
// to add a row to the map
if ( row . length = = 1 ) {
RowString = truncVal ;
} else {
RowString = RowString + " , " + truncVal ;
}
// to add a row to the map
// RowString = RowString + "," + parsed;
// mapSampleTableResult.put(String.valueOf(i),
// RowString);
// // check value row
// AnalysisLogger.getLogger().debug(
// "writing the value: " + RowString + " key: "
// + String.valueOf(i));
// mapResult.put(Integer.valueOf(i), RowString);
//add in the map only the first 1000 rows if the result list size is greater than 1000
// if (result.size()>1000){
// if(i<1000){
// mapResult.put(String.valueOf(i), RowString);
// }
//
// }else{
// mapResult.put(String.valueOf(i), RowString);
// }
//add row in a map
mapResult . put ( String . valueOf ( i ) , RowString ) ;
}
}
}
// else if (result.size() == 1) {
//
// // Object RowElement = (Object) result.get(0);
//
// if (row[0] == null) {
// row[0] = "";
// }
//
// // to parse the obtained results in order to align
// // number
// // values with those of postgres
// String original = row[0].toString();
//
// // // check value
// // AnalysisLogger.getLogger().debug(
// // "In DatabaseManagement->original value: "
// // + original);
//
// String parsed = "" + row[0];
//
// if (original != "") {
// // convert database datatypes to Java datatypes
// if (DataTypeColumns == null
// || DataTypeColumns.size() == 0)
// parsed = convertToJavaType(row[0].getClass()
// .getName(), parsed);
// else
// parsed = convertToJavaType(DataTypeColumns.get(0),
// parsed);
// }
//
// out.write(row[0].toString());
// out.newLine();
//
// // to add a row to the map
// mapResult.put(String.valueOf(i), row[0].toString());
//
// }
}
}
AnalysisLogger . getLogger ( ) . debug (
" In DatabaseManagement-> map size without header: " + ( mapResult . size ( ) - 1 ) ) ;
AnalysisLogger . getLogger ( ) . debug (
" In DatabaseManagement-> Writing File and Result Map creation operations terminated " ) ;
2019-11-27 16:58:52 +01:00
} catch ( Throwable e ) {
throw new Exception ( e . getLocalizedMessage ( ) ) ;
2014-11-28 12:54:54 +01:00
} finally {
// close the file
if ( out ! = null ) {
out . close ( ) ;
out = null ;
AnalysisLogger . getLogger ( ) . debug (
" In DatabaseManagement-> File closed " ) ;
}
}
}
2014-06-04 15:56:35 +02:00
// to retrieve datatype columns of a table
private List < String > getDataTypeColumns ( String tableName , String schemaName )
throws Exception {
AnalysisLogger . getLogger ( ) . debug (
" In DatabaseManagement->retrieving data type columns " ) ;
String query ;
List < Object > resultDataTypeColumns = null ;
List < String > DataTypeColumns = new ArrayList < String > ( ) ;
// query to get data type columns
if ( DBType . equals ( POSTGRES ) ) {
query = String . format ( queryForDataTypeColumnsPostgres , tableName ,
schemaName ) ;
resultDataTypeColumns = connection . executeQuery ( query ,
sourceDBSession ) ;
if ( resultDataTypeColumns ! = null ) {
for ( int i = 0 ; i < resultDataTypeColumns . size ( ) ; i + + ) {
// // check data type column
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->column: " + i
// + " data type: "
// + (String) resultDataTypeColumns.get(i));
Object element = resultDataTypeColumns . get ( i ) ;
ArrayList < Object > listvalues = new ArrayList < Object > (
( ( LinkedHashMap < String , Object > ) element ) . values ( ) ) ;
// //print check
// AnalysisLogger.getLogger().debug(
// "DatabaseManagement->datatype values: "
// + listvalues);
2014-11-28 12:54:54 +01:00
//to manage USER-DEFINED types
if ( listvalues . get ( 0 ) . toString ( ) . equals ( " USER-DEFINED " ) ) {
DataTypeColumns . add ( i , ( String ) listvalues . get ( 1 ) ) ;
} else {
2014-06-04 15:56:35 +02:00
DataTypeColumns . add ( i , ( String ) listvalues . get ( 0 ) ) ;
2014-11-28 12:54:54 +01:00
}
2014-06-04 15:56:35 +02:00
}
}
}
if ( DBType . equals ( MYSQL ) ) {
query = String . format ( queryForDataTypeColumnsMysql , tableName ,
schemaName ) ;
// System.out.println("query: " + query);
resultDataTypeColumns = connection . executeQuery ( query ,
sourceDBSession ) ;
if ( resultDataTypeColumns ! = null ) {
for ( int i = 0 ; i < resultDataTypeColumns . size ( ) ; i + + ) {
Object element = resultDataTypeColumns . get ( i ) ;
ArrayList < Object > listvalues = new ArrayList < Object > (
( ( LinkedHashMap < String , Object > ) element ) . values ( ) ) ;
// //print check
// AnalysisLogger.getLogger().debug("VALUES:" + listvalues);
// //print check
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->column: " + i
// + " data type: " + listvalues);
// to add the data types columns
// DataTypeColumns.add(i,
// (String) resultDataTypeColumns.get(i));
DataTypeColumns . add ( i , ( String ) listvalues . get ( 0 ) ) ;
}
}
}
return DataTypeColumns ;
}
private List < String > getColumnNamesTable ( String tableName , String schemaName )
throws Exception {
// List<String> ColumnNames = new ArrayList<String>();
AnalysisLogger . getLogger ( ) . debug (
" DatabaseManagement->retrieving column names " ) ;
// preparing the query to get columns' names
String queryColumns = null ;
// build the query for database postgres. The parameter "schemaName" is
// the schema name.
if ( DBType . equals ( POSTGRES ) ) {
queryColumns = String . format ( queryForColumnsPostgres , tableName ,
schemaName ) ;
}
// build the query for database mysql. The parameter "schemaName" is the
// database name.
if ( DBType . equals ( MYSQL ) ) {
queryColumns = String . format ( queryForColumnsMysql , tableName ,
schemaName ) ;
}
List < Object > columnsSet = null ;
// List<String> listColumnNames = null;
columnsSet = connection . executeQuery ( queryColumns , sourceDBSession ) ;
AnalysisLogger . getLogger ( ) . debug (
" DatabaseManagement->query submitted successfully: "
+ queryColumns ) ;
if ( columnsSet ! = null ) {
listColumnNamesTable = new ArrayList < String > ( ) ;
2014-07-21 12:57:49 +02:00
2014-06-05 11:03:12 +02:00
for ( int i = 0 ; i < columnsSet . size ( ) ; i + + ) {
2014-07-21 12:57:49 +02:00
2014-06-05 11:03:12 +02:00
Object element = columnsSet . get ( i ) ;
2014-06-04 15:56:35 +02:00
2014-07-21 12:57:49 +02:00
ArrayList < Object > listvalues = new ArrayList < Object > (
( ( LinkedHashMap < String , Object > ) element ) . values ( ) ) ;
2014-06-04 15:56:35 +02:00
2014-07-21 12:57:49 +02:00
for ( int j = 0 ; j < listvalues . size ( ) ; j + + ) {
listColumnNamesTable . add ( listvalues . get ( j ) . toString ( ) ) ;
// AnalysisLogger.getLogger().debug(
// "DatabaseManagement->" + listvalues.get(j).toString());
}
2014-06-04 15:56:35 +02:00
}
2014-06-05 11:03:12 +02:00
2014-07-21 12:57:49 +02:00
// for (Object column : columnsSet) {
//
// AnalysisLogger.getLogger().debug(
// "DatabaseManagement->column name: " + column);
//
// listColumnNamesTable.add((String) column);
// }
2014-06-04 15:56:35 +02:00
}
return listColumnNamesTable ;
}
	// to retrieve the column names of a table
	/**
	 * Returns the column names recovered by the most recent column-name
	 * retrieval.
	 *
	 * @return the cached column names; presumably null if no retrieval has
	 *         been performed yet — confirm against callers
	 */
	public List<String> getListColumnNamesTable() {
		return listColumnNamesTable;
	}
	// to return the map which contains the rows that constitute the table
	// result
	/**
	 * Returns the map holding the table result: the key {@code "HEADERS"}
	 * maps to the comma-separated column names, while the row-index keys
	 * ({@code "0"}, {@code "1"}, ...) map to the comma-separated rows.
	 *
	 * @return the shared result map
	 */
	public HashMap<String, String> getMapSampleTableResult() {
		// return mapSampleTableResult;
		return mapResult;
	}
2014-11-28 12:54:54 +01:00
	// to return the file in which the result (originated from a submit
	// query or sampling operations) is stored
	/**
	 * Returns the file in which the result of a submit query or of a
	 * sampling operation has been stored.
	 *
	 * @return the result file; presumably null before any operation has
	 *         written it — confirm against callers
	 */
	public File getFile() {
		// return fileQueryResult;
		return file;
	}
2014-07-29 12:09:49 +02:00
// // to return the file in which the table result (originated from a submit
// // query and sample operations) is stored
// public File getFileSampleTableResult() {
//
// // return fileSample;
// return file;
//
// }
2014-06-04 15:56:35 +02:00
	// to return the map which contains the rows of the query result
	/**
	 * Returns the map holding the query result: the key {@code "HEADERS"}
	 * maps to the comma-separated column names, while the row-index keys
	 * ({@code "0"}, {@code "1"}, ...) map to the comma-separated rows.
	 *
	 * @return the shared result map
	 */
	public HashMap<String, String> getMapQueryResult() {
		// return mapQueryResult;
		return mapResult;
	}
2014-12-05 16:01:07 +01:00
	// get total rows for a result of a submit query operation
	/**
	 * Returns the total number of rows of the result of the latest submit
	 * query operation.
	 *
	 * @return the total row count
	 */
	public int getSubmitQueryTotalRows() {
		return submitQueryTotalRows;
	}
2014-06-04 15:56:35 +02:00
// method that allows to translate the query in another language
public String smartCorrectionOnQuery ( String OriginalQuery , int dialect )
throws ParseException , ConvertException {
// //print check
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement-> smartCorrectionOnQuery, query and dialect: "
// + OriginalQuery + " " + dialect);
String queryCorrected = " " ;
// to translate the query in another language
SqlDialectConverter obj = new SqlDialectConverter ( OriginalQuery ) ;
queryCorrected = obj . convert ( dialect ) ;
AnalysisLogger . getLogger ( ) . debug (
" In DatabaseManagement-> query converted: " + queryCorrected ) ;
return queryCorrected ;
}
}