database-resource-manager/src/main/java/org/gcube/dataanalysis/databases/utils/DatabaseManagement.java

1851 lines
46 KiB
Java

package org.gcube.dataanalysis.databases.utils;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.databases.converter.SqlDialectConverter;
import org.gcube.dataanalysis.databases.sampler.Sampler;
import org.gcube.dataanalysis.databases.structure.AbstractTableStructure;
import org.gcube.dataanalysis.databases.structure.MySQLTableStructure;
import org.gcube.dataanalysis.databases.structure.PostgresTableStructure;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.hibernate.SessionFactory;
import com.adventnet.swissqlapi.sql.exception.ConvertException;
import com.adventnet.swissqlapi.sql.parser.ParseException;
/** Class that allows to manage a database offering several functionalities */
public class DatabaseManagement {
// AbstractTableStructure crossTableStructure;
// private List<String> tablesname = new ArrayList<String>();
private List<String> tablesname = null;
// private String configPath = "./cfg/";
private String configPath = "";
private String sourceSchemaName = null;
private SessionFactory sourceDBSession;
private String DBType;
private AbstractTableStructure crossTableStructure;
// private DBAdapter typesMap;
private DatabaseOperations op = new DatabaseOperations();
// private String destinationDBType;
// private String sourceDBType;
private MySQLTableStructure mysqlobj;
private ConnectionManager connection;
// private Integer estimatedRows = null;
private long estimatedRows = 0;
// file in which the result is stored when sample and query submit
// operations are executed
private File file = null;
// file that will contain result
private BufferedWriter out;
// file in which the table result is stored when sample
// operations are executed
// private File fileSample = null;
// map which contains the rows of the sample and query submit operations
// private LinkedHashMap<Integer, String> mapResult = new
// LinkedHashMap<Integer, String>();
// private HashMap<Integer, String> mapResult = new HashMap<Integer,
// String>();
private HashMap<String, String> mapResult = new HashMap<String, String>();
// map which contains the rows that constitute the table result
// private LinkedHashMap<String, String> mapSampleTableResult = new
// LinkedHashMap<String, String>();
// file in which the result is stored when the query is executed
// private File fileQueryResult = null;
// map that contains the rows generated by the query
// private LinkedHashMap<String, String> mapQueryResult = new
// LinkedHashMap<String, String>();
private static final String MYSQL = "MySQL";
private static final String POSTGRES = "Postgres";
private static final String selectTablesQuery = "SELECT distinct table_name FROM information_schema.COLUMNS where table_schema='%1$s'";
// private static final String listSchemaNameQuery =
// "select schema_name from information_schema.schemata where schema_name <> 'information_schema' and schema_name !~ E'^pg_'";
private static final String listSchemaNameQuery = "select nspname from pg_namespace where nspname <> 'information_schema' and nspname !~ E'^pg_'";
// query to retrieve datatype columns of a database table
private static final String queryForDataTypeColumnsPostgres = "SELECT data_type FROM information_schema.COLUMNS WHERE table_name ='%1$s' and table_schema='%2$s'";
private static final String queryForDataTypeColumnsMysql = "SELECT data_type FROM information_schema.COLUMNS WHERE table_name ='%1$s' and table_schema='%2$s'";
// query to get columns' name
private static final String queryForColumnsPostgres = "SELECT column_name FROM information_schema.COLUMNS WHERE table_name ='%1$s' and table_schema='%2$s'";
private static final String queryForColumnsMysql = "SELECT column_name FROM information_schema.COLUMNS WHERE table_name ='%1$s' and table_schema='%2$s'";
// Header Table that contains the column names of a table
private String header = "";
// list that contains the columns names of a table
List<String> listColumnNamesTable = null;
// variable to set the language for translation
public static final int POSTGRESQLDialect = 4;
public static final int MYSQLDialect = 5;
// Constructor
public DatabaseManagement(String configPath) {
this.configPath = configPath;
connection = new ConnectionManager();
}
// for the exact parsing of the obtained results with the values of
// a database, a check is needed against the data type columns
// to convert from postgres and mysql datatypes to Java datatypes
private String convertToJavaType(String type, String val) throws Exception {
type = type.toLowerCase();
String valConverted = val;
try {
// parse to Long
valConverted = "" + Long.parseLong(valConverted);
// AnalysisLogger.getLogger()
// .debug("In DatabaseManagement->parsed value Long: "
// + valConverted);
}
catch (Exception e) {
try {
// check to fix a problem for the database.
// Indeed if the string is
// an hexadecimal some strings as (6F or 6D that
// are double and float values) are
// casted to Double and the value returned is
// 6.0 altering the original value. If the string is
// an hexadecimal the cast is not performed.
if ((type != null)) {
// // check data type value
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->check data type value: "
// + type);
if ((type.contains("decimal")) || (type.contains("double"))
|| (type.contains("numeric"))
|| (type.contains("float"))) {
valConverted = "" + Double.parseDouble(valConverted);
// // check the parsed value
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->parsed value Double: "
// + valConverted);
}
if (type.contains("real")) {
valConverted = "" + Float.parseFloat(valConverted);
// // check the parsed value
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->parsed value Float: "
// + valConverted);
}
}
} catch (Exception excp) {
throw excp;
}
}
return valConverted;
}
// create the database's connection without using the configuration file but
// using the data input.
// Note that in this case the variable sourceSchemaName for database mysql
// is set to the database name while for database postgres will be set to
// the schema's name in the createConnection(String cfgDir, String
// SourceFile) method
public SessionFactory createConnection(String DatabaseUserName,
String DatabasePassword, String DatabaseDriver,
String DatabaseDialect, String DatabaseURL, String DatabaseName)
throws IOException {
// ConnectionManager connection = new ConnectionManager();
AlgorithmConfiguration config = new AlgorithmConfiguration();
AnalysisLogger.getLogger().debug(
"In DatabaseManagement->DatabaseName: " + DatabaseName);
AnalysisLogger.getLogger().debug(
"In DatabaseManagement->DatabaseURL: " + DatabaseURL);
AnalysisLogger.getLogger().debug(
"In DatabaseManagement->DatabaseUserName: " + DatabaseUserName);
AnalysisLogger.getLogger().debug(
"In DatabaseManagement->DatabasePassword: " + DatabasePassword);
AnalysisLogger.getLogger().debug(
"In DatabaseManagement->configPath: " + configPath);
AnalysisLogger.getLogger().debug(
"In DatabaseManagement->DatabaseDriver: " + DatabaseDriver);
AnalysisLogger.getLogger().debug(
"In DatabaseManagement->DatabaseDialect: " + DatabaseDialect);
config = connection.setconfiguration(configPath, DatabaseUserName,
DatabasePassword, DatabaseDriver, DatabaseDialect, DatabaseURL,
DatabaseName);
sourceDBSession = connection.initDBConnection(config);
// sourceSchemaName = Name; // for a database mysql the variable
// "sourceschemaname" is the database's name
// (while for the database postgresql is the
// schema's name)
if (DatabaseDriver.toLowerCase().contains("postgres")) {
DBType = POSTGRES;
}
if (DatabaseDriver.toLowerCase().contains("mysql")) {
DBType = MYSQL;
}
return sourceDBSession;
}
// create the database's connection using the configuration file.
// note that for database postgres the variable sourceSchemaName is set to
// the schema's name.
public SessionFactory createConnection(String cfgDir, String SourceFile)
throws Exception {
configPath = cfgDir;
if (!configPath.endsWith("/"))
configPath += "/";
sourceSchemaName = op.getDBSchema(configPath + SourceFile); // the
// variable
// "sourceschemaname"
// is
// retrieved
// through
// this
// method
// that
// recovers
// the value
// by means
// of the
// configuration
// file.
// for a database postgresql it is the schema's name while for a
// database mysql it is the database's name.
sourceDBSession = connection.initDBConnection(configPath + SourceFile);
// destinationDBType = POSTGRES;
// sourceDBType = MYSQL;
//
// // typesMap = new DBAdapter(configPath + "/" + sourceDBType + "2"
// // + destinationDBType + ".properties");
DBType = op.getDBType(); // Recover the database's type by means of the
// configuration file
return sourceDBSession;
}
// close the connection
public void closeConnection() {
sourceDBSession.close();
}
/**
* method that allows to submit a query.
*
* @param schemaName
* : the schema's name of the database postgres. For database
* mysql this parameter is null.
* @return query result
* @throws Exception
*/
// method that allows to submit a query
public List<Object> submitQuery(String query, SessionFactory session,
String pathFile) throws Exception {
List<Object> results = new ArrayList<Object>();
results = connection.executeQuery(query, session);
if (results != null) {
AnalysisLogger.getLogger().debug(
"DatabaseManagement-> Query result retrieved");
}
// store table in a file
AnalysisLogger.getLogger().debug(
"In DatabaseManagement->store table in a file");
// store table in a file
// writeQueryResultIntoFile(results);
// writeSampleTableIntoFile(results, tableName, schemaName);
String FileName = pathFile + "QueryResult.csv";
// List<String> listColumnNames = getColumnNamesTable(tableName,
// schemaName);
// to retrieve datatype columns of a table
List<String> DataTypeColumns = null;
// write the result in the file and in the map
AnalysisLogger.getLogger().debug(
"In DatabaseManagement->writing the result in the file: "
+ FileName);
file = new File(FileName);
out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(
file), "UTF-8"));
// String header = null;
// writeTableIntoFile(results, DataTypeColumns, header, tableName,
// schemaName,
// FileName);
writeTableIntoFile(results, DataTypeColumns);
return results;
}
/**
* Get the table's names for a database.
*
* @param schemaName
* : the schema's name of the database postgres. For database
* mysql this parameter is null.
* @return List<String> listTables: a list tables' names.
* @throws Exception
*/
public List<String> getTables(String databaseName, String schemaName)
throws Exception {
String query = null;
if (DBType.equals(POSTGRES)) {
query = String.format(selectTablesQuery, schemaName);
AnalysisLogger.getLogger().debug(
"DatabaseManagement->retrieving tables names with query: "
+ query);
sourceSchemaName = schemaName; // a database postgres manages schema
// concept so for every operation on
// the database it is important to
// specify the schema name
}
if (DBType.equals(MYSQL)) {
query = String.format(selectTablesQuery, databaseName);
AnalysisLogger.getLogger().debug(
"DatabaseManagement->retrieving tables names with query: "
+ query);
sourceSchemaName = databaseName; // for database mysql the schema
// name is the database name
// because the database mysql
// does not manage schema
// concept.
}
List<Object> resultSet = connection
.executeQuery(query, sourceDBSession);
if (resultSet != null) {
// AnalysisLogger.getLogger().debug(
// "DatabaseManagement->table's list: ");
AnalysisLogger.getLogger().debug(
"DatabaseManagement->table's list retrieved");
tablesname = new ArrayList<String>();
for (Object result : resultSet) {
tablesname.add((String) result);
// AnalysisLogger.getLogger().debug(
// "DatabaseManagement->" + (String) result);
}
}
return tablesname;
}
/**
* Get the schema's name for the database Postgresql.
*
* @return listSchemas: the list of the schemas' names
* @throws Exception
*/
public List<String> getSchemas() throws Exception {
List<String> list = new ArrayList<String>();
if (DBType.equals(POSTGRES)) {
List<Object> resultSet = connection.executeQuery(
listSchemaNameQuery, sourceDBSession);
if (resultSet != null) {
for (Object result : resultSet) {
list.add((String) result);
}
}
}
return list;
}
/**
* Get the "Create Table" statement.
*
* @param schemaName
* : the schema's name of the database postgres. For database
* mysql this parameter is null.
* @return String: the create statement .
* @throws Exception
*/
// Get the "Create Table" statement
public String getCreateTable(String tablename, String schemaName)
throws Exception {
String createstatement = "";
if (DBType.equals(POSTGRES)) {
crossTableStructure = getSourceTableObject(tablename, schemaName);
String tableBuildQuery = crossTableStructure.buildUpCreateTable();
createstatement = tableBuildQuery;
AnalysisLogger.getLogger().debug(
"DatabaseManagement->'Create Table' statement: "
+ tableBuildQuery);
}
if (DBType.equals(MYSQL)) {
crossTableStructure = getSourceTableObject(tablename, schemaName);
try {
String createtablestatement = mysqlobj.showCreateTable(
connection, sourceDBSession);
createstatement = createtablestatement;
AnalysisLogger.getLogger().debug(
"DatabaseManagement->'Create Table' statement: "
+ createtablestatement);
} catch (Exception e) {
AnalysisLogger.getLogger().debug(
"DatabaseManagement->Exception: " + e.getMessage());
throw e;
}
}
// to recover the column names of the table
getColumnNamesTable(tablename, schemaName);
return createstatement;
}
// Method that creates the table object for a database
private AbstractTableStructure getSourceTableObject(String tableName,
String schemaName) throws Exception {
sourceSchemaName = schemaName;
if (DBType.equals(MYSQL)) {
mysqlobj = new MySQLTableStructure(sourceSchemaName, tableName,
sourceDBSession);
return mysqlobj;
}
else if (DBType.equals(POSTGRES)) {
PostgresTableStructure postobj = new PostgresTableStructure(
sourceSchemaName, tableName, sourceDBSession);
return postobj;
} else {
return null;
}
}
// Method that returns the estimated number of rows
public long getNumberOfRows(String tablename) throws Exception {
long rows;
rows = op.calculateElements(connection, DBType, tablename,
sourceDBSession);
AnalysisLogger.getLogger().debug(
"DatabaseManagement->rows' number calculated: " + rows);
estimatedRows = rows;
return rows;
}
/**
* retrieve 100 rows of a table randomly that have the maximum number of
* columns not null
*
* @param tableName
* : the table's name
* @param schemaName
* : the schema's name of the database postgres. For database
* mysql this parameter is the database name.
* @return a rows' list.
* @throws Exception
*/
// retrieve 100 rows of a table randomly that have the maximum number of
// columns not null
public void smartSampleOnTable(String tableName, String schemaName,
String pathFile) throws Exception {
List<Object> resultSet = null;
AnalysisLogger
.getLogger()
.debug("DatabaseManagement->starting the Smart Sample on table operation");
AnalysisLogger.getLogger().debug(
"DatabaseManagement->retrieving the 100 rows");
if (estimatedRows == 0) {
// estimatedRows = Integer.valueOf(getNumberOfRows(tableName));
estimatedRows = getNumberOfRows(tableName);
}
// to retrieve datatype columns of a table
List<String> DataTypeColumns = getDataTypeColumns(tableName, schemaName);
Sampler sampler = new Sampler();
resultSet = sampler.smartSampleOnTable(connection, sourceDBSession,
DBType, tableName, schemaName, estimatedRows, DataTypeColumns);
if (resultSet != null) {
AnalysisLogger.getLogger().debug(
"DatabaseManagement-> rows retrieved");
}
// store table in a file
AnalysisLogger.getLogger().debug(
"In DatabaseManagement->store table in a file");
// store table in a file
// writeSampleTableIntoFile(resultSet, tableName, schemaName);
String FileName = pathFile + "SampleResult.csv";
// to recover columns names list
List<String> listColumns = sampler.getListColumns();
// String header = "";
// //print check
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->list columns size: " +listColumns.size());
// to recover columns names list
for (int i = 0; i < listColumns.size(); i++) {
if (i != listColumns.size() - 1) {
header = header + listColumns.get(i) + ",";
} else {
header = header + listColumns.get(i);
}
}
// //print check
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->header: " + header);
// write the result in the file and in the map
AnalysisLogger.getLogger().debug(
"In DatabaseManagement->writing the result in the file: "
+ FileName);
file = new File(FileName);
out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(
file), "UTF-8"));
// mapResult.put("HEADERS", header);
//
// out.write(header);
// out.newLine();
// writeTableIntoFile(resultSet, DataTypeColumns, header, tableName,
// schemaName,
// FileName);
// writeTableIntoFile(resultSet, DataTypeColumns, tableName,
// schemaName,
// FileName);
writeTableIntoFile(resultSet, DataTypeColumns);
// return resultSet;
}
/**
* Retrieve the first 100 rows of a table.
*
* @param tableName
* : the table's name
* @param schemaName
* : the schema's name of the database postgres. For database
* mysql this parameter is the database name.
* @return a rows' list.
* @throws Exception
*/
// retrieve the first 100 rows of a table
public void sampleOnTable(String tableName, String schemaName,
String pathFile) throws Exception {
List<Object> resultSet = null;
AnalysisLogger.getLogger().debug(
"DatabaseManagement->starting the Sample on table operation");
AnalysisLogger.getLogger().debug(
"DatabaseManagement->retrieving the first 100 rows");
// to retrieve datatype columns of a table
List<String> DataTypeColumns = getDataTypeColumns(tableName, schemaName);
Sampler sampler = new Sampler();
resultSet = sampler.sampleOnTable(connection, sourceDBSession, DBType,
tableName, schemaName, DataTypeColumns);
if (resultSet != null) {
AnalysisLogger.getLogger().debug(
"DatabaseManagement-> rows retrieved");
}
// store table in a file
AnalysisLogger.getLogger().debug(
"In DatabaseManagement->store table in a file");
// store table in a file
// writeSampleTableIntoFile(resultSet, tableName, schemaName);
String FileName = pathFile + "SampleResult.csv";
// to recover columns names list
List<String> listColumns = sampler.getListColumns();
// String header = "";
for (int i = 0; i < listColumns.size(); i++) {
if (i != listColumns.size() - 1) {
header = header + listColumns.get(i) + ", ";
} else {
header = header + listColumns.get(i);
}
}
// write the result in the file and in the map
AnalysisLogger.getLogger().debug(
"In DatabaseManagement->writing the result in the file: "
+ FileName);
file = new File(FileName);
out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(
file), "UTF-8"));
// mapResult.put("HEADERS", header);
//
// out.write(header);
// out.newLine();
// System.out.println("HEADER:" + header);
// writeTableIntoFile(resultSet, DataTypeColumns, header, tableName,
// schemaName,
// FileName);
// writeTableIntoFile(resultSet, DataTypeColumns, tableName, schemaName,
// FileName);
writeTableIntoFile(resultSet, DataTypeColumns);
// return the first 100 rows
// return resultSet;
}
/**
* Retrieve 100 rows of a table in a random manner.
*
* @param tableName
* : the table's name
* @param schemaName
* : the schema's name of the database postgres. For database
* mysql this parameter is the database name.
* @return a rows' list.
* @throws Exception
*/
// to retrieve 100 rows of a table in a random manner
public void randomSampleOnTable(String tableName, String schemaName,
String pathFile) throws Exception {
List<Object> resultSet = null;
AnalysisLogger
.getLogger()
.debug("DatabaseManagement->starting the Random Sample on table operation");
AnalysisLogger.getLogger().debug(
"DatabaseManagement->retrieving 100 rows");
// to retrieve datatype columns of a table
List<String> DataTypeColumns = getDataTypeColumns(tableName, schemaName);
if (estimatedRows == 0) {
estimatedRows = getNumberOfRows(tableName);
}
Sampler sampler = new Sampler();
resultSet = sampler.randomSampleOnTable(connection, sourceDBSession,
DBType, tableName, schemaName, estimatedRows, DataTypeColumns);
if (resultSet != null) {
AnalysisLogger.getLogger().debug(
"DatabaseManagement-> rows retrieved");
}
// store table in a file
AnalysisLogger.getLogger().debug(
"In DatabaseManagement->store table in a file");
// store table in a file
// writeSampleTableIntoFile(resultSet, tableName, schemaName);
String FileName = pathFile + "SampleResult.csv";
// to recover columns names list
List<String> listColumns = sampler.getListColumns();
// //print check
// for (int i = 0; i < listColumns.size(); i++) {
// AnalysisLogger.getLogger()
// .debug("In DatabaseManagement->listcolumns: "
// + listColumns.get(i));
// }
// String header = "";
for (int i = 0; i < listColumns.size(); i++) {
if (i != listColumns.size() - 1) {
header = header + listColumns.get(i) + ", ";
} else {
header = header + listColumns.get(i);
}
}
// write the result in the file and in the map
AnalysisLogger.getLogger().debug(
"In DatabaseManagement->writing the result in the file: "
+ FileName);
file = new File(FileName);
out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(
file), "UTF-8"));
// mapResult.put("HEADERS", header);
//
// out.write(header);
// out.newLine();
// writeTableIntoFile(resultSet, DataTypeColumns, header, tableName,
// schemaName,
// FileName);
// writeTableIntoFile(resultSet, DataTypeColumns, tableName, schemaName,
// FileName);
writeTableIntoFile(resultSet, DataTypeColumns);
// return resultSet;
}
// // write the table result in the file and build the map of results
// private void writeTableIntoFile(List<Object> result,
// List<String> DataTypeColumns, String tableName, String schemaName,
// String FileName) throws Exception {
// write the table result in the file and build the map of results
private void writeTableIntoFile(List<Object> result,
List<String> DataTypeColumns) throws Exception {
// // file that will contain result
// BufferedWriter out;
//
// // String fileName;
//
// // fileName = "./cfg/" + "table.txt";
// // fileName = "./files/" + "table.txt";
//
// // fileName =
// //
// "/home/loredana/workspace/DatabasesResourcesManagerAlgorithms/cfg/"
// // + "SampleOnTable.txt";
//
// // fileName = "./files/" + "SmartSampleOnTable.txt";
// // File file = new File(fileName);
//
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->writing the result in the file: "
// + FileName);
//
// file = new File(FileName);
//
// // FileWriter fw = new FileWriter(file.getAbsoluteFile());
// // out = new BufferedWriter(fw);
//
// out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(
// file), "UTF-8"));
//
// // mapResult.put("HEADER", header);
//
// out.write(header);
// out.newLine();
// to get columns names and result
// write headers in the file
// to recover columns names
if (header.equals("")) {
ArrayList<String> listKeys = new ArrayList<String>(
((LinkedHashMap<String, Object>) (result.get(0))).keySet());
for (int i = 0; i < listKeys.size(); i++) {
if (i != listKeys.size() - 1) {
header = header + listKeys.get(i) + ", ";
} else {
header = header + listKeys.get(i);
}
}
}
// // print check
// AnalysisLogger.getLogger().debug(
// "DatabaseManagement->HEADERS: " + header);
out.write(header);
out.newLine();
mapResult.put("HEADERS", header);
// //print check values
// AnalysisLogger.getLogger().debug(
// "DatabaseManagement->columns names: " + listKeys);
if (result != null && result.size() != 0) {
// // write operation in the file
for (int i = 0; i < result.size(); i++) {
String RowString = "";
Object element = result.get(i);
ArrayList<Object> listvalues = new ArrayList<Object>(
((LinkedHashMap<String, Object>) element).values());
// // print check
// AnalysisLogger.getLogger().debug(
// "DatabaseManagement->values: " + listvalues);
Object[] row = listvalues.toArray();
if (row.length > 1) {
for (int j = 0; j < row.length; j++) {
if (row[j] == null) {
row[j] = "";
}
// to parse the obtained results in order to align
// number
// values with those of postgres
String original = row[j].toString();
// // check value
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->original value: "
// + original);
String parsed = "" + row[j];
if (original != "") {
// convert database datatypes to Java datatypes
if (DataTypeColumns == null
|| DataTypeColumns.size() == 0)
parsed = convertToJavaType(row[j]
.getClass().getName(), parsed);
else
parsed = convertToJavaType(
DataTypeColumns.get(j), parsed);
}
// // check value
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->row: " + (i + 1)
// + " column: " + (j + 1) + " value= "
// + parsed);
// write in a file
if (j != row.length - 1) {
out.write("\"" + parsed + "\"");
out.write(",");
// System.out.println("write column : " + j);
// RowString = RowString + parsed + " ";
if (j == 0) {
RowString = parsed;
} else {
RowString = RowString + "," + parsed;
}
}
if (j == row.length - 1) {
out.write("\"" + parsed + "\"");
out.newLine();
// to add a row to the map
RowString = RowString + "," + parsed;
// mapSampleTableResult.put(String.valueOf(i),
// RowString);
// // check value row
//
// AnalysisLogger.getLogger().debug(
// "writing the value: " + RowString + " key: "
// + String.valueOf(i));
// mapResult.put(Integer.valueOf(i), RowString);
mapResult.put(String.valueOf(i), RowString);
}
}
}else if (result.size()==1){
// Object RowElement = (Object) result.get(0);
if (row[0] == null) {
row[0] = "";
}
// to parse the obtained results in order to align
// number
// values with those of postgres
String original = row[0].toString();
// // check value
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->original value: "
// + original);
String parsed = "" + row[0];
if (original != "") {
// convert database datatypes to Java datatypes
if (DataTypeColumns == null
|| DataTypeColumns.size() == 0)
parsed = convertToJavaType(row[0]
.getClass().getName(), parsed);
else
parsed = convertToJavaType(
DataTypeColumns.get(0), parsed);
}
out.write(row[0].toString());
out.newLine();
// to add a row to the map
mapResult.put(String.valueOf(i), row[0].toString());
}
}
}
// close the file
out.close();
}
// to retrieve datatype columns of a table
private List<String> getDataTypeColumns(String tableName, String schemaName)
throws Exception {
AnalysisLogger.getLogger().debug(
"In DatabaseManagement->retrieving data type columns");
String query;
List<Object> resultDataTypeColumns = null;
List<String> DataTypeColumns = new ArrayList<String>();
// query to get data type columns
if (DBType.equals(POSTGRES)) {
query = String.format(queryForDataTypeColumnsPostgres, tableName,
schemaName);
resultDataTypeColumns = connection.executeQuery(query,
sourceDBSession);
if (resultDataTypeColumns != null) {
for (int i = 0; i < resultDataTypeColumns.size(); i++) {
// // check data type column
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->column: " + i
// + " data type: "
// + (String) resultDataTypeColumns.get(i));
Object element = resultDataTypeColumns.get(i);
ArrayList<Object> listvalues = new ArrayList<Object>(
((LinkedHashMap<String, Object>) element).values());
// //print check
// AnalysisLogger.getLogger().debug(
// "DatabaseManagement->datatype values: "
// + listvalues);
DataTypeColumns.add(i, (String) listvalues.get(0));
}
}
}
if (DBType.equals(MYSQL)) {
query = String.format(queryForDataTypeColumnsMysql, tableName,
schemaName);
// System.out.println("query: " + query);
resultDataTypeColumns = connection.executeQuery(query,
sourceDBSession);
if (resultDataTypeColumns != null) {
for (int i = 0; i < resultDataTypeColumns.size(); i++) {
Object element = resultDataTypeColumns.get(i);
ArrayList<Object> listvalues = new ArrayList<Object>(
((LinkedHashMap<String, Object>) element).values());
// //print check
// AnalysisLogger.getLogger().debug("VALUES:" + listvalues);
// //print check
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->column: " + i
// + " data type: " + listvalues);
// to add the data types columns
// DataTypeColumns.add(i,
// (String) resultDataTypeColumns.get(i));
DataTypeColumns.add(i, (String) listvalues.get(0));
}
}
}
return DataTypeColumns;
}
private List<String> getColumnNamesTable(String tableName, String schemaName)
throws Exception {
// List<String> ColumnNames = new ArrayList<String>();
AnalysisLogger.getLogger().debug(
"DatabaseManagement->retrieving column names");
// preparing the query to get columns' names
String queryColumns = null;
// build the query for database postgres. The parameter "schemaName" is
// the schema name.
if (DBType.equals(POSTGRES)) {
queryColumns = String.format(queryForColumnsPostgres, tableName,
schemaName);
}
// build the query for database mysql. The parameter "schemaName" is the
// database name.
if (DBType.equals(MYSQL)) {
queryColumns = String.format(queryForColumnsMysql, tableName,
schemaName);
}
List<Object> columnsSet = null;
// List<String> listColumnNames = null;
columnsSet = connection.executeQuery(queryColumns, sourceDBSession);
AnalysisLogger.getLogger().debug(
"DatabaseManagement->query submitted successfully: "
+ queryColumns);
if (columnsSet != null) {
listColumnNamesTable = new ArrayList<String>();
for (Object column : columnsSet) {
AnalysisLogger.getLogger().debug(
"DatabaseManagement->column name: " + column);
listColumnNamesTable.add((String) column);
}
}
return listColumnNamesTable;
}
// to retrieve the column names of a table
public List<String> getListColumnNamesTable() {
return listColumnNamesTable;
}
// to return the file in which the table result (originated from a submit
// query and sample operations) is stored
public File getFileSampleTableResult() {
// return fileSample;
return file;
}
// to return the map which contains the rows that constitute the table
// result
public HashMap<String, String> getMapSampleTableResult() {
// return mapSampleTableResult;
return mapResult;
}
// to return the file in which the query result (originated from a submit
// query) is stored
public File getFileQueryResult() {
// return fileQueryResult;
return file;
}
// to return the map which contains the rows of the query result
public HashMap<String, String> getMapQueryResult() {
// return mapQueryResult;
return mapResult;
}
// method that allows to translate the query in another language
public String smartCorrectionOnQuery(String OriginalQuery, int dialect)
throws ParseException, ConvertException {
// //print check
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement-> smartCorrectionOnQuery, query and dialect: "
// + OriginalQuery + " " + dialect);
String queryCorrected = "";
// to translate the query in another language
SqlDialectConverter obj = new SqlDialectConverter(OriginalQuery);
queryCorrected = obj.convert(dialect);
AnalysisLogger.getLogger().debug(
"In DatabaseManagement-> query converted: " + queryCorrected);
return queryCorrected;
}
// write the table result in the file and build the map of results
// private void writeSampleTableIntoFile(List<Object> result,
// String tableName, String schemaName) throws Exception {
//
// // file that will contain sample result
// BufferedWriter out;
// String fileName;
//
// // fileName = "./cfg/" + "table.txt";
// // fileName = "./files/" + "table.txt";
//
// fileName =
// "/home/loredana/workspace/DatabasesResourcesManagerAlgorithms/cfg/"
// + "SampleOnTable.txt";
//
// // fileName = "./files/" + "SmartSampleOnTable.txt";
// // File file = new File(fileName);
//
// fileSample = new File(fileName);
//
// FileWriter fw = new FileWriter(fileSample.getAbsoluteFile());
// out = new BufferedWriter(fw);
//
// // for the exact parsing of the obtained results with the values of
// // postgres, a check is needed against the data type columns
//
// String query;
// List<Object> resultDataTypeColumns = null;
//
// List<String> DataTypeColumns = new ArrayList<String>();
//
// // query to get data type columns
// if (DBType.equals(POSTGRES)) {
//
// query = String.format(queryForDataTypeColumnsPostgres, tableName,
// schemaName);
//
// resultDataTypeColumns = connection.executeQuery(query,
// sourceDBSession);
//
// if (resultDataTypeColumns != null) {
//
// for (int i = 0; i < resultDataTypeColumns.size(); i++) {
//
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->column: " + i
// + " data type: "
// + (String) resultDataTypeColumns.get(i));
//
// // to add the data types columns
// DataTypeColumns.add(i,
// (String) resultDataTypeColumns.get(i));
//
// }
//
// }
//
// }
//
// if (DBType.equals(MYSQL)) {
//
//
//
// query = String.format(queryForDataTypeColumnsMysql, tableName,
// schemaName);
//
// System.out.println("query: " + query);
//
// resultDataTypeColumns = connection.executeQuery(query,
// sourceDBSession);
//
// if (resultDataTypeColumns != null) {
//
// for (int i = 0; i < resultDataTypeColumns.size(); i++) {
//
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->column: " + i
// + " data type: "
// + (String) resultDataTypeColumns.get(i));
//
// // to add the data types columns
// DataTypeColumns.add(i,
// (String) resultDataTypeColumns.get(i));
//
// }
//
// }
//
// }
//
// // write operation in the file
// for (int i = 0; i < result.size(); i++) {
//
// String RowString = "";
//
// if (result.get(i).getClass().isArray()) {
//
// Object[] row = (Object[]) result.get(i);
//
// // String RowString = "";
//
// for (int j = 0; j < row.length; j++) {
//
// if (row[j] == null) {
// row[j] = "";
// }
//
//
// // to parse the obtained results in order to align number
// // values with
// // those of postgres
// String original = row[j].toString();
//
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->original value: "
// + original);
//
// String parsed = "" + row[j];
//
// if(original!="") {
//
// try {
//
//
// // parse to Long
// parsed = "" + Long.parseLong(original);
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->parsed value Long: "
// + parsed);
//
// }
//
// catch (Exception e) {
// try {
//
// // check to fix a problem for the database.
// // Indeed if the string is
// // an hexadecimal some strings as (6F or 6D that are
// // double and float values) are
// // casted to Double and the value returned is 6.0
// // altering the original value. If the string is an
// // hexadecimal the cast is not performed.
//
// if ((DataTypeColumns.get(j) != null)) {
//
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->check data type value: "
// + DataTypeColumns.get(j));
//
// if ((DataTypeColumns.get(j).contains("decimal"))
// || (DataTypeColumns.get(j)
// .contains("double"))
// || (DataTypeColumns.get(j)
// .contains("real"))
// || (DataTypeColumns.get(j)
// .contains("numeric"))
// || (DataTypeColumns.get(j)
// .contains("float"))) {
//
// parsed = "" + Double.parseDouble(parsed);
//
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->parsed value Double: "
// + parsed);
//
// }
// }
//
// // if (DBType.equals(POSTGRES)) {
// //
// // if ((DataTypeColumns.get(j) != null)) {
// //
// // AnalysisLogger.getLogger().debug(
// // "In DatabaseManagement->check data type value: "
// // + DataTypeColumns.get(j));
// //
// // if ((DataTypeColumns.get(j)
// // .contains("decimal"))
// // || (DataTypeColumns.get(j)
// // .contains("double"))
// // || (DataTypeColumns.get(j)
// // .contains("real"))
// // || (DataTypeColumns.get(j)
// // .contains("numeric"))) {
// //
// // parsed = ""
// // + Double.parseDouble(parsed);
// //
// // AnalysisLogger.getLogger().debug(
// // "In DatabaseManagement->parsed value Double: "
// // + parsed);
// //
// // }
// // }
// // }
//
// // if (DBType.equals(MYSQL)) {
// //
// // if ((DataTypeColumns.get(j) != null)) {
// //
// // AnalysisLogger.getLogger().debug(
// // "In DatabaseManagement->check data type value: "
// // + DataTypeColumns.get(j));
// //
// // if ((DataTypeColumns.get(j)
// // .contains("decimal"))
// // || (DataTypeColumns.get(j)
// // .contains("double"))
// // || (DataTypeColumns.get(j)
// // .contains("real"))
// // || (DataTypeColumns.get(j)
// // .contains("numeric"))
// // || (DataTypeColumns.get(j)
// // .contains("float"))) {
// //
// // parsed = ""
// // + Double.parseDouble(parsed);
// //
// // AnalysisLogger.getLogger().debug(
// // "In DatabaseManagement->parsed value Double: "
// // + parsed);
// //
// // }
// // }
// // }
//
// // else {
// // parsed = "" + Double.parseDouble(parsed);
// //
// // AnalysisLogger.getLogger().debug(
// // "In DatabaseManagement->parsed value Double: "
// // + parsed);
// //
// // }
//
// } catch (Exception e1) {
// throw e1;
// }
// }
//
// }
//
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->row: " + (i + 1)
// + " column: " + (j + 1) + " value= "
// + parsed);
//
// // write in a file
//
// if (j != row.length - 1) {
//
// out.write(parsed);
//
// out.write(",");
// // System.out.println("write column : " + j);
// RowString = RowString + parsed + " ";
//
// }
// if (j == row.length - 1) {
//
// out.write(parsed);
// out.newLine();
//
// // to add a row to the map
// mapSampleTableResult.put(String.valueOf(i), RowString);
//
// }
//
// }
//
// }
//
// else {
//
// Object RowElement = (Object) result.get(i);
//
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->row: " + (i + 1) + " value= "
// + RowElement.toString());
//
// // write in a file
//
// out.write(RowElement.toString());
// out.newLine();
//
// //to add a row to the map
// mapSampleTableResult.put(String.valueOf(i), RowElement.toString());
//
// }
// }
// // close the file
// out.close();
//
// }
// write the query result in the file and build the map of results
// private void writeQueryResultIntoFile(List<Object> result) throws
// Exception {
//
// // file that will contain query result
// BufferedWriter out;
// String fileName;
//
// fileName =
// "/home/loredana/workspace/DatabasesResourcesManagerAlgorithms/cfg/"
// + "QueryResult.txt";
//
// fileQueryResult = new File(fileName);
//
// FileWriter fw = new FileWriter(fileQueryResult.getAbsoluteFile());
// out = new BufferedWriter(fw);
//
// for (int i = 0; i < result.size(); i++) {
//
// if (result.get(i).getClass().isArray()) {
//
// Object[] row = (Object[]) result.get(i);
//
// String RowString = "";
//
// for (int j = 0; j < row.length; j++) {
//
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->row: " + (i + 1) + " column: "
// + (j + 1) + " value= " + row[j].toString());
//
//
// // write in a file
//
// if (j != row.length - 1) {
//
// out.write(row[j].toString());
// out.write(",");
// RowString = RowString + row[j].toString() + " ";
//
// System.out.println("storing: " + RowString);
//
// // AnalysisLogger.getLogger().debug(
// // "In SubmitQuery->row " + (j+1) + ": " +
// // row[j].toString());
//
// }
// if (j == row.length - 1) {
//
// out.write(row[j].toString());
// out.newLine();
//
// RowString = RowString + row[j].toString();
//
// System.out.println("storing: " + RowString);
//
// //to add a row to the map
// mapQueryResult.put(String.valueOf(i), RowString);
//
// }
//
// }
//
// }
//
// else {
// // String RowElement="";
//
// Object RowElement = (Object) result.get(i);
//
// // AnalysisLogger.getLogger().debug(
// // "In SubmitQuery->Query's Result: ");
//
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->row: " + (i + 1) + " value= "
// + RowElement.toString());
//
// // write in a file
//
// out.write(RowElement.toString());
// out.newLine();
//
// //to add a row to the map
// mapQueryResult.put(String.valueOf(i), RowElement.toString());
//
// }
//
// }
//
// //close file
// out.close();
//
// }
// to check if the string is an hexadecimal
// private boolean isHexadecimal(String text) {
//
// boolean isHexadecimal = false;
//
// text = text.trim();
//
// char[] hexDigits = { 'a', 'b', 'c', 'd', 'e', 'f', 'A', 'B', 'C', 'D',
// 'E', 'F' };
//
// check: for (char symbol : text.toCharArray()) {
//
// for (char hexDigit : hexDigits) {
//
// if (symbol == hexDigit) {
//
// isHexadecimal = true;
//
// break check;
//
// }
//
// }
//
// }
//
// return isHexadecimal;
//
// }
// private void writeTableIntoFile(List<Object> result) throws IOException {
//
// // file that will contain query's result
// BufferedWriter out;
// String fileName;
//
// // fileName = "./cfg/" + "table.txt";
// fileName = "./files/" + "table.txt";
//
// // fileName = "./files/" + "SmartSampleOnTable.txt";
// File file = new File(fileName);
//
// FileWriter fw = new FileWriter(file.getAbsoluteFile());
// out = new BufferedWriter(fw);
//
// for (int i = 0; i < result.size(); i++) {
//
// // System.out.println(result.get(i).getClass().isArray());
//
// if (result.get(i).getClass().isArray()) {
//
// Object[] row = (Object[]) result.get(i);
//
// String RowString = "";
//
// for (int j = 0; j < row.length; j++) {
//
// if (row[j] == null) {
// row[j] = "";
// }
//
// // to parse the obtained results to align number values with
// // those of postgres
// String original = row[j].toString();
// String parsed = "" + row[j];
//
// try {
// parsed = "" + Long.parseLong(parsed);
// } catch (Exception e) {
// try {
// parsed = "" + Double.parseDouble(parsed);
// } catch (Exception e1) {
// }
// }
//
// // AnalysisLogger.getLogger().debug(
// // "In SubmitQuery->row: " + (i + 1) + " column: "
// // + (j + 1) + " value= "
// // + row[j].toString());
//
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->row: " + (i + 1) + " column: "
// + (j + 1) + " value= " + parsed);
//
// // System.out.print("\"" + row[j] + "\"; ");
//
// // write in a file
//
// if (j != row.length - 1) {
//
// System.out
// .println("scrittura su file: elementi intermedi");
// // out.write("1");
//
// // out.write(row[j].toString());
// out.write(parsed);
//
// out.write(",");
// RowString = RowString + row[j].toString() + " ";
//
// // AnalysisLogger.getLogger().debug(
// // "In SubmitQuery->row " + (j+1) + ": " +
// // row[j].toString());
//
// }
// if (j == row.length - 1) {
//
// System.out
// .println("scrittura su file: elemento finale");
//
// // out.write(row[j].toString());
// out.write(parsed);
// out.newLine();
//
// }
//
// }
//
// }
//
// else {
// // String RowElement="";
//
// Object RowElement = (Object) result.get(i);
//
// // AnalysisLogger.getLogger().debug(
// // "In SubmitQuery->Query's Result: ");
//
// AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->row: " + (i + 1) + " value= "
// + RowElement.toString());
//
// // write in a file
//
// out.write(RowElement.toString());
// out.newLine();
//
// }
// }
//
// out.close();
//
// }
}