// database-resource-manager/src/main/java/org/gcube/dataaccess/databases/utils/DatabaseManagement.java
package org.gcube.dataaccess.databases.utils;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.sql.Connection;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.UUID;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataaccess.databases.converter.SqlDialectConverter;
import org.gcube.dataaccess.databases.sampler.Sampler;
import org.gcube.dataaccess.databases.structure.AbstractTableStructure;
import org.gcube.dataaccess.databases.structure.MySQLTableStructure;
import org.gcube.dataaccess.databases.structure.PostgresTableStructure;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.hibernate.SessionFactory;
import com.adventnet.swissqlapi.sql.exception.ConvertException;
import com.adventnet.swissqlapi.sql.parser.ParseException;
/** Class that manages a database, offering connection, table listing, sampling, query submission and dialect-conversion functionalities */
public class DatabaseManagement {
private List<String> tablesname = null;
private String configPath = "";
private String sourceSchemaName = null;
private SessionFactory sourceDBSession;
private String DBType;
private AbstractTableStructure crossTableStructure;
private DatabaseOperations op = new DatabaseOperations();
private MySQLTableStructure mysqlobj;
private ConnectionManager connection;
private long estimatedRows = 0;
// file in which the result of a sample or submit-query operation is stored
private File file = null;
// writer for the result file
private BufferedWriter out;
// total number of rows of a submit-query result
private int submitQueryTotalRows;
// map which contains the rows produced by the sample and submit-query operations
private HashMap<String, String> mapResult = new HashMap<String, String>();
private static final String MYSQL = "MySQL";
private static final String POSTGRES = "Postgres";
private static final String selectTablesQuery = "SELECT distinct table_name FROM information_schema.COLUMNS where table_schema='%1$s'";
private static final String listSchemaNameQuery = "select nspname from pg_namespace where nspname <> 'information_schema' and nspname !~ E'^pg_'";
// query to retrieve datatype columns of a database table
private static final String queryForDataTypeColumnsPostgres = "SELECT data_type, udt_name FROM information_schema.COLUMNS WHERE table_name ='%1$s' and table_schema='%2$s' order by ordinal_position asc";
private static final String queryForDataTypeColumnsMysql = "SELECT data_type FROM information_schema.COLUMNS WHERE table_name ='%1$s' and table_schema='%2$s' order by ordinal_position asc";
// query to get columns' name
private static final String queryForColumnsPostgres = "SELECT column_name FROM information_schema.COLUMNS WHERE table_name ='%1$s' and table_schema='%2$s' order by ordinal_position asc";
private static final String queryForColumnsMysql = "SELECT column_name FROM information_schema.COLUMNS WHERE table_name ='%1$s' and table_schema='%2$s' order by ordinal_position asc";
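/*
 * The query templates above are instantiated via String.format; a minimal
 * illustration (the schema name "public" is a placeholder):
 *
 *   String q = String.format(selectTablesQuery, "public");
 *   // -> SELECT distinct table_name FROM information_schema.COLUMNS
 *   //    where table_schema='public'
 */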
// header line containing the column names of a table
private String header = "";
// list of the column names of a table
List<String> listColumnNamesTable = null;
// dialect codes to set the target language for query translation
public static final int POSTGRESQLDialect = 4;
public static final int MYSQLDialect = 5;
// Constructor
public DatabaseManagement(String configPath) {
this.configPath = configPath;
connection = new ConnectionManager();
}
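/*
 * Typical usage of this class — an illustrative sketch only: the driver,
 * dialect, URL, credentials and names below are placeholders, not values
 * shipped with this component.
 *
 *   DatabaseManagement mgr = new DatabaseManagement("./cfg/");
 *   mgr.createConnection("user", "password", "org.postgresql.Driver",
 *   "org.hibernate.dialect.PostgreSQLDialect",
 *   "jdbc:postgresql://localhost:5432/mydb", "mydb");
 *   List<String> tables = mgr.getTables("mydb", "public");
 *   mgr.closeConnection();
 */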
// for the exact parsing of the results obtained from a database, a check
// against the column data types is needed in order to convert Postgres and
// MySQL data types to Java data types
private String convertToJavaType(String type, String val) throws Exception {
type = type.toLowerCase();
String valConverted = val;
try {
// parse to Long
valConverted = "" + Long.parseLong(valConverted);
}
catch (Exception e) {
// the value is not a Long: check the column data type before trying
// further casts. If the string is hexadecimal, values such as "6F" or
// "6D" would be parsed as Double or Float and returned as 6.0, altering
// the original value, so the numeric cast is performed only when the
// declared column type is numeric.
if (type != null) {
if ((type.contains("decimal")) || (type.contains("double"))
|| (type.contains("numeric"))
|| (type.contains("float"))) {
valConverted = "" + Double.parseDouble(valConverted);
}
if (type.contains("real")) {
valConverted = "" + Float.parseFloat(valConverted);
}
if ((type.contains("string"))
|| (type.contains("varchar"))
|| (type.contains("char"))
|| (type.contains("text"))
|| (type.contains("character varying"))) {
valConverted = "\"" + valConverted + "\"";
}
if ((type.contains("geometry")) || (type.contains("geography"))) {
valConverted = "\"" + valConverted + "\"";
}
}
}
return valConverted;
}
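/*
 * Why the type check above matters — a plain-Java illustration:
 * Double.parseDouble accepts a trailing 'f'/'F'/'d'/'D' type suffix, so a
 * hexadecimal-looking string would be silently converted if the cast were
 * attempted blindly.
 *
 *   Double.parseDouble("6F"); // yields 6.0: "6F" is read as 6 + suffix F
 *   Double.parseDouble("6D"); // yields 6.0: "6D" is read as 6 + suffix D
 *
 * Hence the cast is attempted only for numeric column types.
 */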
// create the database's connection using the input data, without using the
// configuration file.
// Note that in this case, for a database MySQL, the variable
// sourceSchemaName is set to the database's name, while for Postgres it is
// set to the schema's name in the createConnection(String cfgDir, String
// SourceFile) method.
public SessionFactory createConnection(String DatabaseUserName,
String DatabasePassword, String DatabaseDriver,
String DatabaseDialect, String DatabaseURL, String DatabaseName)
throws IOException {
AlgorithmConfiguration config = new AlgorithmConfiguration();
AnalysisLogger.getLogger().debug(
"In DatabaseManagement->DatabaseName: " + DatabaseName);
AnalysisLogger.getLogger().debug(
"In DatabaseManagement->DatabaseURL: " + DatabaseURL);
AnalysisLogger.getLogger().debug(
"In DatabaseManagement->DatabaseUserName: " + DatabaseUserName);
// do not log the plaintext password
AnalysisLogger.getLogger().debug(
"In DatabaseManagement->DatabasePassword: ***");
AnalysisLogger.getLogger().debug(
"In DatabaseManagement->configPath: " + configPath);
AnalysisLogger.getLogger().debug(
"In DatabaseManagement->DatabaseDriver: " + DatabaseDriver);
AnalysisLogger.getLogger().debug(
"In DatabaseManagement->DatabaseDialect: " + DatabaseDialect);
config = connection.setconfiguration(configPath, DatabaseUserName,
DatabasePassword, DatabaseDriver, DatabaseDialect, DatabaseURL,
DatabaseName);
sourceDBSession = connection.initDBConnection(config);
// for a database MySQL the variable sourceSchemaName is the database's
// name, while for Postgres it is the schema's name
if (DatabaseDriver.toLowerCase().contains("postgres")) {
DBType = POSTGRES;
}
if (DatabaseDriver.toLowerCase().contains("mysql")) {
DBType = MYSQL;
}
return sourceDBSession;
}
// create the database's connection using the configuration file.
// Note that for a database Postgres the variable sourceSchemaName is set
// to the schema's name.
// Note: this method is currently not called.
public SessionFactory createConnection(String cfgDir, String SourceFile)
throws Exception {
configPath = cfgDir;
if (!configPath.endsWith("/"))
configPath += "/";
// the variable "sourceSchemaName" is recovered by means of the
// configuration file: for a database Postgres it is the schema's name,
// while for a database MySQL it is the database's name
sourceSchemaName = op.getDBSchema(configPath + SourceFile);
sourceDBSession = connection.initDBConnection(configPath + SourceFile);
// recover the database's type by means of the configuration file
DBType = op.getDBType();
return sourceDBSession;
}
// close the connection
public void closeConnection() {
if(sourceDBSession!=null){
sourceDBSession.close();
AnalysisLogger.getLogger().debug(
"In DatabaseManagement->Connection closed");
sourceDBSession = null;
}
}
/**
 * Method that allows to submit a query through a JDBC connection.
 *
 * @param query
 *            : the query to submit.
 * @param conn
 *            : the JDBC connection to the database.
 * @param pathFile
 *            : the directory path in which the result file is created.
 * @return the query result
 * @throws Exception
 */
public List<Object> submitQuery(String query, Connection conn, String pathFile) throws Exception {
List<Object> results = connection.executeQueryJDBC(query, conn);
if (results != null) {
AnalysisLogger.getLogger().debug(
"DatabaseManagement-> Query result retrieved");
submitQueryTotalRows = results.size();
}
// data type columns are not retrieved here: with a null list, the write
// method falls back to the Java class of each value
List<String> DataTypeColumns = null;
// reset the header (it is an instance field) so that it is rebuilt from
// the result of this query
header = "";
// store the result in a file and build the result map
String FileName = pathFile + "SubmitQueryResult_"+UUID.randomUUID()+".csv";
// write the result in the file and in the map
AnalysisLogger.getLogger().debug(
"In DatabaseManagement-> writing the result in the file: "
+ FileName);
try{
file = new File(FileName);
out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(
file), "UTF-8"));
writeSubmitResultIntoFile(results, DataTypeColumns);
} catch (Throwable e) {
// preserve the original cause when rethrowing
throw new Exception(e.getLocalizedMessage(), e);
} finally {
//close the file
if (out!=null){
out.close();
out = null;
AnalysisLogger.getLogger().debug(
"In DatabaseManagement-> File closed");
}
}
return results;
}
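/*
 * Example call — a sketch assuming a DatabaseManagement instance mgr and a
 * JDBC Connection obtained elsewhere (query and path are placeholders):
 *
 *   List<Object> rows = mgr.submitQuery(
 *   "select * from mytable limit 10", jdbcConnection, "/tmp/");
 *   int total = mgr.getSubmitQueryTotalRows();
 *   File csv = mgr.getFile(); // SubmitQueryResult_<uuid>.csv
 */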
/**
 * Get the tables' names of a database.
 *
 * @param databaseName
 *            : the database's name.
 * @param schemaName
 *            : the schema's name of the database Postgres. For database
 *            MySQL this parameter is null.
 * @return List<String> listTables: a list of tables' names.
 * @throws Exception
 */
public List<String> getTables(String databaseName, String schemaName)
throws Exception {
String query = null;
if (DBType.equals(POSTGRES)) {
query = String.format(selectTablesQuery, schemaName);
AnalysisLogger.getLogger().debug(
"DatabaseManagement->retrieving tables names with query: "
+ query);
// a database Postgres manages the schema concept, so the schema name
// must be specified for every operation on the database
sourceSchemaName = schemaName;
}
if (DBType.equals(MYSQL)) {
query = String.format(selectTablesQuery, databaseName);
AnalysisLogger.getLogger().debug(
"DatabaseManagement->retrieving tables names with query: "
+ query);
// for a database MySQL the schema name is the database name, because
// MySQL does not manage the schema concept
sourceSchemaName = databaseName;
}
List<Object> resultSet = connection
.executeQuery(query, sourceDBSession);
if (resultSet != null) {
AnalysisLogger.getLogger().debug(
"DatabaseManagement->table's list retrieved");
tablesname = new ArrayList<String>();
for (int i = 0; i < resultSet.size(); i++) {
Object element = resultSet.get(i);
ArrayList<Object> listvalues = new ArrayList<Object>(
((LinkedHashMap<String, Object>) element).values());
for (int j = 0; j < listvalues.size(); j++) {
tablesname.add(listvalues.get(j).toString());
}
}
}
return tablesname;
}
/**
 * Get the schemas' names of a database Postgres.
 *
 * @return listSchemas: the list of the schemas' names
 * @throws Exception
 */
public List<String> getSchemas() throws Exception {
List<String> list = new ArrayList<String>();
if (DBType.equals(POSTGRES)) {
List<Object> resultSet = connection.executeQuery(
listSchemaNameQuery, sourceDBSession);
if (resultSet != null) {
for (int i = 0; i < resultSet.size(); i++) {
Object element = resultSet.get(i);
ArrayList<Object> listvalues = new ArrayList<Object>(
((LinkedHashMap<String, Object>) element).values());
for (int j = 0; j < listvalues.size(); j++) {
list.add(listvalues.get(j).toString());
}
}
}
}
return list;
}
/**
 * Get the "Create Table" statement.
 *
 * @param tablename
 *            : the table's name.
 * @param schemaName
 *            : the schema's name of the database Postgres. For database
 *            MySQL this parameter is null.
 * @return String: the create statement.
 * @throws Exception
 */
public String getCreateTable(String tablename, String schemaName)
throws Exception {
String createstatement = "";
if (DBType.equals(POSTGRES)) {
crossTableStructure = getSourceTableObject(tablename, schemaName);
String tableBuildQuery = crossTableStructure.buildUpCreateTable();
createstatement = tableBuildQuery;
AnalysisLogger.getLogger().debug(
"DatabaseManagement->'Create Table' statement: "
+ tableBuildQuery);
}
if (DBType.equals(MYSQL)) {
crossTableStructure = getSourceTableObject(tablename, schemaName);
try {
String createtablestatement = mysqlobj.showCreateTable(
connection, sourceDBSession);
createstatement = createtablestatement;
AnalysisLogger.getLogger().debug(
"DatabaseManagement->'Create Table' statement: "
+ createtablestatement);
} catch (Exception e) {
AnalysisLogger.getLogger().debug(
"DatabaseManagement->Exception: " + e.getMessage());
throw e;
}
}
// to recover the column names of the table
getColumnNamesTable(tablename, schemaName);
return createstatement;
}
// Method that creates the table object for a database
private AbstractTableStructure getSourceTableObject(String tableName,
String schemaName) throws Exception {
sourceSchemaName = schemaName;
if (DBType.equals(MYSQL)) {
mysqlobj = new MySQLTableStructure(sourceSchemaName, tableName,
sourceDBSession);
return mysqlobj;
}
else if (DBType.equals(POSTGRES)) {
PostgresTableStructure postobj = new PostgresTableStructure(
sourceSchemaName, tableName, sourceDBSession);
return postobj;
} else {
return null;
}
}
// Method that returns the estimated number of rows
public long getNumberOfRows(String tablename, String schemaName)
throws Exception {
long rows;
rows = op.calculateElements(connection, DBType, tablename, schemaName,
sourceDBSession);
AnalysisLogger.getLogger().debug(
"DatabaseManagement->number of rows calculated: " + rows);
estimatedRows = rows;
return rows;
}
/**
 * Randomly retrieve 100 rows of a table, preferring the rows with the
 * maximum number of non-null columns. The result is written to a file and
 * to the result map.
 *
 * @param tableName
 *            : the table's name
 * @param schemaName
 *            : the schema's name of the database Postgres. For database
 *            MySQL this parameter is the database name.
 * @param pathFile
 *            : the directory path in which the result file is created.
 * @throws Exception
 */
public void smartSampleOnTable(String tableName, String schemaName,
String pathFile) throws Exception {
List<Object> resultSet = null;
AnalysisLogger
.getLogger()
.debug("DatabaseManagement->starting the Smart Sample on table operation");
AnalysisLogger.getLogger().debug(
"DatabaseManagement->retrieving the 100 rows");
if (estimatedRows == 0) {
estimatedRows = getNumberOfRows(tableName, schemaName);
}
// to retrieve datatype columns of a table
List<String> DataTypeColumns = getDataTypeColumns(tableName, schemaName);
Sampler sampler = new Sampler();
resultSet = sampler.smartSampleOnTable(connection, sourceDBSession,
DBType, tableName, schemaName, estimatedRows, DataTypeColumns);
if (resultSet != null) {
AnalysisLogger.getLogger().debug(
"DatabaseManagement-> rows retrieved");
}
// recover the columns names list and build the header
List<String> listColumns = sampler.getListColumns();
// reset the header (it is an instance field) before building it for this
// table
header = "";
for (int i = 0; i < listColumns.size(); i++) {
if (i != listColumns.size() - 1) {
header = header + listColumns.get(i) + ", ";
} else {
header = header + listColumns.get(i);
}
}
// store table in a file and build the result Map
String FileName = pathFile + "SmartSampling_"+UUID.randomUUID()+".csv";
// write the result in the file and in the map
AnalysisLogger.getLogger().debug(
"In DatabaseManagement-> writing the result in the file: "
+ FileName);
try{
file = new File(FileName);
out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(
file), "UTF-8"));
writeSamplingResultIntoFile(resultSet, DataTypeColumns);
} catch (Throwable e) {
// preserve the original cause when rethrowing
throw new Exception(e.getLocalizedMessage(), e);
} finally {
//close the file
if (out!=null){
out.close();
out = null;
AnalysisLogger.getLogger().debug(
"In DatabaseManagement-> File closed");
}
}
}
/**
 * Retrieve the first 100 rows of a table. The result is written to a file
 * and to the result map.
 *
 * @param tableName
 *            : the table's name
 * @param schemaName
 *            : the schema's name of the database Postgres. For database
 *            MySQL this parameter is the database name.
 * @param pathFile
 *            : the directory path in which the result file is created.
 * @throws Exception
 */
public void sampleOnTable(String tableName, String schemaName,
String pathFile) throws Exception {
List<Object> resultSet = null;
AnalysisLogger.getLogger().debug(
"DatabaseManagement->starting the Sample on table operation");
AnalysisLogger.getLogger().debug(
"DatabaseManagement->retrieving the first 100 rows");
// to retrieve datatype columns of a table
List<String> DataTypeColumns = getDataTypeColumns(tableName, schemaName);
Sampler sampler = new Sampler();
resultSet = sampler.sampleOnTable(connection, sourceDBSession, DBType,
tableName, schemaName, DataTypeColumns);
if (resultSet != null) {
AnalysisLogger.getLogger().debug(
"DatabaseManagement-> rows retrieved");
}
// recover the columns names list and build the header
List<String> listColumns = sampler.getListColumns();
// reset the header (it is an instance field) before building it for this
// table
header = "";
for (int i = 0; i < listColumns.size(); i++) {
if (i != listColumns.size() - 1) {
header = header + listColumns.get(i) + ", ";
} else {
header = header + listColumns.get(i);
}
}
// store table in a file and build the result Map
String FileName = pathFile + "Sampling_"+UUID.randomUUID()+".csv";
// write the result in the file and in the map
AnalysisLogger.getLogger().debug(
"In DatabaseManagement-> writing the result in the file: "
+ FileName);
try{
file = new File(FileName);
out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(
file), "UTF-8"));
writeSamplingResultIntoFile(resultSet, DataTypeColumns);
} catch (Throwable e) {
// preserve the original cause when rethrowing
throw new Exception(e.getLocalizedMessage(), e);
} finally {
//close the file
if (out!=null){
out.close();
out = null;
AnalysisLogger.getLogger().debug(
"In DatabaseManagement-> File closed");
}
}
}
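/*
 * Example call — a sketch assuming a DatabaseManagement instance mgr with
 * an open connection (table, schema and path are placeholders):
 *
 *   mgr.sampleOnTable("mytable", "public", "/tmp/");
 *   File csv = mgr.getFile(); // Sampling_<uuid>.csv
 *   HashMap<String, String> rows = mgr.getMapSampleTableResult();
 */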
/**
 * Retrieve 100 rows of a table in a random manner. The result is written
 * to a file and to the result map.
 *
 * @param tableName
 *            : the table's name
 * @param schemaName
 *            : the schema's name of the database Postgres. For database
 *            MySQL this parameter is the database name.
 * @param pathFile
 *            : the directory path in which the result file is created.
 * @throws Exception
 */
public void randomSampleOnTable(String tableName, String schemaName,
String pathFile) throws Exception {
List<Object> resultSet = null;
AnalysisLogger
.getLogger()
.debug("DatabaseManagement->starting the Random Sample on table operation");
AnalysisLogger.getLogger().debug(
"DatabaseManagement->retrieving 100 rows");
// to retrieve datatype columns of a table
List<String> DataTypeColumns = getDataTypeColumns(tableName, schemaName);
if (estimatedRows == 0) {
estimatedRows = getNumberOfRows(tableName, schemaName);
}
Sampler sampler = new Sampler();
resultSet = sampler.randomSampleOnTable(connection, sourceDBSession,
DBType, tableName, schemaName, estimatedRows, DataTypeColumns);
if (resultSet != null) {
AnalysisLogger.getLogger().debug(
"DatabaseManagement-> rows retrieved");
}
// recover the columns names list and build the header
List<String> listColumns = sampler.getListColumns();
// reset the header (it is an instance field) before building it for this
// table
header = "";
for (int i = 0; i < listColumns.size(); i++) {
if (i != listColumns.size() - 1) {
header = header + listColumns.get(i) + ", ";
} else {
header = header + listColumns.get(i);
}
}
// store table in a file and build the result Map
String FileName = pathFile + "RandomSampling_"+UUID.randomUUID()+".csv";
// write the result in the file and in the map
AnalysisLogger.getLogger().debug(
"In DatabaseManagement-> writing the result in the file: "
+ FileName);
try{
file = new File(FileName);
out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(
file), "UTF-8"));
writeSamplingResultIntoFile(resultSet, DataTypeColumns);
} catch (Throwable e) {
// preserve the original cause when rethrowing
throw new Exception(e.getLocalizedMessage(), e);
} finally {
//close the file
if (out!=null){
out.close();
out = null;
AnalysisLogger.getLogger().debug(
"In DatabaseManagement-> File closed");
}
}
}
// build the map of results.
// Note: this method is currently unused (the calls to it are commented
// out).
private void buildMapResult(List<Object> result,
List<String> DataTypeColumns) throws Exception {
// recover the column names from the first row if the header has not been
// built yet (guarding against a null or empty result)
if (header.equals("") && result != null && result.size() != 0) {
ArrayList<String> listKeys = new ArrayList<String>(
((LinkedHashMap<String, Object>) (result.get(0))).keySet());
for (int i = 0; i < listKeys.size(); i++) {
if (i != listKeys.size() - 1) {
header = header + listKeys.get(i) + ", ";
} else {
header = header + listKeys.get(i);
}
}
}
// add headers
mapResult.put("HEADERS", header);
if (result != null && result.size() != 0) {
// build each row and add it to the map
for (int i = 0; i < result.size(); i++) {
String RowString = "";
Object element = result.get(i);
// arraylist in which each element is a row result
ArrayList<Object> listvalues = new ArrayList<Object>(
((LinkedHashMap<String, Object>) element).values());
// each row could have several column values
Object[] row = listvalues.toArray();
if (row.length >= 1) {
for (int j = 0; j < row.length; j++) {
if (row[j] == null) {
row[j] = "";
}
// parse the obtained results in order to align number
// values with those of Postgres
String original = row[j].toString();
String parsed = "" + row[j];
if (!original.isEmpty()) {
// convert database datatypes to Java datatypes
if (DataTypeColumns == null
|| DataTypeColumns.size() == 0)
parsed = convertToJavaType(row[j].getClass()
.getName(), parsed);
else
parsed = convertToJavaType(
DataTypeColumns.get(j), parsed);
}
// build the row string
if (j != row.length - 1) {
if (j == 0) {
RowString = parsed;
} else {
RowString = RowString + "," + parsed;
}
}
if (j == row.length - 1) {
if (row.length == 1) {
RowString = parsed;
} else {
RowString = RowString + "," + parsed;
}
// add the row to the map
mapResult.put(String.valueOf(i), RowString);
}
}
}
}
}
}
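/*
 * Shape of the result map built above — an illustrative sketch for a
 * hypothetical two-column table (id numeric, name varchar):
 *
 *   "HEADERS" -> "id, name"
 *   "0"       -> "1,\"alpha\""
 *   "1"       -> "2,\"beta\""
 *
 * Keys are the row indices as strings, plus the special "HEADERS" entry;
 * string values are quoted by convertToJavaType.
 */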
// write the submit-query result in the file and build the map of results
private void writeSubmitResultIntoFile(List<Object> result,
List<String> DataTypeColumns) throws Exception {
try{
// recover the column names from the first row if the header has not been
// built yet (guarding against a null or empty result)
if (header.equals("") && result != null && result.size() != 0) {
ArrayList<String> listKeys = new ArrayList<String>(
((LinkedHashMap<String, Object>) (result.get(0))).keySet());
for (int i = 0; i < listKeys.size(); i++) {
if (i != listKeys.size() - 1) {
header = header + listKeys.get(i) + ", ";
} else {
header = header + listKeys.get(i);
}
}
}
// write the header in the file and in the map
out.write(header);
out.newLine();
mapResult.put("HEADERS", header);
if (result != null && result.size() != 0) {
// write each row in the file and in the map
for (int i = 0; i < result.size(); i++) {
String RowString = "";
Object element = result.get(i);
// arraylist in which each element is a row result
ArrayList<Object> listvalues = new ArrayList<Object>(
((LinkedHashMap<String, Object>) element).values());
// each row could have several column values
Object[] row = listvalues.toArray();
if (row.length >= 1) {
for (int j = 0; j < row.length; j++) {
if (row[j] == null) {
row[j] = "";
}
// parse the obtained results in order to align number
// values with those of Postgres
String original = row[j].toString();
String parsed = "" + row[j];
// remove quote characters and normalize whitespace using
// regular expressions
String regex1 = "[\"'`]";
String regex2 = "[ \\t\\n\\r\\f\\v]";
parsed = parsed.replaceAll(regex1, "").replaceAll(regex2, " ");
if (!original.isEmpty()) {
// convert database datatypes to Java datatypes
if (DataTypeColumns == null
|| DataTypeColumns.size() == 0)
parsed = convertToJavaType(row[j].getClass()
.getName(), parsed);
else
parsed = convertToJavaType(
DataTypeColumns.get(j), parsed);
}
// write the whole column value in the file but a value
// truncated to 255 characters in the map
String truncVal = parsed;
if (truncVal.length() > 255) {
truncVal = truncVal.substring(0, 255);
// restore the closing quote if it has been removed by
// the truncation
if ((truncVal.startsWith("\"")) && (!truncVal.endsWith("\""))) {
truncVal = truncVal + "\"";
}
}
// write in the file
if (j != row.length - 1) {
out.write(parsed);
out.write(",");
if (j == 0) {
RowString = truncVal;
} else {
RowString = RowString + "," + truncVal;
}
}
if (j == row.length - 1) {
out.write(parsed);
out.newLine();
// to add a row to the map
if (row.length == 1) {
RowString = truncVal;
} else {
RowString = RowString + "," + truncVal;
}
// add to the map only the first 1000 rows if the result list
// size is greater than 1000
if (result.size()>1000){
if(i<1000){
mapResult.put(String.valueOf(i), RowString);
}
}else{
mapResult.put(String.valueOf(i), RowString);
}
}
}
}
}
}
AnalysisLogger.getLogger().debug(
"In DatabaseManagement-> map size without header: " + (mapResult.size()-1));
AnalysisLogger.getLogger().debug(
"In DatabaseManagement-> Writing File and Result Map creation operations terminated");
} catch (Throwable e) {
// preserve the original cause when rethrowing
throw new Exception(e.getLocalizedMessage(), e);
} finally {
// close the file
if (out!=null){
out.close();
out=null;
AnalysisLogger.getLogger().debug(
"In DatabaseManagement-> File closed");
}
}
}
// write the sampling result in the file and build the map of results
private void writeSamplingResultIntoFile(List<Object> result,
List<String> DataTypeColumns) throws Exception {
try{
// recover the column names from the first row if the header has not been
// built yet (guarding against a null or empty result)
if (header.equals("") && result != null && result.size() != 0) {
ArrayList<String> listKeys = new ArrayList<String>(
((LinkedHashMap<String, Object>) (result.get(0))).keySet());
for (int i = 0; i < listKeys.size(); i++) {
if (i != listKeys.size() - 1) {
header = header + listKeys.get(i) + ", ";
} else {
header = header + listKeys.get(i);
}
}
}
// write the header in the file and in the map
out.write(header);
out.newLine();
mapResult.put("HEADERS", header);
if (result != null && result.size() != 0) {
// write each row in the file and in the map
for (int i = 0; i < result.size(); i++) {
String RowString = "";
Object element = result.get(i);
// arraylist in which each element is a row result
ArrayList<Object> listvalues = new ArrayList<Object>(
((LinkedHashMap<String, Object>) element).values());
// each row could have several column values
Object[] row = listvalues.toArray();
if (row.length >= 1) {
for (int j = 0; j < row.length; j++) {
if (row[j] == null) {
row[j] = "";
}
// parse the obtained results in order to align number
// values with those of Postgres
String original = row[j].toString();
String parsed = "" + row[j];
if (!original.isEmpty()) {
// convert database datatypes to Java datatypes
if (DataTypeColumns == null
|| DataTypeColumns.size() == 0)
parsed = convertToJavaType(row[j].getClass()
.getName(), parsed);
else
parsed = convertToJavaType(
DataTypeColumns.get(j), parsed);
}
// write the whole column value in the file but a value
// truncated to 255 characters in the map
String truncVal = parsed;
if (truncVal.length() > 255) {
truncVal = truncVal.substring(0, 255);
// restore the closing quote if it has been removed by
// the truncation
if ((truncVal.startsWith("\"")) && (!truncVal.endsWith("\""))) {
truncVal = truncVal + "\"";
}
}
// write in the file
if (j != row.length - 1) {
out.write(parsed);
out.write(",");
if (j == 0) {
RowString = truncVal;
} else {
RowString = RowString + "," + truncVal;
}
}
if (j == row.length - 1) {
out.write(parsed);
out.newLine();
// to add a row to the map
if (row.length == 1) {
RowString = truncVal;
} else {
RowString = RowString + "," + truncVal;
}
// add the row to the map (the sampling result is not
// truncated)
mapResult.put(String.valueOf(i), RowString);
}
}
}
}
}
AnalysisLogger.getLogger().debug(
"In DatabaseManagement-> map size without header: " + (mapResult.size()-1));
AnalysisLogger.getLogger().debug(
"In DatabaseManagement-> Writing File and Result Map creation operations terminated");
} catch (Throwable e) {
// preserve the original cause when rethrowing
throw new Exception(e.getLocalizedMessage(), e);
} finally {
// close the file
if (out!=null){
out.close();
out=null;
AnalysisLogger.getLogger().debug(
"In DatabaseManagement-> File closed");
}
}
}
// to retrieve datatype columns of a table
private List<String> getDataTypeColumns(String tableName, String schemaName)
throws Exception {
AnalysisLogger.getLogger().debug(
"In DatabaseManagement->retrieving data type columns");
String query;
List<Object> resultDataTypeColumns = null;
List<String> DataTypeColumns = new ArrayList<String>();
// query to get data type columns
if (DBType.equals(POSTGRES)) {
query = String.format(queryForDataTypeColumnsPostgres, tableName,
schemaName);
resultDataTypeColumns = connection.executeQuery(query,
sourceDBSession);
if (resultDataTypeColumns != null) {
for (int i = 0; i < resultDataTypeColumns.size(); i++) {
Object element = resultDataTypeColumns.get(i);
ArrayList<Object> listvalues = new ArrayList<Object>(
((LinkedHashMap<String, Object>) element).values());
// manage USER-DEFINED types: store the udt_name (e.g. "geometry")
// instead of the generic "USER-DEFINED" label
if(listvalues.get(0).toString().equals("USER-DEFINED")){
DataTypeColumns.add(i, (String) listvalues.get(1));
}else{
DataTypeColumns.add(i, (String) listvalues.get(0));
}
}
}
}
if (DBType.equals(MYSQL)) {
query = String.format(queryForDataTypeColumnsMysql, tableName,
schemaName);
resultDataTypeColumns = connection.executeQuery(query,
sourceDBSession);
if (resultDataTypeColumns != null) {
for (int i = 0; i < resultDataTypeColumns.size(); i++) {
Object element = resultDataTypeColumns.get(i);
ArrayList<Object> listvalues = new ArrayList<Object>(
((LinkedHashMap<String, Object>) element).values());
// add the column data type
DataTypeColumns.add(i, (String) listvalues.get(0));
}
}
}
return DataTypeColumns;
}
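/*
 * Example of the USER-DEFINED handling above for a hypothetical PostGIS
 * column: information_schema reports
 *
 *   data_type = "USER-DEFINED", udt_name = "geometry"
 *
 * so "geometry" is stored in DataTypeColumns and later drives the quoting
 * of geometry/geography values in convertToJavaType.
 */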
private List<String> getColumnNamesTable(String tableName, String schemaName)
throws Exception {
AnalysisLogger.getLogger().debug(
"DatabaseManagement->retrieving column names");
// preparing the query to get columns' names
String queryColumns = null;
// build the query for database postgres. The parameter "schemaName" is
// the schema name.
if (DBType.equals(POSTGRES)) {
queryColumns = String.format(queryForColumnsPostgres, tableName,
schemaName);
}
// build the query for database mysql. The parameter "schemaName" is the
// database name.
if (DBType.equals(MYSQL)) {
queryColumns = String.format(queryForColumnsMysql, tableName,
schemaName);
}
List<Object> columnsSet = connection.executeQuery(queryColumns,
sourceDBSession);
AnalysisLogger.getLogger().debug(
"DatabaseManagement->query submitted successfully: "
+ queryColumns);
if (columnsSet != null) {
listColumnNamesTable = new ArrayList<String>();
for (int i = 0; i < columnsSet.size(); i++) {
Object element = columnsSet.get(i);
ArrayList<Object> listvalues = new ArrayList<Object>(
((LinkedHashMap<String, Object>) element).values());
for (int j = 0; j < listvalues.size(); j++) {
listColumnNamesTable.add(listvalues.get(j).toString());
}
}
}
return listColumnNamesTable;
}
// return the list of column names of a table (populated by
// getColumnNamesTable)
public List<String> getListColumnNamesTable() {
return listColumnNamesTable;
}
// return the map which contains the rows that constitute the table result
public HashMap<String, String> getMapSampleTableResult() {
return mapResult;
}
// return the file in which the result (of a submit-query or sampling
// operation) is stored
public File getFile() {
return file;
}
// return the map which contains the rows of the query result
public HashMap<String, String> getMapQueryResult() {
return mapResult;
}
// get the total number of rows returned by a submit-query operation
public int getSubmitQueryTotalRows(){
return submitQueryTotalRows;
}
// method that allows to translate the query into another SQL dialect
public String smartCorrectionOnQuery(String OriginalQuery, int dialect)
throws ParseException, ConvertException {
String queryCorrected = "";
SqlDialectConverter obj = new SqlDialectConverter(OriginalQuery);
queryCorrected = obj.convert(dialect);
AnalysisLogger.getLogger().debug(
"In DatabaseManagement-> query converted: " + queryCorrected);
return queryCorrected;
}
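/*
 * Example call — a sketch assuming a DatabaseManagement instance mgr; the
 * query is a placeholder:
 *
 *   String pgQuery = mgr.smartCorrectionOnQuery(
 *   "select * from mytable limit 10",
 *   DatabaseManagement.POSTGRESQLDialect);
 */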
}