class DatabaseManagement modified so that the result is no longer made available as a CSV file for the submitquery and sampling operations.

git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-access/DatabasesResourcesManager@99051 82a268e6-3cf1-43bd-a215-b396298e98cf
This commit is contained in:
Loredana Liccardo 2014-07-29 08:30:42 +00:00
parent 6769c97a2d
commit 21d1bb5a27
1 changed file with 245 additions and 386 deletions

View File

@ -112,25 +112,25 @@ public class DatabaseManagement {
private String convertToJavaType(String type, String val) throws Exception { private String convertToJavaType(String type, String val) throws Exception {
type = type.toLowerCase(); type = type.toLowerCase();
// AnalysisLogger.getLogger() // AnalysisLogger.getLogger()
// .debug("In DatabaseManagement->type: " // .debug("In DatabaseManagement->type: "
// + type); // + type);
String valConverted = val; String valConverted = val;
// AnalysisLogger.getLogger() // AnalysisLogger.getLogger()
// .debug("In DatabaseManagement->val: " // .debug("In DatabaseManagement->val: "
// + valConverted); // + valConverted);
try { try {
// parse to Long // parse to Long
valConverted = "" + Long.parseLong(valConverted); valConverted = "" + Long.parseLong(valConverted);
// AnalysisLogger.getLogger() // AnalysisLogger.getLogger()
// .debug("In DatabaseManagement->parsed value Long: " // .debug("In DatabaseManagement->parsed value Long: "
// + valConverted); // + valConverted);
} }
@ -160,9 +160,9 @@ public class DatabaseManagement {
valConverted = "" + Double.parseDouble(valConverted); valConverted = "" + Double.parseDouble(valConverted);
// // check the parsed value // // check the parsed value
// AnalysisLogger.getLogger().debug( // AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->parsed value Double: " // "In DatabaseManagement->parsed value Double: "
// + valConverted); // + valConverted);
} }
@ -171,24 +171,24 @@ public class DatabaseManagement {
valConverted = "" + Float.parseFloat(valConverted); valConverted = "" + Float.parseFloat(valConverted);
// // check the parsed value // // check the parsed value
// AnalysisLogger.getLogger().debug( // AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->parsed value Float: " // "In DatabaseManagement->parsed value Float: "
// + valConverted); // + valConverted);
} }
if (((type.contains("string")))||(type.contains("varchar")) || (type.contains("char")) if (((type.contains("string")))
|| (type.contains("text")) || (type || (type.contains("varchar"))
.contains("character varying"))) { || (type.contains("char"))
|| (type.contains("text"))
valConverted= "\"" + valConverted + "\""; || (type.contains("character varying"))) {
valConverted = "\"" + valConverted + "\"";
// // check the parsed value // // check the parsed value
// AnalysisLogger.getLogger().debug( // AnalysisLogger.getLogger().debug(
// "In DatabaseManagement->parsed value string: " // "In DatabaseManagement->parsed value string: "
// + valConverted); // + valConverted);
} }
@ -343,38 +343,22 @@ public class DatabaseManagement {
AnalysisLogger.getLogger().debug( AnalysisLogger.getLogger().debug(
"In DatabaseManagement->store table in a file"); "In DatabaseManagement->store table in a file");
// store table in a file
// writeQueryResultIntoFile(results);
// writeSampleTableIntoFile(results, tableName, schemaName);
String FileName = pathFile + "QueryResult.csv";
// List<String> listColumnNames = getColumnNamesTable(tableName, // List<String> listColumnNames = getColumnNamesTable(tableName,
// schemaName); // schemaName);
// to retrieve datatype columns of a table // to retrieve datatype columns of a table
List<String> DataTypeColumns = null; List<String> DataTypeColumns = null;
// write the result in the file and in the map // // store table in a file
// String FileName = pathFile + "QueryResult.csv";
AnalysisLogger.getLogger().debug( // // write the result in the file and in the map
"In DatabaseManagement->writing the result in the file: " // AnalysisLogger.getLogger().debug(
+ FileName); // "In DatabaseManagement->writing the result in the file: "
// + FileName);
file = new File(FileName); // file = new File(FileName);
// out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(
out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream( // file), "UTF-8"));
file), "UTF-8")); // writeTableIntoFile(results, DataTypeColumns);
// String header = null;
// writeTableIntoFile(results, DataTypeColumns, header, tableName,
// schemaName,
// FileName);
writeTableIntoFile(results, DataTypeColumns);
return results; return results;
@ -610,13 +594,14 @@ public class DatabaseManagement {
} }
// Method that returns the estimated number of rows // Method that returns the estimated number of rows
public long getNumberOfRows(String tablename, String schemaName) throws Exception { public long getNumberOfRows(String tablename, String schemaName)
throws Exception {
long rows; long rows;
rows = op.calculateElements(connection, DBType, tablename, schemaName, rows = op.calculateElements(connection, DBType, tablename, schemaName,
sourceDBSession); sourceDBSession);
AnalysisLogger.getLogger().debug( AnalysisLogger.getLogger().debug(
"DatabaseManagement->rows' number calculated: " + rows); "DatabaseManagement->rows' number calculated: " + rows);
@ -718,23 +703,7 @@ public class DatabaseManagement {
out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream( out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(
file), "UTF-8")); file), "UTF-8"));
// mapResult.put("HEADERS", header); // writeTableIntoFile(resultSet, DataTypeColumns);
//
// out.write(header);
// out.newLine();
// writeTableIntoFile(resultSet, DataTypeColumns, header, tableName,
// schemaName,
// FileName);
// writeTableIntoFile(resultSet, DataTypeColumns, tableName,
// schemaName,
// FileName);
writeTableIntoFile(resultSet, DataTypeColumns);
// return resultSet;
} }
/** /**
@ -758,7 +727,6 @@ public class DatabaseManagement {
AnalysisLogger.getLogger().debug( AnalysisLogger.getLogger().debug(
"DatabaseManagement->starting the Sample on table operation"); "DatabaseManagement->starting the Sample on table operation");
AnalysisLogger.getLogger().debug( AnalysisLogger.getLogger().debug(
"DatabaseManagement->retrieving the first 100 rows"); "DatabaseManagement->retrieving the first 100 rows");
@ -778,57 +746,27 @@ public class DatabaseManagement {
AnalysisLogger.getLogger().debug( AnalysisLogger.getLogger().debug(
"In DatabaseManagement->store table in a file"); "In DatabaseManagement->store table in a file");
// store table in a file
// writeSampleTableIntoFile(resultSet, tableName, schemaName);
String FileName = pathFile + "SampleResult.csv";
// to recover columns names list // to recover columns names list
List<String> listColumns = sampler.getListColumns(); List<String> listColumns = sampler.getListColumns();
// String header = "";
for (int i = 0; i < listColumns.size(); i++) { for (int i = 0; i < listColumns.size(); i++) {
if (i != listColumns.size() - 1) { if (i != listColumns.size() - 1) {
header = header + listColumns.get(i) + ", "; header = header + listColumns.get(i) + ", ";
} else { } else {
header = header + listColumns.get(i); header = header + listColumns.get(i);
} }
} }
// write the result in the file and in the map // // store table in a file
// String FileName = pathFile + "SampleResult.csv";
AnalysisLogger.getLogger().debug( // // write the result in the file and in the map
"In DatabaseManagement->writing the result in the file: " // AnalysisLogger.getLogger().debug(
+ FileName); // "In DatabaseManagement->writing the result in the file: "
// + FileName);
file = new File(FileName); // file = new File(FileName);
// out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(
out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream( // file), "UTF-8"));
file), "UTF-8")); // writeTableIntoFile(resultSet, DataTypeColumns);
// mapResult.put("HEADERS", header);
//
// out.write(header);
// out.newLine();
// System.out.println("HEADER:" + header);
// writeTableIntoFile(resultSet, DataTypeColumns, header, tableName,
// schemaName,
// FileName);
// writeTableIntoFile(resultSet, DataTypeColumns, tableName, schemaName,
// FileName);
writeTableIntoFile(resultSet, DataTypeColumns);
// return the first 100 rows
// return resultSet;
} }
@ -853,7 +791,6 @@ public class DatabaseManagement {
AnalysisLogger AnalysisLogger
.getLogger() .getLogger()
.debug("DatabaseManagement->starting the Random Sample on table operation"); .debug("DatabaseManagement->starting the Random Sample on table operation");
AnalysisLogger.getLogger().debug( AnalysisLogger.getLogger().debug(
"DatabaseManagement->retrieving 100 rows"); "DatabaseManagement->retrieving 100 rows");
@ -861,7 +798,6 @@ public class DatabaseManagement {
List<String> DataTypeColumns = getDataTypeColumns(tableName, schemaName); List<String> DataTypeColumns = getDataTypeColumns(tableName, schemaName);
if (estimatedRows == 0) { if (estimatedRows == 0) {
estimatedRows = getNumberOfRows(tableName, schemaName); estimatedRows = getNumberOfRows(tableName, schemaName);
} }
@ -878,14 +814,7 @@ public class DatabaseManagement {
AnalysisLogger.getLogger().debug( AnalysisLogger.getLogger().debug(
"In DatabaseManagement->store table in a file"); "In DatabaseManagement->store table in a file");
// store table in a file
// writeSampleTableIntoFile(resultSet, tableName, schemaName);
String FileName = pathFile + "SampleResult.csv";
// to recover columns names list // to recover columns names list
List<String> listColumns = sampler.getListColumns(); List<String> listColumns = sampler.getListColumns();
// //print check // //print check
@ -897,44 +826,23 @@ public class DatabaseManagement {
// String header = ""; // String header = "";
for (int i = 0; i < listColumns.size(); i++) { for (int i = 0; i < listColumns.size(); i++) {
if (i != listColumns.size() - 1) { if (i != listColumns.size() - 1) {
header = header + listColumns.get(i) + ", "; header = header + listColumns.get(i) + ", ";
} else { } else {
header = header + listColumns.get(i); header = header + listColumns.get(i);
} }
} }
// write the result in the file and in the map // // store table in a file
// String FileName = pathFile + "SampleResult.csv";
AnalysisLogger.getLogger().debug( // // write the result in the file and in the map
"In DatabaseManagement->writing the result in the file: " // AnalysisLogger.getLogger().debug(
+ FileName); // "In DatabaseManagement->writing the result in the file: "
// + FileName);
file = new File(FileName); // file = new File(FileName);
// out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(
out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream( // file), "UTF-8"));
file), "UTF-8")); // writeTableIntoFile(resultSet, DataTypeColumns);
// mapResult.put("HEADERS", header);
//
// out.write(header);
// out.newLine();
// writeTableIntoFile(resultSet, DataTypeColumns, header, tableName,
// schemaName,
// FileName);
// writeTableIntoFile(resultSet, DataTypeColumns, tableName, schemaName,
// FileName);
writeTableIntoFile(resultSet, DataTypeColumns);
// return resultSet;
} }
@ -943,237 +851,188 @@ public class DatabaseManagement {
// List<String> DataTypeColumns, String tableName, String schemaName, // List<String> DataTypeColumns, String tableName, String schemaName,
// String FileName) throws Exception { // String FileName) throws Exception {
// write the table result in the file and build the map of results // // write the table result in the file and build the map of results
private void writeTableIntoFile(List<Object> result, // private void writeTableIntoFile(List<Object> result,
List<String> DataTypeColumns) throws Exception { // List<String> DataTypeColumns) throws Exception {
//
// // file that will contain result // // // file that will contain result
// BufferedWriter out; // // BufferedWriter out;
// // // // String fileName;
// // String fileName; // // // fileName = "./cfg/" + "table.txt";
// // // // fileName = "./files/" + "table.txt";
// // fileName = "./cfg/" + "table.txt"; // // // fileName =
// // fileName = "./files/" + "table.txt"; // // //
// // // "/home/loredana/workspace/DatabasesResourcesManagerAlgorithms/cfg/"
// // fileName = // // // + "SampleOnTable.txt";
// // // // // fileName = "./files/" + "SmartSampleOnTable.txt";
// "/home/loredana/workspace/DatabasesResourcesManagerAlgorithms/cfg/" // // // File file = new File(fileName);
// // + "SampleOnTable.txt"; // // file = new File(FileName);
// // // out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(
// // fileName = "./files/" + "SmartSampleOnTable.txt"; // // file), "UTF-8"));
// // File file = new File(fileName); //
// // // to get columns names and result
// AnalysisLogger.getLogger().debug( // // write headers in the file
// "In DatabaseManagement->writing the result in the file: " // // to recover columns names
// + FileName); // if (header.equals("")) {
// // ArrayList<String> listKeys = new ArrayList<String>(
// file = new File(FileName); // ((LinkedHashMap<String, Object>) (result.get(0))).keySet());
// //
// // FileWriter fw = new FileWriter(file.getAbsoluteFile()); // for (int i = 0; i < listKeys.size(); i++) {
// // out = new BufferedWriter(fw); // if (i != listKeys.size() - 1) {
// // header = header + listKeys.get(i) + ", ";
// out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream( // } else {
// file), "UTF-8")); // header = header + listKeys.get(i);
// // }
// // mapResult.put("HEADER", header); // }
// // }
// out.write(header); //
// out.newLine(); // // // print check
// // AnalysisLogger.getLogger().debug(
// to get columns names and result // // "DatabaseManagement->HEADERS: " + header);
//
// write headers in the file // out.write(header);
// out.newLine();
// to recover columns names // mapResult.put("HEADERS", header);
//
if (header.equals("")) { // // //print check values
ArrayList<String> listKeys = new ArrayList<String>( // // AnalysisLogger.getLogger().debug(
((LinkedHashMap<String, Object>) (result.get(0))).keySet()); // // "DatabaseManagement->columns names: " + listKeys);
//
for (int i = 0; i < listKeys.size(); i++) { // if (result != null && result.size() != 0) {
// // // write operation in the file
if (i != listKeys.size() - 1) { // for (int i = 0; i < result.size(); i++) {
// String RowString = "";
header = header + listKeys.get(i) + ", "; // Object element = result.get(i);
//
} else { // // arraylist in which each element is a row result
// ArrayList<Object> listvalues = new ArrayList<Object>(
header = header + listKeys.get(i); // ((LinkedHashMap<String, Object>) element).values());
//
} // // // print check
} // // AnalysisLogger.getLogger().debug(
// // "DatabaseManagement->values: " + listvalues);
} //
// // each row could have several column values
// // print check // Object[] row = listvalues.toArray();
// AnalysisLogger.getLogger().debug( // if (row.length >= 1) {
// "DatabaseManagement->HEADERS: " + header); // for (int j = 0; j < row.length; j++) {
// if (row[j] == null) {
out.write(header); // row[j] = "";
out.newLine(); // }
// // to parse the obtained results in order to align
mapResult.put("HEADERS", header); // // number
// // values with those of postgres
// //print check values // String original = row[j].toString();
// AnalysisLogger.getLogger().debug( //
// "DatabaseManagement->columns names: " + listKeys); // // // check value
// // AnalysisLogger.getLogger().debug(
if (result != null && result.size() != 0) { // // "In DatabaseManagement->original value: "
// // + original);
// // write operation in the file //
for (int i = 0; i < result.size(); i++) { // String parsed = "" + row[j];
// if (original != "") {
String RowString = ""; // // convert database datatypes to Java datatypes
// if (DataTypeColumns == null
Object element = result.get(i); // || DataTypeColumns.size() == 0)
// parsed = convertToJavaType(row[j].getClass()
//arraylist in which each element is a row result // .getName(), parsed);
ArrayList<Object> listvalues = new ArrayList<Object>( // else
((LinkedHashMap<String, Object>) element).values()); // parsed = convertToJavaType(
// DataTypeColumns.get(j), parsed);
// // print check // }
// AnalysisLogger.getLogger().debug( //
// "DatabaseManagement->values: " + listvalues); // // // check value
// // AnalysisLogger.getLogger().debug(
//each row could have several column values // // "In DatabaseManagement->row: " + (i + 1)
Object[] row = listvalues.toArray(); // // + " column: " + (j + 1) + " value= "
// // + parsed);
if (row.length >= 1) { //
// // write in a file
for (int j = 0; j < row.length; j++) { // if (j != row.length - 1) {
// // out.write("\"" + parsed + "\"");
if (row[j] == null) { // // out.write(",");
row[j] = ""; // out.write(parsed);
} // out.write(",");
//
// to parse the obtained results in order to align // // System.out.println("write column : " + j);
// number // // RowString = RowString + parsed + " ";
// values with those of postgres // if (j == 0) {
String original = row[j].toString(); // RowString = parsed;
// } else {
// // check value // RowString = RowString + "," + parsed;
// AnalysisLogger.getLogger().debug( // }
// "In DatabaseManagement->original value: " // }
// + original); // if (j == row.length - 1) {
// // out.write("\"" + parsed + "\"");
String parsed = "" + row[j]; // // out.newLine();
// out.write(parsed);
if (original != "") { // out.newLine();
// convert database datatypes to Java datatypes //
if (DataTypeColumns == null // // to add a row to the map
|| DataTypeColumns.size() == 0) // if (row.length == 1) {
parsed = convertToJavaType(row[j].getClass() // RowString = parsed;
.getName(), parsed); // } else {
else // RowString = RowString + "," + parsed;
parsed = convertToJavaType( // }
DataTypeColumns.get(j), parsed); //
} // // to add a row to the map
// // RowString = RowString + "," + parsed;
// // check value // // mapSampleTableResult.put(String.valueOf(i),
// AnalysisLogger.getLogger().debug( // // RowString);
// "In DatabaseManagement->row: " + (i + 1) //
// + " column: " + (j + 1) + " value= " // // // check value row
// + parsed); // // AnalysisLogger.getLogger().debug(
// // "writing the value: " + RowString + " key: "
// write in a file // // + String.valueOf(i));
//
if (j != row.length - 1) { // // mapResult.put(Integer.valueOf(i), RowString);
// mapResult.put(String.valueOf(i), RowString);
// out.write("\"" + parsed + "\""); // }
// }
// out.write(","); // }
out.write(parsed); // // else if (result.size() == 1) {
out.write(","); // //
// // // Object RowElement = (Object) result.get(0);
// System.out.println("write column : " + j); // //
// RowString = RowString + parsed + " "; // // if (row[0] == null) {
if (j == 0) { // // row[0] = "";
RowString = parsed; // // }
} else { // //
// // // to parse the obtained results in order to align
RowString = RowString + "," + parsed; // // // number
} // // // values with those of postgres
// // String original = row[0].toString();
} // //
if (j == row.length - 1) { // // // // check value
// // // AnalysisLogger.getLogger().debug(
// out.write("\"" + parsed + "\""); // // // "In DatabaseManagement->original value: "
// out.newLine(); // // // + original);
// //
out.write(parsed); // // String parsed = "" + row[0];
out.newLine(); // //
// // if (original != "") {
// to add a row to the map // // // convert database datatypes to Java datatypes
if (row.length ==1){ // // if (DataTypeColumns == null
RowString = parsed; // // || DataTypeColumns.size() == 0)
}else{ // // parsed = convertToJavaType(row[0].getClass()
RowString = RowString + "," + parsed; // // .getName(), parsed);
} // // else
// // parsed = convertToJavaType(DataTypeColumns.get(0),
// to add a row to the map // // parsed);
// RowString = RowString + "," + parsed; // // }
// mapSampleTableResult.put(String.valueOf(i), // //
// RowString); // // out.write(row[0].toString());
// // out.newLine();
// // check value row // //
// AnalysisLogger.getLogger().debug( // // // to add a row to the map
// "writing the value: " + RowString + " key: " // // mapResult.put(String.valueOf(i), row[0].toString());
// + String.valueOf(i)); // //
// // }
// mapResult.put(Integer.valueOf(i), RowString); // }
mapResult.put(String.valueOf(i), RowString); // }
// // close the file
} // out.close();
// }
}
}
// else if (result.size() == 1) {
//
// // Object RowElement = (Object) result.get(0);
//
// if (row[0] == null) {
// row[0] = "";
// }
//
// // to parse the obtained results in order to align
// // number
// // values with those of postgres
// String original = row[0].toString();
//
// // // check value
// // AnalysisLogger.getLogger().debug(
// // "In DatabaseManagement->original value: "
// // + original);
//
// String parsed = "" + row[0];
//
// if (original != "") {
// // convert database datatypes to Java datatypes
// if (DataTypeColumns == null
// || DataTypeColumns.size() == 0)
// parsed = convertToJavaType(row[0].getClass()
// .getName(), parsed);
// else
// parsed = convertToJavaType(DataTypeColumns.get(0),
// parsed);
// }
//
// out.write(row[0].toString());
// out.newLine();
//
// // to add a row to the map
// mapResult.put(String.valueOf(i), row[0].toString());
//
// }
}
}
// close the file
out.close();
}
// to retrieve datatype columns of a table // to retrieve datatype columns of a table
private List<String> getDataTypeColumns(String tableName, String schemaName) private List<String> getDataTypeColumns(String tableName, String schemaName)