Improve the names of some methods.

Lampros Smyrnaios 2023-10-16 23:39:43 +03:00
parent def21b991d
commit f05eee7569
1 changed file with 6 additions and 6 deletions


@@ -196,13 +196,13 @@ public class ParquetFileUtils {
for ( int i = 0; i < numSubListsPayload; ++i ) {
int finalI = i;
callableTasks.add(() -> { // Handle inserts to the "payload" table. Around 20% of the total amount.
-return new ParquetReport(ParquetReport.ParquetType.payload, createAndLoadParquetDataIntoPayloadTable(finalI, finalSubLists.get(finalI), curReportAssignments, currentParquetPath, (parquetHDFSDirectoryPathPayloadsAggregated + curReportAssignments + "/")));
+return new ParquetReport(ParquetReport.ParquetType.payload, createPayloadParquetDataAndUploadToHDFS(finalI, finalSubLists.get(finalI), curReportAssignments, currentParquetPath, (parquetHDFSDirectoryPathPayloadsAggregated + curReportAssignments + "/")));
});
}
} else {
// If the "urlReports" are so few, that we cannot get big "sublists", assign a single task to handle all the payload (sizeOfEachSubList * 5).
callableTasks.add(() -> { // Handle inserts to the "payload" table. Around 20% of the total amount.
-return new ParquetReport(ParquetReport.ParquetType.payload, createAndLoadParquetDataIntoPayloadTable(0, urlReports, curReportAssignments, currentParquetPath, (parquetHDFSDirectoryPathPayloadsAggregated + curReportAssignments + "/")));
+return new ParquetReport(ParquetReport.ParquetType.payload, createPayloadParquetDataAndUploadToHDFS(0, urlReports, curReportAssignments, currentParquetPath, (parquetHDFSDirectoryPathPayloadsAggregated + curReportAssignments + "/")));
});
}
}
@@ -218,13 +218,13 @@ public class ParquetFileUtils {
for ( int i = 0; i < numSubListsAttempt; ++i ) {
int finalI = i;
callableTasks.add(() -> { // Handle inserts to the "attempt" table. Insert 20% of the "attempt" queries.
-return new ParquetReport(ParquetReport.ParquetType.attempt, createAndLoadParquetDataIntoAttemptTable(finalI, finalSubLists.get(finalI), curReportAssignments, currentParquetPath));
+return new ParquetReport(ParquetReport.ParquetType.attempt, createAttemptParquetDataAndUploadToHDFS(finalI, finalSubLists.get(finalI), curReportAssignments, currentParquetPath));
});
}
} else {
// If the "urlReports" are so few, that we cannot get big "sublists", assign a single task to handle all the attempts (sizeOfEachSubList * 5).
callableTasks.add(() -> { // Handle inserts to the "attempt" table.
-return new ParquetReport(ParquetReport.ParquetType.attempt, createAndLoadParquetDataIntoAttemptTable(0, urlReports, curReportAssignments, currentParquetPath));
+return new ParquetReport(ParquetReport.ParquetType.attempt, createAttemptParquetDataAndUploadToHDFS(0, urlReports, curReportAssignments, currentParquetPath));
});
}
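
Context for the two hunks above: both branches only add Callable tasks to the "callableTasks" list; the tasks are executed elsewhere in the class. Below is a minimal, hypothetical sketch of how such a task list could be run in parallel. The pool size, the "isSuccessful()" and "getParquetType()" accessors on ParquetReport, and the error handling are assumptions made for illustration and are not taken from this commit.

import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

// Hypothetical sketch (not part of this commit): run the accumulated parquet tasks in parallel.
private boolean runParquetTasks(List<Callable<ParquetReport>> callableTasks)
{
	ExecutorService insertsExecutor = Executors.newFixedThreadPool(6);	// Assumed pool size.
	boolean allSucceeded = true;
	try {
		// "invokeAll()" blocks until every task has finished (or thrown).
		List<Future<ParquetReport>> futures = insertsExecutor.invokeAll(callableTasks);
		for ( Future<ParquetReport> future : futures ) {
			ParquetReport report = future.get();	// Rethrows a task's exception, if any.
			// "isSuccessful()" and "getParquetType()" are assumed accessors, not shown in this diff.
			if ( !report.isSuccessful() ) {
				System.err.println("A parquet task of type \"" + report.getParquetType() + "\" has failed.");
				allSucceeded = false;
			}
		}
	} catch (InterruptedException | ExecutionException e) {
		System.err.println("Error when executing the parquet tasks: " + e);
		allSucceeded = false;
	} finally {
		insertsExecutor.shutdown();
	}
	return allSucceeded;
}
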
@@ -232,7 +232,7 @@ public class ParquetFileUtils {
}
-public boolean createAndLoadParquetDataIntoAttemptTable(int attemptsIncNum, List<UrlReport> urlReports, long curReportAssignmentsCounter, String localParquetPath)
+public boolean createAttemptParquetDataAndUploadToHDFS(int attemptsIncNum, List<UrlReport> urlReports, long curReportAssignmentsCounter, String localParquetPath)
{
List<GenericData.Record> recordList = new ArrayList<>(urlReports.size());
GenericData.Record record;
@@ -289,7 +289,7 @@ public class ParquetFileUtils {
}
-public boolean createAndLoadParquetDataIntoPayloadTable(int payloadsCounter, List<UrlReport> urlReports, long curReportAssignmentsCounter, String localParquetPath, String parquetHDFSDirectoryPathPayloads)
+public boolean createPayloadParquetDataAndUploadToHDFS(int payloadsCounter, List<UrlReport> urlReports, long curReportAssignmentsCounter, String localParquetPath, String parquetHDFSDirectoryPathPayloads)
{
List<GenericData.Record> recordList = new ArrayList<>((int) (urlReports.size() * 0.2));
GenericData.Record record;
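
The renamed methods share the same shape: build a list of Avro GenericData.Record objects, write them into a local parquet file, and then upload that file to a directory in HDFS (the payload variant receives its target HDFS directory as a parameter). Below is a hypothetical, minimal sketch of such a create-and-upload flow; the schema, the field names, and the use of AvroParquetWriter and FileSystem.copyFromLocalFile are illustrative assumptions, not code from this repository, which may for instance upload through the WebHDFS REST API instead.

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.avro.AvroParquetWriter;
import org.apache.parquet.hadoop.ParquetWriter;

import java.io.IOException;
import java.util.List;

public class PayloadParquetSketch {

	// Hypothetical schema; the real project defines and loads its Avro schemas elsewhere.
	private static final Schema PAYLOAD_SCHEMA = new Schema.Parser().parse(
			"{\"type\":\"record\",\"name\":\"payload\",\"fields\":["
			+ "{\"name\":\"id\",\"type\":\"string\"},"
			+ "{\"name\":\"location\",\"type\":[\"null\",\"string\"]}]}");

	// Sketch of a create-and-upload step: write the records to a local parquet file, then copy it to HDFS.
	public static boolean writeAndUpload(List<GenericData.Record> recordList, String localParquetPath, String hdfsDirectoryPath)
	{
		Path localFile = new Path(localParquetPath);
		try ( ParquetWriter<GenericData.Record> writer = AvroParquetWriter
				.<GenericData.Record>builder(localFile)
				.withSchema(PAYLOAD_SCHEMA)
				.build() ) {
			for ( GenericData.Record record : recordList )
				writer.write(record);
		} catch (IOException e) {
			System.err.println("Could not write the local parquet file: " + e);
			return false;
		}

		try {
			// One possible upload mechanism; assumes "fs.defaultFS" points to the target HDFS cluster.
			FileSystem hdfs = FileSystem.get(new Configuration());
			hdfs.copyFromLocalFile(localFile, new Path(hdfsDirectoryPath));
			return true;
		} catch (IOException e) {
			System.err.println("Could not upload the parquet file to HDFS: " + e);
			return false;
		}
	}
}
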