forked from lsmyrnaios/UrlsController

- Create the HDFS-subDirs before generating "callableTasks" for creating and uploading the parquetFiles.
- Delete gradle .zip file after installation.

parent 860c73ea91
commit a524375656
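The HDFS sub-directories are created through WebHDFS REST calls (see parquetFileUtils.applyHDFOperation in the last hunk below). The helper's internals are not part of this diff; what follows is a minimal sketch of the kind of MKDIRS request it would issue, assuming a plain HttpURLConnection against the standard WebHDFS endpoint. The name applyHdfsMkDirs and the URL shape are illustrative, not the project's actual code.

import java.net.HttpURLConnection;
import java.net.URL;

public class WebHdfsMkDirsSketch {

	// Issues a WebHDFS "MKDIRS" request and reports whether it succeeded.
	// "requestUrl" is expected to look like:
	//   http://<namenode>:<port>/webhdfs/v1/<directory>?op=MKDIRS&user.name=<user>
	static boolean applyHdfsMkDirs(String requestUrl) {
		try {
			HttpURLConnection conn = (HttpURLConnection) new URL(requestUrl).openConnection();
			conn.setRequestMethod("PUT");	// MKDIRS is a PUT operation in the WebHDFS REST API.
			int statusCode = conn.getResponseCode();
			conn.disconnect();
			return (statusCode == 200);	// On success, WebHDFS answers 200 with {"boolean":true}.
		} catch (Exception e) {
			return false;
		}
	}
}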
@@ -35,6 +35,7 @@ if [[ justInstall -eq 0 ]]; then
 	echo -e "\nAsking for sudo, in order to install 'gradle'..\n"
 	sudo mkdir /opt/gradle
 	sudo apt install -y unzip && sudo unzip -d /opt/gradle gradle-${gradleVersion}-bin.zip
+	sudo rm -rf gradle-${gradleVersion}-bin.zip
 	#ls /opt/gradle/gradle-${gradleVersion}	# For debugging installation
 fi
@@ -201,7 +201,7 @@ public class BulkImportController {
 			bulkImportService.bulkImportFullTextsFromDirectory(bulkImportReport, finalRelativeBulkImportDir, finalBulkImportDir, givenDir, provenance, bulkImportSource, shouldDeleteFilesOnFinish)
 		);
 
-		return ResponseEntity.ok().body(new BulkImportResponse(msg, bulkImportReportID));
+		return ResponseEntity.ok().body(new BulkImportResponse(msg, bulkImportReportID));	// The response is automatically serialized to json and it's of type "application/json".
 	}
 
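The comment added on the return line refers to Spring's default behavior: a bean placed in a ResponseEntity body is serialized to JSON by the registered Jackson converter, with Content-Type "application/json". A minimal sketch of what such a response class could look like; the field names below are assumptions inferred from the constructor call, not the project's actual definition.

public class BulkImportResponse {

	private final String message;
	private final String bulkImportReportID;

	public BulkImportResponse(String message, String bulkImportReportID) {
		this.message = message;
		this.bulkImportReportID = bulkImportReportID;
	}

	// Jackson serializes through the public getters, so the HTTP response body becomes:
	// {"message":"...","bulkImportReportID":"..."}
	public String getMessage() { return message; }
	public String getBulkImportReportID() { return bulkImportReportID; }
}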
@@ -311,17 +311,17 @@ public class UrlsServiceImpl implements UrlsService {
 
 		logger.debug("Going to write the results in the parquet files, then upload them to HDFS, and then load them into the database's tables. For batch-assignments_" + curReportAssignmentsCounter);
 
-		List<Callable<ParquetReport>> callableTasks = parquetFileUtils.getTasksForCreatingAndUploadingParquetFiles(urlReports, sizeOfUrlReports, curReportAssignmentsCounter, localParquetPath, uploadFullTextsResponse);
-
 		// Create HDFS subDirs for these assignments. Other background threads handling other assignments will not interfere with loading of parquetFiles to the DB tables.
 		String endingMkDirAndParams = curReportAssignmentsCounter + "/" + parquetFileUtils.mkDirsAndParams;
 		if ( !parquetFileUtils.applyHDFOperation(parquetFileUtils.webHDFSBaseUrl + parquetFileUtils.parquetHDFSDirectoryPathAttempts + endingMkDirAndParams)
 				|| !parquetFileUtils.applyHDFOperation(parquetFileUtils.webHDFSBaseUrl + parquetFileUtils.parquetHDFSDirectoryPathPayloadsAggregated + endingMkDirAndParams) )
 		{
 			postReportResultToWorker(curWorkerId, curReportAssignmentsCounter, "Error when creating the HDFS sub-directories for assignments_" + curReportAssignmentsCounter);
 			return false;
 		}
 
+		List<Callable<ParquetReport>> callableTasks = parquetFileUtils.getTasksForCreatingAndUploadingParquetFiles(urlReports, sizeOfUrlReports, curReportAssignmentsCounter, localParquetPath, uploadFullTextsResponse);
+
 		boolean hasAttemptParquetFileProblem = false;
 		boolean hasPayloadParquetFileProblem = false;
 
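The hunk above only shows where the task list is built; the tasks are presumably executed later through an ExecutorService. A sketch of the usual invokeAll pattern, under the assumption of a fixed thread pool and a simplified stand-in for the project's ParquetReport type.

import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class ParquetTasksSketch {

	// Simplified stand-in for the project's ParquetReport.
	record ParquetReport(boolean successful) {}

	static final ExecutorService insertsExecutor = Executors.newFixedThreadPool(4);

	// Runs all parquet-creation tasks and reports whether every one succeeded.
	// Creating the HDFS sub-directories before this point (as the commit does)
	// guarantees the target directories exist before any task uploads into them.
	static boolean executeTasks(List<Callable<ParquetReport>> callableTasks) throws InterruptedException {
		List<Future<ParquetReport>> futures = insertsExecutor.invokeAll(callableTasks);	// Blocks until all tasks finish.
		for ( Future<ParquetReport> future : futures ) {
			try {
				if ( !future.get().successful() )
					return false;
			} catch (Exception e) {
				return false;	// The task threw an exception; count it as a failure.
			}
		}
		return true;
	}
}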