forked from lsmyrnaios/UrlsController
- Create the HDFS-subDirs before generating "callableTasks" for creating and uploading the parquetFiles.
- Delete gradle .zip file after installation.
parent 860c73ea91
commit a524375656
@@ -35,6 +35,7 @@ if [[ justInstall -eq 0 ]]; then
 echo -e "\nAsking for sudo, in order to install 'gradle'..\n"
 sudo mkdir /opt/gradle
 sudo apt install -y unzip && sudo unzip -d /opt/gradle gradle-${gradleVersion}-bin.zip
+sudo rm -rf gradle-${gradleVersion}-bin.zip
 #ls /opt/gradle/gradle-${gradleVersion}	# For debugging installation
 fi
 
@@ -201,7 +201,7 @@ public class BulkImportController {
 bulkImportService.bulkImportFullTextsFromDirectory(bulkImportReport, finalRelativeBulkImportDir, finalBulkImportDir, givenDir, provenance, bulkImportSource, shouldDeleteFilesOnFinish)
 );
 
-return ResponseEntity.ok().body(new BulkImportResponse(msg, bulkImportReportID));
+return ResponseEntity.ok().body(new BulkImportResponse(msg, bulkImportReportID));	// The response is automatically serialized to json and it's of type "application/json".
 }
 
 
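Note on the comment introduced above: Spring Boot serializes the returned body with its default JSON converter (Jackson), so the client receives an application/json response. Below is a minimal sketch of what such a response class could look like; the field names are only inferred from the constructor call in the diff, and the project's actual BulkImportResponse may differ.

    // Hypothetical sketch of a response DTO; field names are assumed from the constructor call above.
    public class BulkImportResponse {

        private final String msg;
        private final String bulkImportReportID;

        public BulkImportResponse(String msg, String bulkImportReportID) {
            this.msg = msg;
            this.bulkImportReportID = bulkImportReportID;
        }

        // Jackson serializes through the getters, producing: {"msg": "...", "bulkImportReportID": "..."}
        public String getMsg() { return msg; }

        public String getBulkImportReportID() { return bulkImportReportID; }
    }

Returning the object through ResponseEntity.ok().body(...) sets the 200 status and lets the registered message converter pick the application/json content type.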
@@ -311,8 +311,6 @@ public class UrlsServiceImpl implements UrlsService {
 
 logger.debug("Going to write the results in the parquet files, then upload them to HDFS, and then load them into the database's tables. For batch-assignments_" + curReportAssignmentsCounter);
 
-List<Callable<ParquetReport>> callableTasks = parquetFileUtils.getTasksForCreatingAndUploadingParquetFiles(urlReports, sizeOfUrlReports, curReportAssignmentsCounter, localParquetPath, uploadFullTextsResponse);
-
 // Create HDFS subDirs for these assignments. Other background threads handling other assignments will not interfere with loading of parquetFiles to the DB tables.
 String endingMkDirAndParams = curReportAssignmentsCounter + "/" + parquetFileUtils.mkDirsAndParams;
 if ( !parquetFileUtils.applyHDFOperation(parquetFileUtils.webHDFSBaseUrl + parquetFileUtils.parquetHDFSDirectoryPathAttempts + endingMkDirAndParams)
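The webHDFSBaseUrl field and the mkDirsAndParams suffix suggest that applyHDFOperation issues a WebHDFS REST call. The standard way to create a directory over WebHDFS is an HTTP PUT with op=MKDIRS; the snippet below is a generic sketch of that call, not the project's actual implementation (host, port, path and the helper name are illustrative).

    import java.io.IOException;
    import java.net.HttpURLConnection;
    import java.net.URL;

    // Hypothetical sketch: create an HDFS directory through the WebHDFS REST API (PUT ...?op=MKDIRS).
    public class WebHdfsMkDirsSketch {

        static boolean mkDirs(String webHdfsBaseUrl, String dirPath) throws IOException {
            // e.g. http://namenode:50070/webhdfs/v1/some/dir?op=MKDIRS  (authentication params omitted)
            URL url = new URL(webHdfsBaseUrl + dirPath + "?op=MKDIRS");
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("PUT");
            int statusCode = conn.getResponseCode();  // on success, WebHDFS answers 200 with a {"boolean": true} body
            conn.disconnect();
            return (statusCode == HttpURLConnection.HTTP_OK);
        }
    }

Creating one sub-directory per assignments-batch, as this commit does, keeps concurrent batches from interfering with each other's parquet-file loading, per the comment in the diff.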
@@ -322,6 +320,8 @@ public class UrlsServiceImpl implements UrlsService {
 return false;
 }
 
+List<Callable<ParquetReport>> callableTasks = parquetFileUtils.getTasksForCreatingAndUploadingParquetFiles(urlReports, sizeOfUrlReports, curReportAssignmentsCounter, localParquetPath, uploadFullTextsResponse);
+
 boolean hasAttemptParquetFileProblem = false;
 boolean hasPayloadParquetFileProblem = false;
 
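After this change, the callable tasks are only built once the HDFS sub-directories exist. The surrounding code (not shown in this hunk) presumably hands the list to an executor and inspects the returned ParquetReport objects; the sketch below shows that general invokeAll pattern with illustrative names, using Boolean in place of ParquetReport.

    import java.util.List;
    import java.util.concurrent.Callable;
    import java.util.concurrent.ExecutionException;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Future;

    // Hypothetical sketch: run a batch of callables and check that every task succeeded.
    public class ParquetTaskRunnerSketch {

        static boolean runAll(List<Callable<Boolean>> callableTasks) throws InterruptedException {
            ExecutorService executor = Executors.newFixedThreadPool(Math.max(1, callableTasks.size()));
            try {
                List<Future<Boolean>> futures = executor.invokeAll(callableTasks);  // blocks until all tasks finish
                for (Future<Boolean> future : futures) {
                    try {
                        if (!future.get())
                            return false;  // a parquet file failed to be created or uploaded
                    } catch (ExecutionException e) {
                        return false;  // a task threw; real code would log e.getCause()
                    }
                }
                return true;
            } finally {
                executor.shutdown();
            }
        }
    }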