From 724eae1514538d908f8d997b877f30973cf81442 Mon Sep 17 00:00:00 2001
From: LSmyrnaios
Date: Wed, 20 Mar 2024 15:08:01 +0200
Subject: [PATCH] - Optimize the placement of
 "DatabaseConnector.databaseLock.unlock()" statements.

- Rename a maven-repository.
---
 build.gradle                                             | 2 +-
 .../urls_controller/services/BulkImportServiceImpl.java  | 9 ++++-----
 .../urls_controller/services/UrlsServiceImpl.java        | 2 +-
 3 files changed, 6 insertions(+), 7 deletions(-)

diff --git a/build.gradle b/build.gradle
index b3774ca..39180a2 100644
--- a/build.gradle
+++ b/build.gradle
@@ -13,7 +13,7 @@ java {
 repositories {
     mavenCentral()
     maven {
-        name "omtd"
+        name "madgik"
         url "https://repo.madgik.di.uoa.gr/content/repositories/thirdparty/"
     }
 }
diff --git a/src/main/java/eu/openaire/urls_controller/services/BulkImportServiceImpl.java b/src/main/java/eu/openaire/urls_controller/services/BulkImportServiceImpl.java
index e0c38b4..3cbe8a0 100644
--- a/src/main/java/eu/openaire/urls_controller/services/BulkImportServiceImpl.java
+++ b/src/main/java/eu/openaire/urls_controller/services/BulkImportServiceImpl.java
@@ -209,14 +209,13 @@ public class BulkImportServiceImpl implements BulkImportService {
         // Merge the parquet files inside the table "payload_bulk_import", to improve performance of future operations.
         DatabaseConnector.databaseLock.lock();
         String mergeErrorMsg = fileUtils.mergeParquetFiles("payload_bulk_import", "", null);  // msg is already logged
+        DatabaseConnector.databaseLock.unlock();
         if ( mergeErrorMsg != null ) {  // the message in already logged
-            DatabaseConnector.databaseLock.unlock();
             bulkImportReport.addEvent(mergeErrorMsg);
             fileUtils.writeToFile(bulkImportReportLocation, bulkImportReport.getJsonReport(), true);
             BulkImportController.bulkImportDirsUnderProcessing.remove(bulkImportDirName);
             return false;
         }
-        DatabaseConnector.databaseLock.unlock();

         String successMsg = "Finished the bulk-import procedure for " + provenance + " and bulkImportDir: " + bulkImportDirName;
         logger.info(successMsg);
@@ -329,8 +328,9 @@ public class BulkImportServiceImpl implements BulkImportService {
         logger.trace("Going to load the data of parquet-file: \"" + parquetFileName + "\" to the database-table: \"payload_bulk_import\"." + additionalLoggingMsg);  // DEBUG!

         DatabaseConnector.databaseLock.lock();
-        if ( !parquetFileUtils.loadParquetDataIntoTable((currentBulkImportHdfsDir + parquetFileName), "payload_bulk_import") ) {
-            DatabaseConnector.databaseLock.unlock();
+        boolean parquetDataLoaded = parquetFileUtils.loadParquetDataIntoTable((currentBulkImportHdfsDir + parquetFileName), "payload_bulk_import");
+        DatabaseConnector.databaseLock.unlock();
+        if ( !parquetDataLoaded ) {
             errorMsg = "Could not load the payload-records to the database, for segment-" + segmentCounter + "!";
             logger.error(errorMsg + additionalLoggingMsg);
             bulkImportReport.addEvent(errorMsg);
@@ -338,7 +338,6 @@ public class BulkImportServiceImpl implements BulkImportService {
             // None of the files of this segment will be deleted, in any case.
             return numOfFilesInSegment;  // All files of this segment have failed.
         }
-        DatabaseConnector.databaseLock.unlock();

         String segmentSuccessMsg = "Finished importing " + numOfPayloadRecords + " files, out of " + numOfFilesInSegment + ", for segment-" + segmentCounter + ".";
         logger.info(segmentSuccessMsg + additionalLoggingMsg);
diff --git a/src/main/java/eu/openaire/urls_controller/services/UrlsServiceImpl.java b/src/main/java/eu/openaire/urls_controller/services/UrlsServiceImpl.java
index 362d682..3706924 100644
--- a/src/main/java/eu/openaire/urls_controller/services/UrlsServiceImpl.java
+++ b/src/main/java/eu/openaire/urls_controller/services/UrlsServiceImpl.java
@@ -334,8 +334,8 @@ public class UrlsServiceImpl implements UrlsService {
         SumParquetSuccess sumParquetSuccess = parquetFileUtils.checkParquetFilesSuccess(futures);
         ResponseEntity errorResponseEntity = sumParquetSuccess.getResponseEntity();
         if ( errorResponseEntity != null ) {  // The related log is already shown in this case.
-            postReportResultToWorker(curWorkerId, curReportAssignmentsCounter, "Error when creating or uploading the parquet files!");
             DatabaseConnector.databaseLock.unlock();
+            postReportResultToWorker(curWorkerId, curReportAssignmentsCounter, "Error when creating or uploading the parquet files!");
             return false;
         }
         hasAttemptParquetFileProblem = sumParquetSuccess.isAttemptParquetFileProblem();
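
Below is a minimal, hypothetical sketch of the unlock-placement pattern this patch applies: the result of the operation that needs the lock is captured while the lock is held, the lock is released immediately afterwards, and only then does the code branch on that result. It uses a plain ReentrantLock and made-up helpers (runQuery, handleFailure) rather than the project's DatabaseConnector, FileUtils, or report classes; it is an illustration of the idea, not the project's code.

// Sketch of moving unlock() ahead of the result check, with hypothetical names.
import java.util.concurrent.locks.ReentrantLock;

public class LockPlacementSketch {

    private static final ReentrantLock databaseLock = new ReentrantLock();

    // Before: each branch had to remember to unlock, duplicating the call.
    static boolean importBefore() {
        databaseLock.lock();
        String errorMsg = runQuery();           // operation that must run under the lock
        if (errorMsg != null) {
            databaseLock.unlock();              // unlock duplicated in the error branch
            handleFailure(errorMsg);
            return false;
        }
        databaseLock.unlock();                  // ...and again on the success path
        return true;
    }

    // After: the result is captured, the lock is released once, then the code branches.
    static boolean importAfter() {
        databaseLock.lock();
        String errorMsg = runQuery();
        databaseLock.unlock();                  // single unlock; lock held only for the query
        if (errorMsg != null) {
            handleFailure(errorMsg);            // error handling no longer runs under the lock
            return false;
        }
        return true;
    }

    // Hypothetical stand-ins for the real database call and report handling.
    static String runQuery() { return null; }
    static void handleFailure(String msg) { System.err.println(msg); }

    public static void main(String[] args) {
        System.out.println(importAfter());      // prints "true" with the stub query
    }
}

Besides removing the duplicated unlock() call from the error branches, this placement keeps the critical section as short as possible: report writing and error handling no longer execute while other threads are waiting on the lock.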