forked from lsmyrnaios/UrlsController
- Avoid displaying a warning for the "test" HDFS directory, when the Controller is running in PROD mode.
- Add a missing change for the optimization of reading files.
- Update dependencies.
parent 3dd349dd00
commit 6891c467d4
@@ -49,7 +49,7 @@ dependencies {
 	implementation group: 'org.apache.commons', name: 'commons-lang3', version: '3.13.0'

 	// https://mvnrepository.com/artifact/org.apache.commons/commons-compress
-	implementation("org.apache.commons:commons-compress:1.23.0") {
+	implementation("org.apache.commons:commons-compress:1.24.0") {
 		exclude group: 'com.github.luben', module: 'zstd-jni'
 	}
 	implementation 'com.github.luben:zstd-jni:1.5.5-5'	// Even though this is part of the above dependency, the Apache commons rarely updates it, while the zstd team makes improvements very often.
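Note on the zstd-jni pin above: as the inline comment says, commons-compress pulls in zstd-jni but rarely updates it, so the build excludes the transitive module and pins 1.5.5-5 directly. A minimal round-trip sketch of the zstd-jni API that the pinned library provides; the payload and compression level here are illustrative, not taken from this repository:

import com.github.luben.zstd.Zstd;

import java.nio.charset.StandardCharsets;

public class ZstdRoundTrip {
	public static void main(String[] args) {
		byte[] original = "example payload to compress".getBytes(StandardCharsets.UTF_8);

		// Compress with an explicit level; 3 is zstd's default speed/ratio trade-off.
		byte[] compressed = Zstd.compress(original, 3);

		// Decompress; the original size must be known (or recovered via Zstd.decompressedSize()).
		byte[] restored = Zstd.decompress(compressed, original.length);

		System.out.println(new String(restored, StandardCharsets.UTF_8));
	}
}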
@@ -120,7 +120,7 @@ dependencies {


 	// https://mvnrepository.com/artifact/io.micrometer/micrometer-registry-prometheus
-	runtimeOnly 'io.micrometer:micrometer-registry-prometheus:1.11.3'
+	runtimeOnly 'io.micrometer:micrometer-registry-prometheus:1.11.4'

 	testImplementation 'org.springframework.security:spring-security-test'
 	testImplementation "org.springframework.boot:spring-boot-starter-test"
@@ -378,7 +378,7 @@ public class ScheduledTasks {
 		logger.debug("Going to load and parse the workerReport: " + workerReportName);

 		// Load the file's json content into a "WorkerReport" object.
-		try ( BufferedReader bfRead = new BufferedReader(new FileReader(workerReportFile)) ) {	// The default size is sufficient here.
+		try ( BufferedReader bfRead = new BufferedReader(new FileReader(workerReportFile), FileUtils.halfMb) ) {
 			String line;
 			while ( (line = bfRead.readLine()) != null )	// The line, without any line-termination-characters.
 				jsonStringBuilder.append(line).append("\n");
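This hunk is the "optimization of reading files" mentioned in the commit message: the worker report is now read with an explicit half-megabyte buffer (FileUtils.halfMb in this codebase) instead of BufferedReader's small default, reducing the number of underlying reads for large report files. A self-contained sketch of the same pattern; the 524_288 constant and the file path stand in for FileUtils.halfMb and the actual report file:

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;

public class ReportReader {
	// Assumed stand-in for FileUtils.halfMb: a 512 KiB buffer for the BufferedReader.
	private static final int HALF_MB = 524_288;

	public static String readWholeFile(String filePath) throws IOException {
		StringBuilder jsonStringBuilder = new StringBuilder();
		try ( BufferedReader bfRead = new BufferedReader(new FileReader(filePath), HALF_MB) ) {
			String line;
			while ( (line = bfRead.readLine()) != null )	// The line, without any line-termination characters.
				jsonStringBuilder.append(line).append("\n");
		}
		return jsonStringBuilder.toString();
	}
}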
@@ -28,7 +28,10 @@ import java.sql.Connection;
 import java.sql.PreparedStatement;
 import java.sql.SQLException;
 import java.sql.Timestamp;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.HashMap;
+import java.util.List;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
@@ -558,7 +558,7 @@ public class ParquetFileUtils {
 					foundPayloadsAggregatedDir = true;
 				else if ( dirPath.equals("payloads_bulk_import") )
 					foundPayloadsBulkImportDir = true;
-				else
+				else if ( ! dirPath.equals("test") )	// The "test" directory helps with testing the service, without interfering with the production directories.
 					logger.warn("Unknown remote parquet HDFS-directory found: " + dirPath);
 			}
 		} catch (JSONException je) {	// In case any of the above "json-keys" was not found.
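The changed line above is the PROD-mode fix from the commit message: previously any directory other than the payload directories fell into the plain "else" and triggered the "Unknown remote parquet HDFS-directory" warning, so the "test" directory used for exercising the service was flagged even in production; now it is silently accepted. A reduced sketch of the resulting classification logic; the "payloads_aggregated" name, the local flags, and the use of System.err instead of the logger are illustrative assumptions:

import java.util.List;

public class HdfsDirCheck {
	private static boolean foundPayloadsAggregatedDir = false;
	private static boolean foundPayloadsBulkImportDir = false;

	public static void checkDirs(List<String> dirPaths) {
		for ( String dirPath : dirPaths ) {
			if ( dirPath.equals("payloads_aggregated") )	// Assumed name for the aggregated-payloads directory.
				foundPayloadsAggregatedDir = true;
			else if ( dirPath.equals("payloads_bulk_import") )
				foundPayloadsBulkImportDir = true;
			else if ( ! dirPath.equals("test") )	// The "test" directory is expected; only truly unknown directories are reported.
				System.err.println("Unknown remote parquet HDFS-directory found: " + dirPath);
		}
	}

	public static void main(String[] args) {
		checkDirs(List.of("payloads_aggregated", "test", "stale_dir"));
		// Only "stale_dir" is reported; "test" no longer produces a warning.
	}
}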