- Resolve a concurrency issue by enforcing synchronization on the "BulkImportReport.getJsonReport()" method.

- Increase the number of stacktrace lines to 20 for bulkImport segment failures.
- Improve "GenericUtils.getSelectiveStackTrace()".
Lampros Smyrnaios 2024-05-01 01:29:25 +03:00
parent 8e14d4dbe0
commit 39c36f9e66
4 changed files with 14 additions and 7 deletions

View File

@@ -46,7 +46,10 @@ public class BulkImportReport {
eventsMultimap.put(GenericUtils.getReadableCurrentTimeAndZone(), event); // This is synchronized.
}
public String getJsonReport()
/**
* Synchronized, to avoid concurrency issues when concurrent calls are made to the same bulkImport-Report object.
*/
public synchronized String getJsonReport()
{
// Convert the LinkedHashMultimap<String, String> to a Map<String, Collection<String>>, since Gson cannot serialize Multimaps.
eventsMap = eventsMultimap.asMap();
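
Since the rest of getJsonReport() is not shown in this hunk, here is a minimal, self-contained sketch of the pattern the change enforces. The field and method names follow the visible code, but the Multimap type, the way addEvent is synchronized, and the Gson serialization call are assumptions, not the project's actual implementation.

import com.google.common.collect.LinkedHashMultimap;
import com.google.common.collect.Multimap;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import java.util.Collection;
import java.util.Map;

public class BulkImportReportSketch {
    // Writer threads append events here; reader threads serialize a Map view of it.
    private final Multimap<String, String> eventsMultimap = LinkedHashMultimap.create();
    private Map<String, Collection<String>> eventsMap;

    public synchronized void addEvent(String time, String event) {
        eventsMultimap.put(time, event);
    }

    // Synchronized, so Gson never iterates the backing multimap while another thread is adding events.
    public synchronized String getJsonReport() {
        eventsMap = eventsMultimap.asMap(); // Gson cannot serialize Multimaps, so expose a Map view instead.
        return new Gson().toJson(eventsMap, new TypeToken<Map<String, Collection<String>>>(){}.getType());
    }
}

Both the writer and the reader lock on the same report instance, so rendering the JSON report and logging new events can no longer interleave on the shared multimap.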

View File

@@ -157,7 +157,7 @@ public class BulkImportServiceImpl implements BulkImportService {
} catch (ExecutionException ee) { // These can be serious errors like an "out of memory exception" (Java HEAP).
numFailedSegments ++;
numAllFailedFiles += subLists.get(i).size(); // We assume all files of this segment failed, as they all pass through the same code paths, so any serious exception should arise from the first files being processed, and the rest of the files will be skipped.
logger.error(GenericUtils.getSelectedStackTraceForCausedException(ee, "Task_" + i + " failed with: ", additionalLoggingMsg, 15));
logger.error(GenericUtils.getSelectedStackTraceForCausedException(ee, "Task_" + i + " failed with: ", additionalLoggingMsg, 20));
bulkImportReport.addEvent("Segment_" + i + " failed with: " + ee.getCause().getMessage());
} catch (CancellationException ce) {
numFailedSegments ++;
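
The helper called above, getSelectedStackTraceForCausedException(), is not part of the visible hunks; its four-argument call site is shown, but its body is not. The sketch below is only a guess at its shape, assuming it unwraps the cause of the ExecutionException and delegates to the selective-stacktrace builder shown further down, now with 20 lines instead of 15.

// Hypothetical sketch; the real method body and exact signature are not shown in this commit.
public static String getSelectedStackTraceForCausedException(Throwable thr, String initialMessage, String additionalMessage, int numOfLines) {
    Throwable cause = thr.getCause();
    Throwable target = (cause != null) ? cause : thr;  // Prefer the underlying cause, if one exists.
    String message = initialMessage + target.getMessage() + ((additionalMessage != null) ? (" | " + additionalMessage) : "");
    return getSelectiveStackTrace(target, message, numOfLines);
}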

View File

@@ -816,6 +816,9 @@ public class FileUtils {
if ( shouldLockThreads ) // In case multiple threads write to the same file, e.g. during the bulk-import procedure.
fileAccessLock.lock();
// TODO - Make this method synchronize per specific file, not globally.
// TODO - NOW: Multiple bulkImport procedures (with different DIRs) are blocked while writing to DIFFERENT files.
try ( BufferedWriter bufferedWriter = new BufferedWriter(Files.newBufferedWriter(Paths.get(fileFullPath)), halfMb) )
{
bufferedWriter.write(stringToWrite); // This will overwrite the file; since the file is truncated on open, it does not matter if the new string is smaller than the old content.
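
The two TODOs describe the limitation: a single fileAccessLock serializes all writers, so independent bulk-import runs block each other even when they write to different files. A minimal sketch of one possible per-file locking scheme (the class and map names here are hypothetical, not part of the repository) could look like this:

import java.io.BufferedWriter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.ReentrantLock;

public class PerFileLockSketch {
    // One lock per file path, created on demand; writers of different files no longer block each other.
    private static final ConcurrentHashMap<String, ReentrantLock> fileLocks = new ConcurrentHashMap<>();

    public static void overwriteFile(String fileFullPath, String stringToWrite) throws IOException {
        ReentrantLock lock = fileLocks.computeIfAbsent(fileFullPath, path -> new ReentrantLock());
        lock.lock();
        try (BufferedWriter bufferedWriter = Files.newBufferedWriter(Paths.get(fileFullPath))) {
            bufferedWriter.write(stringToWrite);  // Overwrites the file, as in the original code (buffer-size tuning omitted).
        } finally {
            lock.unlock();
        }
    }
}

The map grows by one small lock object per distinct path; if that ever matters, entries could be evicted once a bulk-import run finishes.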

View File

@@ -13,6 +13,7 @@ public class GenericUtils {
public static final String endOfLine = "\n";
public static final String tab = "\t";
private static final SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS z");
@@ -46,11 +47,11 @@ public class GenericUtils {
StackTraceElement[] stels = thr.getStackTrace();
StringBuilder sb = new StringBuilder(numOfLines * 100); // This StringBuilder is thread-safe, since it is a local variable.
if ( initialMessage != null )
sb.append(initialMessage).append(GenericUtils.endOfLine);
sb.append("Stacktrace:").append(GenericUtils.endOfLine);
for ( int i = 0; (i < stels.length) && (i <= numOfLines); ++i ) {
sb.append(stels[i]);
if (i < numOfLines) sb.append(GenericUtils.endOfLine);
sb.append(initialMessage).append(endOfLine);
sb.append("Stacktrace:").append(endOfLine);
for ( int i = 0; (i < stels.length) && (i < numOfLines); ++i ) {
sb.append(tab).append(stels[i]);
if (i < (numOfLines -1)) sb.append(endOfLine);
}
return sb.toString();
}
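
For completeness, a small hedged usage example of the improved method. The parameter list (Throwable, initial message, number of lines) is inferred from the names used in the body above, and the SLF4J logger is an assumption standing in for whatever logger the project uses for calls like the logger.error() seen in BulkImportServiceImpl.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class SelectiveStackTraceUsageSketch {
    private static final Logger logger = LoggerFactory.getLogger(SelectiveStackTraceUsageSketch.class);

    public static void main(String[] args) {
        try {
            Integer.parseInt("not-a-number");  // Deliberately throws a NumberFormatException.
        } catch (Exception e) {
            // With the new loop bounds, only the first 3 stack frames are kept, each prefixed
            // with a tab, and no end-of-line is appended after the last kept frame.
            logger.error(GenericUtils.getSelectiveStackTrace(e, "Parsing failed: ", 3));
        }
    }
}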