package eu.openaire.urls_controller.controllers;

import com.google.common.collect.HashMultimap;
import eu.openaire.urls_controller.configuration.DatabaseConnector;
import eu.openaire.urls_controller.models.Assignment;
import eu.openaire.urls_controller.models.Datasource;
import eu.openaire.urls_controller.payloads.responces.AssignmentsResponse;
import eu.openaire.urls_controller.util.GenericUtils;
import eu.openaire.urls_controller.util.ParquetFileUtils;
import eu.openaire.urls_controller.util.TestFileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.web.bind.annotation.*;

import java.io.File;
import java.sql.Timestamp;
import java.util.*;
import java.util.concurrent.atomic.AtomicLong;


@RestController
@RequestMapping("/test")
public class TestController {

    private static final Logger logger = LoggerFactory.getLogger(TestController.class);

    @Autowired
    private JdbcTemplate jdbcTemplate;

    @Autowired
    private ParquetFileUtils parquetFileUtils;

    @Autowired
    private TestFileUtils testFileUtils;

    @Value("${services.pdfaggregation.controller.assignmentLimit}")
    private int assignmentLimit;

    private static final AtomicLong assignmentsBatchCounter = new AtomicLong(0);


    @GetMapping("urls")
    public ResponseEntity<AssignmentsResponse> getTestUrls(@RequestParam String workerId, @RequestParam int workerAssignmentsLimit) {
        logger.info("Worker with id \"" + workerId + "\" requested " + workerAssignmentsLimit + " test-assignments. The assignments-limit of the controller is: " + this.assignmentLimit);
        logger.debug("Going to retrieve the data from the inputResourceFile: " + testFileUtils.testResource.getFilename());

        List<Assignment> assignments = new ArrayList<>();
        HashMultimap<String, String> loadedIdUrlPairs;
        boolean isFirstRun = true;
        boolean assignmentsLimitReached = false;
        Timestamp timestamp = new Timestamp(System.currentTimeMillis());    // Store it here, so that all records of this batch share the same timestamp.
        long curAssignmentsBatchCounter = assignmentsBatchCounter.incrementAndGet();

        // Start loading the urls.
        while ( true ) {
            loadedIdUrlPairs = testFileUtils.getNextIdUrlPairBatchFromJson();    // Take the next batch of id-url pairs from the json file.

            if ( testFileUtils.isFinishedLoading(loadedIdUrlPairs.isEmpty(), isFirstRun) )    // A thrown RuntimeException is automatically propagated.
                break;
            else
                isFirstRun = false;

            Set<Map.Entry<String, String>> pairs = loadedIdUrlPairs.entries();

            for ( Map.Entry<String, String> pair : pairs ) {
                if ( assignments.size() >= workerAssignmentsLimit ) {
                    assignmentsLimitReached = true;
                    break;
                }
                int randomNum = GenericUtils.getRandomNumber(1, 5);
                assignments.add(new Assignment(pair.getKey(), pair.getValue(), new Datasource("ID_" + randomNum, "NAME_" + randomNum), workerId, curAssignmentsBatchCounter, timestamp));
            }// end pairs-for-loop

            if ( assignmentsLimitReached ) {
                logger.debug("Done loading urls from the inputFile, as the assignmentsLimit (" + workerAssignmentsLimit + ") was reached.");
                break;
            }
        }// end loading-while-loop

        Scanner scanner = testFileUtils.inputScanner.get();
        if ( scanner != null )    // It may still hold its initial null value, if no input was ever read.
            scanner.close();

        logger.info("Sending batch_" + curAssignmentsBatchCounter + " with " + assignments.size() + " assignments (" + testFileUtils.duplicateIdUrlEntries.get() + " more assignments were discarded as duplicates), to worker with ID: " + workerId);
        return ResponseEntity.status(HttpStatus.OK).header("Content-Type", "application/json").body(new AssignmentsResponse(curAssignmentsBatchCounter, assignments));
    }
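
    // A hypothetical example invocation of the endpoint above (the host and port depend on the deployment,
    // and the exact JSON field names depend on the "AssignmentsResponse" mapping; this is an illustrative
    // sketch, not part of the original code):
    //   curl "http://localhost:8080/test/urls?workerId=worker_1&workerAssignmentsLimit=100"
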

    @GetMapping("get10PublicationIdsTest")
    public ResponseEntity<String> get10PublicationIdsTest() {
        String query = "SELECT id FROM " + DatabaseConnector.databaseName + ".publication LIMIT 10;";
        try {
            List<String> publications = jdbcTemplate.queryForList(query, String.class);
            return new ResponseEntity<>(publications.toString(), HttpStatus.OK);
        } catch (Exception e) {
            String errorMsg = "Problem when executing the \"get10PublicationIdsTest\" query: " + query;
            logger.error(errorMsg, e);
            return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body(errorMsg);
        }
    }


    @PostMapping("uploadParquetFile")
    public ResponseEntity<String> uploadParquetFile() {
        logger.debug("We got a \"parquet_upload\" request.");

        String parquetFileName = "1_attempts_0.parquet";
        String parquetFileFullPath = System.getProperty("user.dir") + File.separator + parquetFileName;

        String errorMsg = parquetFileUtils.uploadParquetFileToHDFS(parquetFileFullPath, parquetFileName, parquetFileUtils.parquetHDFSDirectoryPathAttempts);
        if ( errorMsg != null )    // The error-message has already been logged by the Controller.
            return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body(errorMsg);

        return ResponseEntity.ok().build();
    }

}
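
// Hypothetical example invocations of the two endpoints above (host and port are deployment-specific;
// shown for illustration only, not part of the original code):
//   curl "http://localhost:8080/test/get10PublicationIdsTest"
//   curl -X POST "http://localhost:8080/test/uploadParquetFile"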