forked from D-Net/dnet-hadoop
changed the implementation of RabbitMQ communication
parent 403c13eebf
commit 53ec9bccca
DnetCollectorWorkerApplication.java
@@ -192,16 +192,18 @@ public class DnetCollectorWorkerApplication implements CommandLineRunner {
         log.info("Created path "+hdfswritepath.toString());
 
+        final Map<String, String> ongoingMap = new HashMap<>();
+        final Map<String, String> reportMap = new HashMap<>();
+        final AtomicInteger counter = new AtomicInteger(0);
         try(SequenceFile.Writer writer = SequenceFile.createWriter(conf,
                 SequenceFile.Writer.file(hdfswritepath), SequenceFile.Writer.keyClass(IntWritable.class),
                 SequenceFile.Writer.valueClass(Text.class))) {
 
-            final AtomicInteger counter = new AtomicInteger(0);
             final IntWritable key = new IntWritable(counter.get());
             final Text value = new Text();
-            final Map<String, String> ongoingMap = new HashMap<>();
-            final Map<String, String> reportMap = new HashMap<>();
 
             plugin.collect(api).forEach(content -> {
@@ -223,12 +225,13 @@ public class DnetCollectorWorkerApplication implements CommandLineRunner {
                     }
                 });
             }
             ongoingMap.put("ongoing", ""+counter.get());
             manager.sendMessage(new Message(workflowId, "Collection", MessageType.ONGOING, ongoingMap), rabbitOngoingQueue, true, false);
             reportMap.put("collected", ""+counter.get());
             manager.sendMessage(new Message(workflowId, "Collection", MessageType.REPORT, reportMap), rabbitReportQueue, true, false);
 
         }
+        manager.close();
     }
 
 }

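The body of the forEach lambda falls between these two hunks. A minimal sketch of the pattern it presumably implements, appending each collected record to the SequenceFile and reporting progress through the hoisted counter; the reporting interval and error handling here are assumptions, while key, value, writer, counter, manager, workflowId, ongoingMap and rabbitOngoingQueue all come from the diff above:

    plugin.collect(api).forEach(content -> {
        key.set(counter.getAndIncrement());   // reuse the hoisted counter as the record key
        value.set(content);
        try {
            writer.append(key, value);        // write the record to the HDFS sequence file
            if (counter.get() % 10 == 0) {    // assumed reporting interval
                ongoingMap.put("ongoing", "" + counter.get());
                manager.sendMessage(new Message(workflowId, "Collection", MessageType.ONGOING, ongoingMap),
                        rabbitOngoingQueue, true, false);
            }
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    });

Hoisting ongoingMap, reportMap and counter above the try block is what lets both the lambda and the post-loop reporting code share the same instances.
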
pom.xml
@@ -25,10 +25,6 @@
     </repository> </repositories> -->
-
-
-
-
 
     <!-- Add typical dependencies for a web application -->
     <dependencies>
         <dependency>
@@ -86,6 +82,17 @@
         </dependency>
     </dependencies>
 
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.springframework.boot</groupId>
+                <artifactId>spring-boot-maven-plugin</artifactId>
+                <configuration>
+                    <executable>true</executable>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>
 
 
 </project>

application.properties
@@ -2,8 +2,8 @@ spring.main.banner-mode = console
 logging.level.root = INFO
 
 spring.datasource.url=jdbc:postgresql://localhost:5432/mdstoremanager
-spring.datasource.username=
-spring.datasource.password=
+spring.datasource.username=dnet
+spring.datasource.password=dnetPwd
 
 spring.jpa.properties.hibernate.dialect = org.hibernate.dialect.PostgreSQLDialect

MessageManager.java
@@ -19,6 +19,9 @@ public class MessageManager {
 
     private final String password;
 
+    private Connection connection;
+
+    private Map<String, Channel> channels = new HashMap<>();
 
     private boolean durable;
@@ -59,9 +62,36 @@ public class MessageManager {
         channel.queueDeclare(queueName, durable, false, this.autodelete, args);
         return channel;
     }
-    public boolean sendMessage(final Message message, String queueName) throws Exception {
-        try (Connection connection = createConnection(); Channel channel = createChannel(connection, queueName, this.durable, this.autodelete)) {
+
+    private Channel getOrCreateChannel(final String queueName, boolean durable, boolean autodelete) throws Exception {
+        if (channels.containsKey(queueName)) {
+            return channels.get(queueName);
+        }
+
+        if (this.connection == null) {
+            this.connection = createConnection();
+        }
+        channels.put(queueName, createChannel(this.connection, queueName, durable, autodelete));
+        return channels.get(queueName);
+    }
+
+    public void close() throws IOException {
+        channels.values().forEach(ch -> {
+            try {
+                ch.close();
+            } catch (Exception e) {
+                // TODO LOG
+            }
+        });
+
+        this.connection.close();
+    }
+
+    public boolean sendMessage(final Message message, String queueName) throws Exception {
+        try {
+            Channel channel = getOrCreateChannel(queueName, this.durable, this.autodelete);
+            channel.basicPublish("", queueName, null, message.toString().getBytes());
+            return true;
+        } catch (Throwable e) {
@@ -70,8 +100,8 @@ public class MessageManager {
     }
 
     public boolean sendMessage(final Message message, String queueName, boolean durable_var, boolean autodelete_var) throws Exception {
-        try (Connection connection = createConnection(); Channel channel = createChannel(connection, queueName, durable_var, autodelete_var)) {
+        try {
+            Channel channel = getOrCreateChannel(queueName, durable_var, autodelete_var);
             channel.basicPublish("", queueName, null, message.toString().getBytes());
             return true;
         } catch (Throwable e) {

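Taken together, the MessageManager changes replace the previous connection-per-send model (a try-with-resources around createConnection() on every call) with one shared connection, a channel cached per queue, and an explicit close(). A minimal usage sketch under that reading; the host, credentials, queue name and workflowId are placeholders:

    // One manager instance can now be reused across many sends.
    final MessageManager manager = new MessageManager("localhost", "dnet", "dnetPwd", false, false, null);
    final Map<String, String> ongoingMap = new HashMap<>();
    ongoingMap.put("ongoing", "42");
    // The first send declares the queue and caches its channel on the shared connection...
    manager.sendMessage(new Message(workflowId, "Collection", MessageType.ONGOING, ongoingMap),
            "dnet_ongoing", true, false);
    // ...and repeat sends to the same queue reuse the cached channel instead of reconnecting.
    manager.sendMessage(new Message(workflowId, "Collection", MessageType.ONGOING, ongoingMap),
            "dnet_ongoing", true, false);
    manager.close(); // closes every cached channel, then the shared connection

The trade-off is that channels and the connection now outlive each send, so one caller must remember to invoke close(), as the DnetCollectorWorkerApplication hunk above and the TransformSparkJobNode hunk below both do.
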
|
@ -140,12 +140,19 @@ public class TransformSparkJobNode {
|
|||
|
||||
|
||||
if (rabbitHost != null) {
|
||||
|
||||
System.out.println("SEND FINAL REPORT");
|
||||
|
||||
final Map<String, String> reportMap = new HashMap<>();
|
||||
reportMap.put("inputItem" , ""+ totalItems.value());
|
||||
reportMap.put("invalidRecords", "" + errorItems.value());
|
||||
reportMap.put("mdStoreSize", "" + transformedItems.value());
|
||||
final MessageManager manager = new MessageManager(rabbitHost, rabbitUser, rabbitPassword, false, false, null);
|
||||
|
||||
|
||||
System.out.println(new Message(workflowId, "Transform", MessageType.REPORT, reportMap));
|
||||
manager.sendMessage(new Message(workflowId, "Transform", MessageType.REPORT, reportMap), rabbitReportQueue, true, false);
|
||||
manager.close();
|
||||
}
|
||||
|
||||
}
|
||||
|
|