diff --git a/.azure/azure-pipelines.yml b/.azure/azure-pipelines.yml
index 526ba8c39..a07bde981 100644
--- a/.azure/azure-pipelines.yml
+++ b/.azure/azure-pipelines.yml
@@ -16,6 +16,9 @@ pool:
vmImage: ubuntu-latest
steps:
+ - checkout: self
+ submodules: recursive
+
- task: CopyFiles@2
inputs:
SourceFolder: '$(Build.SourcesDirectory)'
diff --git a/docs/compatibility.md b/docs/compatibility.md
index 98f6598a9..14ecf6349 100644
--- a/docs/compatibility.md
+++ b/docs/compatibility.md
@@ -2,17 +2,18 @@
This table serves as a guide, suggesting which versions of individual submodules are best suited to accompany each version of the main module. It helps users ensure compatibility and smooth integration by recommending specific submodule versions for their chosen main module version.
| [ODE (this project)](https://github.com/usdot-jpo-ode/jpo-ode/releases) | [ACM](https://github.com/usdot-jpo-ode/asn1_codec/releases) | [PPM](https://github.com/usdot-jpo-ode/jpo-cvdp/releases) | [SEC](https://github.com/usdot-jpo-ode/jpo-security-svcs/releases) | [SDWD](https://github.com/usdot-jpo-ode/jpo-sdw-depositor/releases) | [S3D](https://github.com/usdot-jpo-ode/jpo-s3-deposit/releases) | [GJConverter](https://github.com/usdot-jpo-ode/jpo-geojsonconverter/releases) | [CMonitor](https://github.com/usdot-jpo-ode/jpo-conflictmonitor/releases) | [CVisualizer](https://github.com/usdot-jpo-ode/jpo-conflictvisualizer/releases) | [CVManager](https://github.com/usdot-jpo-ode/jpo-cvmanager/releases) | [MEC](https://github.com/usdot-jpo-ode/jpo-mec-deposit/releases) |
-|-------|-------|-------|-------|-------|-------|-------|-------|-------|-------|-------|
-| 5.1.0 | 3.2.0 | 1.6.0 | 1.7.0 | 1.10.0 | 1.7.1 | 3.2.0 | 3.1.0 | N/A | 2.0.0 | 1.0.0 |
-| 4.1.2 | 3.1.0 | 1.5.0 | 1.6.0 | 1.9.1 | 1.7.1 | 2.1.0 | 2.1.0 | 1.5.0 | 1.6.0 | N/A |
-| 4.0.0 | 3.0.0 | 1.5.0 | 1.5.0 | 1.9.0 | 1.7.0 | 2.0.0 | 2.0.0 | 1.5.0 | 1.5.0 | N/A |
-| 3.0.0 | 2.2.0 | 1.4.0 | 1.5.0 | 1.8.0 | 1.6.0 | 1.4.2 | 1.4.2 | 1.4.1 | 1.4.0 | N/A |
-| 2.1.0 | 2.1.0 | 1.3.0 | 1.4.0 | 1.7.0 | 1.5.0 | 1.3.0 | 1.3.0 | 1.3.0 | 1.3.0 | N/A |
-| 2.0.x | 2.0.0 | 1.3.0 | 1.4.0 | 1.6.0 | 1.4.0 | 1.2.0 | 1.2.0 | 1.2.0 | 1.2.0 | N/A |
-| 1.5.1 | 1.5.0 | 1.2.0 | 1.3.0 | 1.5.0 | 1.3.0 | 1.1.0 | 1.1.0 | 1.1.0 | 1.1.0 | N/A |
-| 1.4.1 | 1.4.1 | 1.1.1 | 1.2.1 | 1.4.1 | 1.2.1 | 1.0.0 | 1.0.1 | 1.0.1 | 1.0.1 | N/A |
-| 1.4.0 | 1.4.0 | 1.1.0 | 1.2.0 | 1.4.0 | 1.2.0 | N/A | N/A | N/A | N/A | N/A |
-| 1.3.0 | 1.3.0 | 1.0.0 | 1.0.1 | 1.3.0 | 1.1.0 | N/A | N/A | N/A | N/A | N/A |
+|-------------------------------------------------------------------------|-------|-------|-------|-------|-------|-------|-------|-------|-------|-------|
+| 6.0.0 | TBD - migration pending | TBD - migration pending | TBD - migration pending | TBD - migration pending | TBD - migration pending | TBD - migration pending | TBD - migration pending | TBD - migration pending | TBD - migration pending | TBD - migration pending |
+| 5.1.0 | 3.2.0 | 1.6.0 | 1.7.0 | 1.10.0 | 1.7.1 | 3.2.0 | 3.1.0 | N/A | 2.0.0 | 1.0.0 |
+| 4.1.2 | 3.1.0 | 1.5.0 | 1.6.0 | 1.9.1 | 1.7.1 | 2.1.0 | 2.1.0 | 1.5.0 | 1.6.0 | N/A |
+| 4.0.0 | 3.0.0 | 1.5.0 | 1.5.0 | 1.9.0 | 1.7.0 | 2.0.0 | 2.0.0 | 1.5.0 | 1.5.0 | N/A |
+| 3.0.0 | 2.2.0 | 1.4.0 | 1.5.0 | 1.8.0 | 1.6.0 | 1.4.2 | 1.4.2 | 1.4.1 | 1.4.0 | N/A |
+| 2.1.0 | 2.1.0 | 1.3.0 | 1.4.0 | 1.7.0 | 1.5.0 | 1.3.0 | 1.3.0 | 1.3.0 | 1.3.0 | N/A |
+| 2.0.x | 2.0.0 | 1.3.0 | 1.4.0 | 1.6.0 | 1.4.0 | 1.2.0 | 1.2.0 | 1.2.0 | 1.2.0 | N/A |
+| 1.5.1 | 1.5.0 | 1.2.0 | 1.3.0 | 1.5.0 | 1.3.0 | 1.1.0 | 1.1.0 | 1.1.0 | 1.1.0 | N/A |
+| 1.4.1 | 1.4.1 | 1.1.1 | 1.2.1 | 1.4.1 | 1.2.1 | 1.0.0 | 1.0.1 | 1.0.1 | 1.0.1 | N/A |
+| 1.4.0 | 1.4.0 | 1.1.0 | 1.2.0 | 1.4.0 | 1.2.0 | N/A | N/A | N/A | N/A | N/A |
+| 1.3.0 | 1.3.0 | 1.0.0 | 1.0.1 | 1.3.0 | 1.1.0 | N/A | N/A | N/A | N/A | N/A |
For example, if you're using ODE version 2.0.1, it's recommended to use ACM 2.0.0, PPM 1.3.0, SEC 1.4.0, SDWD 1.6.0, S3D 1.4.0, GJConverter 1.2.0, CMonitor 1.2.0, CVisualizer 1.2.0, and CVManager 1.2.0. While other combinations may work, these versions are suggested for the best compatibility.
diff --git a/jpo-ode-common/pom.xml b/jpo-ode-common/pom.xml
index c01fb219c..d5660ba7d 100644
--- a/jpo-ode-common/pom.xml
+++ b/jpo-ode-common/pom.xml
@@ -5,7 +5,7 @@
usdot.jpo.ode
jpo-ode
- 5.1.0
+ 6.0.0
jpo-ode-common
@@ -39,12 +39,11 @@
jakarta.xml.bind
jakarta.xml.bind-api
- 4.0.0
+ 4.0.5
- javax.websocket
- javax.websocket-client-api
- 1.1
+ jakarta.websocket
+ jakarta.websocket-client-api
com.esotericsoftware
diff --git a/jpo-ode-core/pom.xml b/jpo-ode-core/pom.xml
index c5ea2bfd1..7cc96730c 100644
--- a/jpo-ode-core/pom.xml
+++ b/jpo-ode-core/pom.xml
@@ -7,7 +7,7 @@
usdot.jpo.ode
jpo-ode
- 5.1.0
+ 6.0.0
jpo-ode-core
@@ -25,12 +25,12 @@
usdot.jpo.ode
jpo-ode-common
- 5.1.0
+ 6.0.0
usdot.jpo.ode
jpo-ode-plugins
- 5.1.0
+ 6.0.0
org.apache.httpcomponents
@@ -57,21 +57,6 @@
-
- org.apache.kafka
- kafka_2.11
- 0.10.1.0
-
-
- org.slf4j
- slf4j-log4j12
-
-
- log4j
- log4j
-
-
-
org.apache.kafka
kafka-streams
diff --git a/jpo-ode-plugins/pom.xml b/jpo-ode-plugins/pom.xml
index 6b1324f33..b4705224f 100644
--- a/jpo-ode-plugins/pom.xml
+++ b/jpo-ode-plugins/pom.xml
@@ -11,7 +11,7 @@
usdot.jpo.ode
jpo-ode
- 5.1.0
+ 6.0.0
@@ -27,7 +27,7 @@
usdot.jpo.ode
jpo-ode-common
- 5.1.0
+ 6.0.0
org.webjars
- webjars-locator
- 0.40
+ webjars-locator-lite
org.webjars
@@ -127,12 +130,12 @@
usdot.jpo.ode
jpo-ode-core
- 5.1.0
+ 6.0.0
usdot.jpo.ode
jpo-ode-plugins
- 5.1.0
+ 6.0.0
org.springframework
@@ -147,11 +150,7 @@
jakarta.annotation
jakarta.annotation-api
2.1.1
-
-
- javax.annotation
- javax.annotation-api
- 1.3.2
+ provided
com.networknt
diff --git a/jpo-ode-svcs/src/main/java/us/dot/its/jpo/ode/config/MetricsConfig.java b/jpo-ode-svcs/src/main/java/us/dot/its/jpo/ode/config/MetricsConfig.java
index 4727d86ad..673e2b2af 100644
--- a/jpo-ode-svcs/src/main/java/us/dot/its/jpo/ode/config/MetricsConfig.java
+++ b/jpo-ode-svcs/src/main/java/us/dot/its/jpo/ode/config/MetricsConfig.java
@@ -3,7 +3,7 @@
import io.micrometer.core.instrument.MeterRegistry;
import java.net.InetAddress;
import java.net.UnknownHostException;
-import org.springframework.boot.actuate.autoconfigure.metrics.MeterRegistryCustomizer;
+import org.springframework.boot.micrometer.metrics.autoconfigure.MeterRegistryCustomizer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@@ -28,12 +28,12 @@ public MeterRegistryCustomizer metricsCommonTags() {
private String getHostName() {
try {
- // Get hostname from environment variable if running in Kubernetes
+ // Get the hostname from the environment variable if running in Kubernetes
String hostFromEnv = System.getenv("HOSTNAME");
if (hostFromEnv != null && !hostFromEnv.isEmpty()) {
return hostFromEnv;
}
- // Fallback to system hostname for local deployments in Docker
+ // Fallback to the system hostname for local deployments in Docker
return InetAddress.getLocalHost().getHostName();
} catch (UnknownHostException e) {
return "unknown";
diff --git a/jpo-ode-svcs/src/main/java/us/dot/its/jpo/ode/kafka/KafkaConsumerConfig.java b/jpo-ode-svcs/src/main/java/us/dot/its/jpo/ode/kafka/KafkaConsumerConfig.java
index 4dd35f434..d4822b539 100644
--- a/jpo-ode-svcs/src/main/java/us/dot/its/jpo/ode/kafka/KafkaConsumerConfig.java
+++ b/jpo-ode-svcs/src/main/java/us/dot/its/jpo/ode/kafka/KafkaConsumerConfig.java
@@ -3,7 +3,7 @@
import java.util.HashMap;
import java.util.Map;
import lombok.extern.slf4j.Slf4j;
-import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
+import org.springframework.boot.kafka.autoconfigure.KafkaProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
diff --git a/jpo-ode-svcs/src/main/java/us/dot/its/jpo/ode/kafka/producer/KafkaProducerConfig.java b/jpo-ode-svcs/src/main/java/us/dot/its/jpo/ode/kafka/producer/KafkaProducerConfig.java
index 5380e3ecb..457386abe 100644
--- a/jpo-ode-svcs/src/main/java/us/dot/its/jpo/ode/kafka/producer/KafkaProducerConfig.java
+++ b/jpo-ode-svcs/src/main/java/us/dot/its/jpo/ode/kafka/producer/KafkaProducerConfig.java
@@ -6,7 +6,7 @@
import java.util.Map;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
-import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
+import org.springframework.boot.kafka.autoconfigure.KafkaProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
diff --git a/jpo-ode-svcs/src/main/java/us/dot/its/jpo/ode/udp/controller/UdpServicesController.java b/jpo-ode-svcs/src/main/java/us/dot/its/jpo/ode/udp/controller/UdpServicesController.java
index ae69b128c..e239fbe87 100644
--- a/jpo-ode-svcs/src/main/java/us/dot/its/jpo/ode/udp/controller/UdpServicesController.java
+++ b/jpo-ode-svcs/src/main/java/us/dot/its/jpo/ode/udp/controller/UdpServicesController.java
@@ -1,11 +1,11 @@
package us.dot.its.jpo.ode.udp.controller;
+import jakarta.annotation.PreDestroy;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
-import javax.annotation.PreDestroy;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
diff --git a/jpo-ode-svcs/src/main/java/us/dot/its/jpo/ode/udp/generic/GenericReceiver.java b/jpo-ode-svcs/src/main/java/us/dot/its/jpo/ode/udp/generic/GenericReceiver.java
index 0b5447bba..20d7c4825 100644
--- a/jpo-ode-svcs/src/main/java/us/dot/its/jpo/ode/udp/generic/GenericReceiver.java
+++ b/jpo-ode-svcs/src/main/java/us/dot/its/jpo/ode/udp/generic/GenericReceiver.java
@@ -1,6 +1,5 @@
package us.dot.its.jpo.ode.udp.generic;
-import io.netty.handler.codec.UnsupportedMessageTypeException;
import java.net.DatagramPacket;
import lombok.extern.slf4j.Slf4j;
import org.apache.tomcat.util.buf.HexUtils;
@@ -152,4 +151,18 @@ private void routeMessageByMessageType(
default -> throw new UnsupportedMessageTypeException(messageType);
}
}
-}
+
+ /**
+ * Exception class for Unsupported Message Types.
+ */
+ public static class UnsupportedMessageTypeException extends Exception {
+ /**
+ * Constructs a new UnsupportedMessageTypeException with the specified detail message.
+ *
+ * @param message the detail message
+ */
+ public UnsupportedMessageTypeException(String message) {
+ super(message);
+ }
+ }
+}
\ No newline at end of file
diff --git a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/KafkaProducerConfigTest.java b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/KafkaProducerConfigTest.java
index cc71a9951..83a02bdde 100644
--- a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/KafkaProducerConfigTest.java
+++ b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/KafkaProducerConfigTest.java
@@ -18,7 +18,7 @@
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
-import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
+import org.springframework.boot.kafka.autoconfigure.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.context.annotation.Bean;
@@ -27,6 +27,7 @@
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
+import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
import org.springframework.kafka.test.utils.KafkaTestUtils;
import org.springframework.test.annotation.DirtiesContext;
@@ -35,11 +36,11 @@
import us.dot.its.jpo.ode.kafka.OdeKafkaProperties.Producer;
import us.dot.its.jpo.ode.kafka.producer.KafkaProducerConfig;
import us.dot.its.jpo.ode.model.OdeObject;
-import us.dot.its.jpo.ode.test.utilities.EmbeddedKafkaHolder;
@Slf4j
@ExtendWith(SpringExtension.class)
@DirtiesContext
+@EmbeddedKafka
@EnableConfigurationProperties({ KafkaProperties.class })
@Import({ KafkaProducerConfigTest.KafkaProducerConfigTestConfig.class, SerializationConfig.class })
class KafkaProducerConfigTest {
@@ -50,13 +51,12 @@ class KafkaProducerConfigTest {
@Autowired
@Qualifier("testOdeKafkaProperties")
OdeKafkaProperties odeKafkaProperties;
- @Autowired
- @Qualifier("testMeterRegistry")
- MeterRegistry meterRegistry;
+
XmlMapper xmlMapper;
ObjectMapper objectMapper = new ObjectMapper();
- EmbeddedKafkaBroker embeddedKafka = EmbeddedKafkaHolder.getEmbeddedKafka();
+ @Autowired
+ EmbeddedKafkaBroker embeddedKafka;
@Test
void odeDataProducerFactory_shouldReturnNonNull() {
@@ -73,10 +73,14 @@ void odeDataProducerFactory_shouldReturnDefaultKafkaProducerFactory() {
@Test
void kafkaTemplateInterceptorPreventsSendingToDisabledTopics() {
- EmbeddedKafkaHolder.addTopics(odeKafkaProperties.getDisabledTopics().toArray(new String[0]));
- var consumerProps = KafkaTestUtils.consumerProps("interceptor-disabled",
- "false",
- embeddedKafka);
+ for (String topic : odeKafkaProperties.getDisabledTopics()) {
+ if (!Set.of(embeddedKafka.getTopics()).contains(topic)) {
+ embeddedKafka.addTopics(topic);
+ }
+ }
+ var consumerProps = KafkaTestUtils.consumerProps(embeddedKafka,
+ "interceptor-disabled",
+ false);
var cf = new DefaultKafkaConsumerFactory<>(consumerProps,
new StringDeserializer(), new StringDeserializer());
var consumer = cf.createConsumer();
@@ -102,9 +106,11 @@ void kafkaTemplateInterceptorPreventsSendingToDisabledTopics() {
@Test
void kafkaTemplateInterceptorAllowsSendingToTopicsNotInDisabledSet() {
String enabledTopic = "topic.enabled" + this.getClass().getSimpleName();
- EmbeddedKafkaHolder.addTopics(enabledTopic);
+ if (!Set.of(embeddedKafka.getTopics()).contains(enabledTopic)) {
+ embeddedKafka.addTopics(enabledTopic);
+ }
- var consumerProps = KafkaTestUtils.consumerProps("interceptor-enabled", "false", embeddedKafka);
+ var consumerProps = KafkaTestUtils.consumerProps(embeddedKafka, "interceptor-enabled", false);
var cf = new DefaultKafkaConsumerFactory<>(consumerProps,
new StringDeserializer(), new StringDeserializer());
var consumer = cf.createConsumer();
@@ -124,10 +130,12 @@ void kafkaTemplateInterceptorAllowsSendingToTopicsNotInDisabledSet() {
@Test
void kafkaTemplateInterceptorCanSendAfterAttemptToSendToDisabledTopic() {
- String enabledTopic = "topic.enabled" + this.getClass().getSimpleName();
- EmbeddedKafkaHolder.addTopics(enabledTopic);
+ String enabledTopic = "topic.enabled" + this.getClass().getSimpleName() + "2";
+ if (!Set.of(embeddedKafka.getTopics()).contains(enabledTopic)) {
+ embeddedKafka.addTopics(enabledTopic);
+ }
- var consumerProps = KafkaTestUtils.consumerProps("send-after", "false", embeddedKafka);
+ var consumerProps = KafkaTestUtils.consumerProps(embeddedKafka, "send-after", false);
var cf = new DefaultKafkaConsumerFactory<>(consumerProps,
new StringDeserializer(), new StringDeserializer());
var consumer = cf.createConsumer();
diff --git a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/Asn1DecodedDataRouterApprovalTest.java b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/Asn1DecodedDataRouterApprovalTest.java
index 41de57efb..e93722cb7 100644
--- a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/Asn1DecodedDataRouterApprovalTest.java
+++ b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/Asn1DecodedDataRouterApprovalTest.java
@@ -1,25 +1,27 @@
package us.dot.its.jpo.ode.kafka.listeners;
import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.assertj.core.api.Assertions.assertThat;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import java.util.List;
-import java.util.Map;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.consumer.Consumer;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
-import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
+import org.springframework.boot.kafka.autoconfigure.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.test.context.SpringBootTest;
-import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
+import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
-import org.springframework.kafka.test.EmbeddedKafkaBroker;
-import org.springframework.kafka.test.utils.KafkaTestUtils;
+import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.test.annotation.DirtiesContext;
-import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.TestPropertySource;
+
import us.dot.its.jpo.ode.config.SerializationConfig;
import us.dot.its.jpo.ode.kafka.KafkaConsumerConfig;
import us.dot.its.jpo.ode.kafka.OdeKafkaProperties;
@@ -30,7 +32,6 @@
import us.dot.its.jpo.ode.kafka.topics.RawEncodedJsonTopics;
import us.dot.its.jpo.ode.model.OdeMessageFrameData;
import us.dot.its.jpo.ode.test.utilities.ApprovalTestCase;
-import us.dot.its.jpo.ode.test.utilities.EmbeddedKafkaHolder;
import us.dot.its.jpo.ode.udp.controller.UDPReceiverProperties;
@Slf4j
@@ -42,57 +43,58 @@
KafkaConsumerConfig.class,
SerializationConfig.class,
TestMetricsConfig.class,
+ UDPReceiverProperties.class,
+ OdeKafkaProperties.class,
+ RawEncodedJsonTopics.class,
+ JsonTopics.class
},
properties = {
"ode.kafka.topics.asn1.decoder-output=topic.Asn1DecoderOutputRouterApprovalTest",
"ode.kafka.topics.json.map=topic.OdeMapJsonRouterApprovalTest"
})
+@EmbeddedKafka
+@TestPropertySource(properties = {"spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}"})
@EnableConfigurationProperties
-@ContextConfiguration(classes = {
- UDPReceiverProperties.class, OdeKafkaProperties.class,
- RawEncodedJsonTopics.class, KafkaProperties.class, JsonTopics.class
-})
@DirtiesContext
class Asn1DecodedDataRouterApprovalTest {
@Value("${ode.kafka.topics.asn1.decoder-output}")
private String decoderOutputTopic;
- @Value("${ode.kafka.topics.json.map}")
- private String jsonMapTopic;
-
@Autowired
KafkaTemplate producer;
- EmbeddedKafkaBroker embeddedKafka = EmbeddedKafkaHolder.getEmbeddedKafka();
+ private final ObjectMapper mapper = new ObjectMapper();
- @Test
- void testAsn1DecodedDataRouter_MAPDataFlow() throws IOException {
- String[] topics = {decoderOutputTopic, jsonMapTopic};
- EmbeddedKafkaHolder.addTopics(topics);
-
- Map consumerProps =
- KafkaTestUtils.consumerProps("testT", "false", embeddedKafka);
- DefaultKafkaConsumerFactory cf =
- new DefaultKafkaConsumerFactory<>(consumerProps);
+ private CountDownLatch latch;
+ private String actualPayload;
- Consumer consumer = cf.createConsumer();
- embeddedKafka.consumeFromEmbeddedTopics(consumer, jsonMapTopic);
+ @Test
+ void testAsn1DecodedDataRouter_MAPDataFlow() throws IOException, InterruptedException {
- @SuppressWarnings("checkstyle:linelength")
List jsonTestCases = ApprovalTestCase.deserializeTestCases(
"src/test/resources/us.dot.its.jpo.ode.udp.map/Asn1DecoderRouter_ApprovalTestCases_MapJson.json");
for (ApprovalTestCase testCase : jsonTestCases) {
+ latch = new CountDownLatch(1);
+ actualPayload = null;
+
producer.send(decoderOutputTopic, testCase.getInput());
- String received = KafkaTestUtils.getSingleRecord(consumer, jsonMapTopic).value();
- ObjectMapper mapper = new ObjectMapper();
- OdeMessageFrameData receivedMapData = mapper.readValue(received, OdeMessageFrameData.class);
+ assertThat(latch.await(3, TimeUnit.SECONDS)).isTrue();
+
+ OdeMessageFrameData receivedMapData = mapper.readValue(actualPayload, OdeMessageFrameData.class);
+
OdeMessageFrameData expectedMapData = mapper.readValue(testCase.getExpected(), OdeMessageFrameData.class);
+
assertEquals(expectedMapData.toJson(), receivedMapData.toJson(),
"Failed test case: " + testCase.getDescription());
}
- consumer.close();
+ }
+
+ @KafkaListener(topics = "topic.OdeMapJsonRouterApprovalTest")
+ public void receive(String payload) {
+ this.actualPayload = payload;
+ latch.countDown();
}
}
\ No newline at end of file
diff --git a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/Asn1DecodedDataRouterTest.java b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/Asn1DecodedDataRouterTest.java
index 3d5899dec..d0e9ae634 100644
--- a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/Asn1DecodedDataRouterTest.java
+++ b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/Asn1DecodedDataRouterTest.java
@@ -16,17 +16,18 @@
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.assertj.core.util.Arrays;
+import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
+import org.springframework.boot.kafka.autoconfigure.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
+import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.kafka.test.utils.KafkaTestUtils;
import org.springframework.test.annotation.DirtiesContext;
-import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.TestPropertySource;
import us.dot.its.jpo.ode.config.SerializationConfig;
import us.dot.its.jpo.ode.kafka.KafkaConsumerConfig;
@@ -39,7 +40,6 @@
import us.dot.its.jpo.ode.kafka.topics.RawEncodedJsonTopics;
import us.dot.its.jpo.ode.model.OdeLogMetadata.RecordType;
import us.dot.its.jpo.ode.model.OdeMessageFrameData;
-import us.dot.its.jpo.ode.test.utilities.EmbeddedKafkaHolder;
import us.dot.its.jpo.ode.udp.controller.UDPReceiverProperties;
import us.dot.its.jpo.ode.util.JsonUtils;
@@ -48,16 +48,16 @@
classes = {KafkaProperties.class, JsonTopics.class, Asn1CoderTopics.class,
KafkaConsumerConfig.class, KafkaProducerConfig.class, RawEncodedJsonTopics.class,
Asn1CoderTopics.class, OdeKafkaProperties.class, Asn1DecodedDataRouter.class,
- SerializationConfig.class, TestMetricsConfig.class},
+ SerializationConfig.class, TestMetricsConfig.class, UDPReceiverProperties.class},
properties = {"ode.kafka.disabled-topics="})
@EnableConfigurationProperties
-@ContextConfiguration(
- classes = {UDPReceiverProperties.class, OdeKafkaProperties.class, KafkaProperties.class})
@DirtiesContext
+@EmbeddedKafka
@TestPropertySource(properties = "logging.level.org.springframework.kafka=DEBUG")
class Asn1DecodedDataRouterTest {
- EmbeddedKafkaBroker embeddedKafka = EmbeddedKafkaHolder.getEmbeddedKafka();
+ @Autowired
+ EmbeddedKafkaBroker embeddedKafka;
@Autowired
KafkaTemplate kafkaStringTemplate;
@Autowired
@@ -72,12 +72,12 @@ class Asn1DecodedDataRouterTest {
@Test
void testAsn1DecodedDataRouterBSMDataFlow() throws IOException {
String[] topics = Arrays.array(jsonTopics.getBsm());
- EmbeddedKafkaHolder.addTopics(topics);
+ embeddedKafka.addTopics(topics);
String baseTestData =
loadFromResource("us/dot/its/jpo/ode/services/asn1/decoder-output-bsm.xml");
- var consumerProps = KafkaTestUtils.consumerProps("bsmDecoderTest", "false", embeddedKafka);
+ var consumerProps = KafkaTestUtils.consumerProps(embeddedKafka, "bsmDecoderTest", false);
var consumerFactory = new DefaultKafkaConsumerFactory<>(consumerProps, new StringDeserializer(),
new StringDeserializer());
var testConsumer = consumerFactory.createConsumer();
@@ -116,15 +116,16 @@ void testAsn1DecodedDataRouterBSMDataFlow() throws IOException {
testConsumer.close();
}
+ @Disabled("466943")
@Test
void testAsn1DecodedDataRouterTIMDataFlow() throws IOException {
String[] topics = Arrays.array(jsonTopics.getTim());
- EmbeddedKafkaHolder.addTopics(topics);
+ embeddedKafka.addTopics(topics);
String baseTestData =
loadFromResource("us/dot/its/jpo/ode/services/asn1/decoder-output-tim.xml");
- var consumerProps = KafkaTestUtils.consumerProps("timDecoderTest", "false", embeddedKafka);
+ var consumerProps = KafkaTestUtils.consumerProps(embeddedKafka, "timDecoderTest", false);
var consumerFactory = new DefaultKafkaConsumerFactory<>(consumerProps, new StringDeserializer(),
new StringDeserializer());
var testConsumer = consumerFactory.createConsumer();
@@ -165,12 +166,12 @@ void testAsn1DecodedDataRouterTIMDataFlow() throws IOException {
@Test
void testAsn1DecodedDataRouter_SPaTDataFlow() throws IOException {
String[] topics = Arrays.array(jsonTopics.getSpat());
- EmbeddedKafkaHolder.addTopics(topics);
+ embeddedKafka.addTopics(topics);
String baseTestData =
loadFromResource("us/dot/its/jpo/ode/services/asn1/decoder-output-spat.xml");
- var consumerProps = KafkaTestUtils.consumerProps("spatDecoderTest", "false", embeddedKafka);
+ var consumerProps = KafkaTestUtils.consumerProps(embeddedKafka, "spatDecoderTest", false);
var consumerFactory = new DefaultKafkaConsumerFactory<>(consumerProps, new StringDeserializer(),
new StringDeserializer());
var testConsumer = consumerFactory.createConsumer();
@@ -210,12 +211,12 @@ void testAsn1DecodedDataRouter_SPaTDataFlow() throws IOException {
@Test
void testAsn1DecodedDataRouter_SSMDataFlow() throws IOException {
String[] topics = Arrays.array(jsonTopics.getSsm());
- EmbeddedKafkaHolder.addTopics(topics);
+ embeddedKafka.addTopics(topics);
String baseTestData =
loadFromResource("us/dot/its/jpo/ode/services/asn1/decoder-output-ssm.xml");
- var consumerProps = KafkaTestUtils.consumerProps("ssmDecoderTest", "false", embeddedKafka);
+ var consumerProps = KafkaTestUtils.consumerProps(embeddedKafka, "ssmDecoderTest", false);
var consumerFactory = new DefaultKafkaConsumerFactory<>(consumerProps, new StringDeserializer(),
new StringDeserializer());
var testConsumer = consumerFactory.createConsumer();
@@ -254,12 +255,12 @@ void testAsn1DecodedDataRouter_SSMDataFlow() throws IOException {
@Test
void testAsn1DecodedDataRouter_SRMDataFlow() throws IOException {
String[] topics = Arrays.array(jsonTopics.getSrm());
- EmbeddedKafkaHolder.addTopics(topics);
+ embeddedKafka.addTopics(topics);
String baseTestData =
loadFromResource("us/dot/its/jpo/ode/services/asn1/decoder-output-srm.xml");
- var consumerProps = KafkaTestUtils.consumerProps("srmDecoderTest", "false", embeddedKafka);
+ var consumerProps = KafkaTestUtils.consumerProps(embeddedKafka, "srmDecoderTest", false);
var consumerFactory = new DefaultKafkaConsumerFactory<>(consumerProps, new StringDeserializer(),
new StringDeserializer());
var testConsumer = consumerFactory.createConsumer();
@@ -298,12 +299,12 @@ void testAsn1DecodedDataRouter_SRMDataFlow() throws IOException {
@Test
void testAsn1DecodedDataRouter_PSMDataFlow() throws IOException {
String[] topics = Arrays.array(jsonTopics.getPsm());
- EmbeddedKafkaHolder.addTopics(topics);
+ embeddedKafka.addTopics(topics);
String baseTestData =
loadFromResource("us/dot/its/jpo/ode/services/asn1/decoder-output-psm.xml");
- var consumerProps = KafkaTestUtils.consumerProps("psmDecoderTest", "false", embeddedKafka);
+ var consumerProps = KafkaTestUtils.consumerProps(embeddedKafka, "psmDecoderTest", false);
var consumerFactory = new DefaultKafkaConsumerFactory<>(consumerProps, new StringDeserializer(),
new StringDeserializer());
var testConsumer = consumerFactory.createConsumer();
@@ -342,12 +343,12 @@ void testAsn1DecodedDataRouter_PSMDataFlow() throws IOException {
@Test
void testAsn1DecodedDataRouter_MAPDataFlow() throws IOException {
String[] topics = Arrays.array(jsonTopics.getMap());
- EmbeddedKafkaHolder.addTopics(topics);
+ embeddedKafka.addTopics(topics);
String baseTestData =
loadFromResource("us/dot/its/jpo/ode/services/asn1/decoder-output-map.xml");
- var consumerProps = KafkaTestUtils.consumerProps("mapDecoderTest", "false", embeddedKafka);
+ var consumerProps = KafkaTestUtils.consumerProps(embeddedKafka, "mapDecoderTest", false);
var consumerFactory = new DefaultKafkaConsumerFactory<>(consumerProps, new StringDeserializer(),
new StringDeserializer());
var testConsumer = consumerFactory.createConsumer();
@@ -386,12 +387,12 @@ void testAsn1DecodedDataRouter_MAPDataFlow() throws IOException {
@Test
void testAsn1DecodedDataRouter_SDSMDataFlow() throws IOException {
String[] topics = Arrays.array(jsonTopics.getSdsm());
- EmbeddedKafkaHolder.addTopics(topics);
+ embeddedKafka.addTopics(topics);
String baseTestData =
loadFromResource("us/dot/its/jpo/ode/services/asn1/decoder-output-sdsm.xml");
- var consumerProps = KafkaTestUtils.consumerProps("sdsmDecoderTest", "false", embeddedKafka);
+ var consumerProps = KafkaTestUtils.consumerProps(embeddedKafka, "sdsmDecoderTest", false);
var consumerFactory = new DefaultKafkaConsumerFactory<>(consumerProps, new StringDeserializer(),
new StringDeserializer());
var testConsumer = consumerFactory.createConsumer();
@@ -431,12 +432,12 @@ void testAsn1DecodedDataRouter_SDSMDataFlow() throws IOException {
@Test
void testAsn1DecodedDataRouter_RTCMDataFlow() throws IOException {
String[] topics = Arrays.array(jsonTopics.getRtcm());
- EmbeddedKafkaHolder.addTopics(topics);
+ embeddedKafka.addTopics(topics);
String baseTestData =
loadFromResource("us/dot/its/jpo/ode/services/asn1/decoder-output-rtcm.xml");
- var consumerProps = KafkaTestUtils.consumerProps("rtcmDecoderTest", "false", embeddedKafka);
+ var consumerProps = KafkaTestUtils.consumerProps(embeddedKafka, "rtcmDecoderTest", false);
var consumerFactory = new DefaultKafkaConsumerFactory<>(consumerProps, new StringDeserializer(),
new StringDeserializer());
var testConsumer = consumerFactory.createConsumer();
@@ -476,12 +477,12 @@ void testAsn1DecodedDataRouter_RTCMDataFlow() throws IOException {
@Test
void testAsn1DecodedDataRouter_RSMDataFlow() throws IOException {
String[] topics = Arrays.array(jsonTopics.getRsm());
- EmbeddedKafkaHolder.addTopics(topics);
+ embeddedKafka.addTopics(topics);
String baseTestData =
loadFromResource("us/dot/its/jpo/ode/services/asn1/decoder-output-rsm.xml");
- var consumerProps = KafkaTestUtils.consumerProps("rsmDecoderTest", "false", embeddedKafka);
+ var consumerProps = KafkaTestUtils.consumerProps(embeddedKafka, "rsmDecoderTest", false);
var consumerFactory = new DefaultKafkaConsumerFactory<>(consumerProps, new StringDeserializer(),
new StringDeserializer());
var testConsumer = consumerFactory.createConsumer();
diff --git a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedBSMJsonRouterTest.java b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedBSMJsonRouterTest.java
index 57ba0bd7f..9713b8b5d 100644
--- a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedBSMJsonRouterTest.java
+++ b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedBSMJsonRouterTest.java
@@ -5,21 +5,22 @@
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
-import java.util.Map;
-import org.apache.kafka.clients.consumer.Consumer;
-import org.apache.kafka.common.serialization.StringDeserializer;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
import org.json.JSONException;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
+import org.springframework.boot.kafka.autoconfigure.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.test.context.SpringBootTest;
-import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
+import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
-import org.springframework.kafka.test.utils.KafkaTestUtils;
+import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.test.annotation.DirtiesContext;
-import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.TestPropertySource;
import us.dot.its.jpo.ode.config.SerializationConfig;
import us.dot.its.jpo.ode.kafka.KafkaConsumerConfig;
import us.dot.its.jpo.ode.kafka.OdeKafkaProperties;
@@ -28,56 +29,57 @@
import us.dot.its.jpo.ode.kafka.listeners.json.RawEncodedJsonService;
import us.dot.its.jpo.ode.kafka.producer.KafkaProducerConfig;
import us.dot.its.jpo.ode.kafka.topics.RawEncodedJsonTopics;
-import us.dot.its.jpo.ode.test.utilities.EmbeddedKafkaHolder;
import us.dot.its.jpo.ode.udp.controller.UDPReceiverProperties;
@SpringBootTest(
classes = { KafkaProducerConfig.class, KafkaConsumerConfig.class, RawEncodedBSMJsonRouter.class,
- RawEncodedJsonService.class, SerializationConfig.class, TestMetricsConfig.class, },
- properties = {"ode.kafka.topics.raw-encoded-json.bsm=topic.Asn1DecoderTestBSMJSON",
- "ode.kafka.topics.asn1.decoder-input=topic.Asn1DecoderBSMInput"})
+ RawEncodedJsonService.class, SerializationConfig.class, TestMetricsConfig.class,
+ UDPReceiverProperties.class, OdeKafkaProperties.class,
+ RawEncodedJsonTopics.class, KafkaProperties.class},
+ properties = {"ode.kafka.topics.asn1.decoder-input=topic.Asn1DecoderBSMInput",
+ "ode.kafka.topics.raw-encoded-json.bsm=topic.OdeRawEncodedBSMJson"})
+@EmbeddedKafka
+@TestPropertySource(properties = "spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}")
@EnableConfigurationProperties
-@ContextConfiguration(classes = {UDPReceiverProperties.class, OdeKafkaProperties.class,
- RawEncodedJsonTopics.class, KafkaProperties.class})
@DirtiesContext
class RawEncodedBSMJsonRouterTest {
- @Value(value = "${ode.kafka.topics.raw-encoded-json.bsm}")
- private String rawEncodedBsmJson;
-
- @Value(value = "${ode.kafka.topics.asn1.decoder-input}")
- private String asn1DecoderInput;
+ @Autowired
+ RawEncodedJsonTopics rawEncodedJsonTopics;
@Autowired
KafkaTemplate kafkaTemplate;
- @Test
- void testListen() throws JSONException, IOException {
- var embeddedKafka = EmbeddedKafkaHolder.getEmbeddedKafka();
- EmbeddedKafkaHolder.addTopics(asn1DecoderInput, rawEncodedBsmJson);
+  private CompletableFuture<String> future;
- Map consumerProps =
- KafkaTestUtils.consumerProps("Asn1DecodeBSMJSONTestConsumer", "false", embeddedKafka);
- var cf = new DefaultKafkaConsumerFactory<>(consumerProps, new StringDeserializer(),
- new StringDeserializer());
- Consumer testConsumer = cf.createConsumer();
- embeddedKafka.consumeFromAnEmbeddedTopic(testConsumer, asn1DecoderInput);
+ @Test
+ void testListen() throws JSONException, IOException, InterruptedException {
+ future = new CompletableFuture<>();
var classLoader = getClass().getClassLoader();
InputStream inputStream = classLoader
.getResourceAsStream("us/dot/its/jpo/ode/kafka/listeners/asn1/decoder-input-bsm.json");
assert inputStream != null;
var bsmJson = new String(inputStream.readAllBytes(), StandardCharsets.UTF_8);
- kafkaTemplate.send(rawEncodedBsmJson, bsmJson);
+ kafkaTemplate.send(rawEncodedJsonTopics.getBsm(), bsmJson);
inputStream =
classLoader.getResourceAsStream("us/dot/its/jpo/ode/kafka/listeners/asn1/expected-bsm.xml");
assert inputStream != null;
var expectedBsm = new String(inputStream.readAllBytes(), StandardCharsets.UTF_8);
- var produced = KafkaTestUtils.getSingleRecord(testConsumer, asn1DecoderInput);
- var odeBsmData = produced.value();
+ String odeBsmData;
+ try {
+ odeBsmData = future.get(3, TimeUnit.SECONDS);
+ } catch (ExecutionException | TimeoutException e) {
+ throw new AssertionError("BSM message was not received within the timeout period", e);
+ }
+
assertEquals(expectedBsm, odeBsmData);
- testConsumer.close();
}
+
+  @KafkaListener(topics = {"topic.Asn1DecoderBSMInput"}, groupId = "test-group")
+ public void receive(String payload) {
+ future.complete(payload);
+ }
}
\ No newline at end of file
diff --git a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedMAPJsonRouterTest.java b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedMAPJsonRouterTest.java
index 3042d84d4..5ee803dd1 100644
--- a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedMAPJsonRouterTest.java
+++ b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedMAPJsonRouterTest.java
@@ -5,22 +5,24 @@
import java.io.IOException;
import java.util.List;
-import java.util.Map;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.consumer.Consumer;
-import org.apache.kafka.common.serialization.StringDeserializer;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
-import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
+import org.springframework.boot.kafka.autoconfigure.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.test.context.SpringBootTest;
-import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
+import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
-import org.springframework.kafka.test.EmbeddedKafkaBroker;
-import org.springframework.kafka.test.utils.KafkaTestUtils;
+import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.test.annotation.DirtiesContext;
-import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.TestPropertySource;
+
import us.dot.its.jpo.ode.config.SerializationConfig;
import us.dot.its.jpo.ode.kafka.KafkaConsumerConfig;
import us.dot.its.jpo.ode.kafka.OdeKafkaProperties;
@@ -30,7 +32,6 @@
import us.dot.its.jpo.ode.kafka.producer.KafkaProducerConfig;
import us.dot.its.jpo.ode.kafka.topics.RawEncodedJsonTopics;
import us.dot.its.jpo.ode.test.utilities.ApprovalTestCase;
-import us.dot.its.jpo.ode.test.utilities.EmbeddedKafkaHolder;
import us.dot.its.jpo.ode.udp.controller.UDPReceiverProperties;
@Slf4j
@@ -42,55 +43,58 @@
RawEncodedJsonService.class,
SerializationConfig.class,
TestMetricsConfig.class,
+ UDPReceiverProperties.class,
+ OdeKafkaProperties.class,
+ RawEncodedJsonTopics.class,
+ KafkaProperties.class
},
properties = {
"ode.kafka.topics.raw-encoded-json.map=topic.Asn1DecoderTestMAPJSON",
"ode.kafka.topics.asn1.decoder-input=topic.Asn1DecoderMAPInput"
})
-@EnableConfigurationProperties
-@ContextConfiguration(classes = {
- UDPReceiverProperties.class, OdeKafkaProperties.class,
- RawEncodedJsonTopics.class, KafkaProperties.class
+@EmbeddedKafka
+@TestPropertySource(properties = {
+ "spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}"
})
+@EnableConfigurationProperties
@DirtiesContext
class RawEncodedMAPJsonRouterTest {
- @Value(value = "${ode.kafka.topics.raw-encoded-json.map}")
+ @Value("${ode.kafka.topics.raw-encoded-json.map}")
private String rawEncodedMapJson;
- @Value(value = "${ode.kafka.topics.asn1.decoder-input}")
- private String asn1DecoderInput;
@Autowired
- KafkaTemplate producer;
+ KafkaTemplate kafkaTemplate;
- private static final EmbeddedKafkaBroker embeddedKafka = EmbeddedKafkaHolder.getEmbeddedKafka();
+  private CompletableFuture<String> future;
@Test
- void testProcess_ApprovalTest() throws IOException {
- String[] topics = {rawEncodedMapJson, asn1DecoderInput};
- EmbeddedKafkaHolder.addTopics(topics);
+ void testProcess_ApprovalTest() throws IOException, InterruptedException {
String path =
"src/test/resources/us.dot.its.jpo.ode.udp.map/JSONEncodedMAP_to_Asn1DecoderInput_Validation.json";
List approvalTestCases = deserializeTestCases(path);
- Map consumerProps =
- KafkaTestUtils.consumerProps("Asn1DecodeMapJSONTestConsumer", "false", embeddedKafka);
- var cf =
- new DefaultKafkaConsumerFactory<>(consumerProps,
- new StringDeserializer(), new StringDeserializer());
- Consumer testConsumer = cf.createConsumer();
- embeddedKafka.consumeFromAnEmbeddedTopic(testConsumer, asn1DecoderInput);
-
for (ApprovalTestCase approvalTestCase : approvalTestCases) {
- // produce the test case input to the topic for consumption by the asn1RawMAPJSONConsumer
- producer.send(rawEncodedMapJson, approvalTestCase.getInput());
- var actualRecord =
- KafkaTestUtils.getSingleRecord(testConsumer, asn1DecoderInput);
- assertEquals(approvalTestCase.getExpected(), actualRecord.value(),
+ future = new CompletableFuture<>();
+
+ kafkaTemplate.send(rawEncodedMapJson, approvalTestCase.getInput());
+
+ String actualPayload;
+ try {
+ actualPayload = future.get(3, TimeUnit.SECONDS);
+ } catch (ExecutionException | TimeoutException e) {
+ throw new AssertionError("MAP message was not received within the timeout period", e);
+ }
+
+ assertEquals(approvalTestCase.getExpected(), actualPayload,
approvalTestCase.getDescription());
}
- testConsumer.close();
+ }
+
+  @KafkaListener(topics = {"topic.Asn1DecoderMAPInput"}, groupId = "test-group")
+ public void receive(String payload) {
+ future.complete(payload);
}
}
\ No newline at end of file
diff --git a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedPSMJsonRouterTest.java b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedPSMJsonRouterTest.java
index 242b27529..39f79eecf 100644
--- a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedPSMJsonRouterTest.java
+++ b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedPSMJsonRouterTest.java
@@ -5,19 +5,25 @@
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
-import java.util.Map;
-import org.apache.kafka.common.serialization.StringDeserializer;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
import org.json.JSONException;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
+import org.springframework.boot.kafka.autoconfigure.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.test.context.ConfigDataApplicationContextInitializer;
import org.springframework.boot.test.context.SpringBootTest;
-import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
+import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
-import org.springframework.kafka.test.utils.KafkaTestUtils;
+import org.springframework.kafka.test.context.EmbeddedKafka;
+import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.TestPropertySource;
+
import us.dot.its.jpo.ode.config.SerializationConfig;
import us.dot.its.jpo.ode.kafka.KafkaConsumerConfig;
import us.dot.its.jpo.ode.kafka.OdeKafkaProperties;
@@ -27,7 +33,6 @@
import us.dot.its.jpo.ode.kafka.producer.KafkaProducerConfig;
import us.dot.its.jpo.ode.kafka.topics.Asn1CoderTopics;
import us.dot.its.jpo.ode.kafka.topics.RawEncodedJsonTopics;
-import us.dot.its.jpo.ode.test.utilities.EmbeddedKafkaHolder;
@SpringBootTest(
classes = {
@@ -44,32 +49,27 @@
"ode.kafka.topics.raw-encoded-json.psm=topic.Asn1DecoderTestPSMJSON",
"ode.kafka.topics.asn1.decoder-input=topic.Asn1DecoderPSMInput"
})
+@EmbeddedKafka
+@TestPropertySource(properties = {"spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}"})
@ContextConfiguration(initializers = ConfigDataApplicationContextInitializer.class)
@EnableConfigurationProperties(value = {
OdeKafkaProperties.class,
Asn1CoderTopics.class,
RawEncodedJsonTopics.class})
+@DirtiesContext
class RawEncodedPSMJsonRouterTest {
- @Autowired
- Asn1CoderTopics asn1CoderTopics;
@Autowired
RawEncodedJsonTopics rawEncodedJsonTopics;
@Autowired
private KafkaTemplate kafkaTemplate;
+  private CompletableFuture<String> future;
+
@Test
- void testListen() throws JSONException, IOException {
- var embeddedKafka = EmbeddedKafkaHolder.getEmbeddedKafka();
- EmbeddedKafkaHolder.addTopics(asn1CoderTopics.getDecoderInput(), rawEncodedJsonTopics.getPsm());
+ void testListen() throws JSONException, IOException, InterruptedException {
- Map consumerProps =
- KafkaTestUtils.consumerProps("RawEncodedPSMJsonRouterTest", "false", embeddedKafka);
- var cf =
- new DefaultKafkaConsumerFactory<>(consumerProps,
- new StringDeserializer(), new StringDeserializer());
- var testConsumer = cf.createConsumer();
- embeddedKafka.consumeFromAnEmbeddedTopic(testConsumer, asn1CoderTopics.getDecoderInput());
+ future = new CompletableFuture<>();
var classLoader = getClass().getClassLoader();
InputStream inputStream = classLoader
@@ -77,17 +77,26 @@ void testListen() throws JSONException, IOException {
"us/dot/its/jpo/ode/kafka/listeners/asn1/decoder-input-psm.json");
assert inputStream != null;
var psmJson = new String(inputStream.readAllBytes(), StandardCharsets.UTF_8);
- kafkaTemplate.send(rawEncodedJsonTopics.getPsm(), psmJson);
inputStream = classLoader
.getResourceAsStream("us/dot/its/jpo/ode/kafka/listeners/asn1/expected-psm.xml");
assert inputStream != null;
var expectedPsm = new String(inputStream.readAllBytes(), StandardCharsets.UTF_8);
- var produced =
- KafkaTestUtils.getSingleRecord(testConsumer, asn1CoderTopics.getDecoderInput());
- var odePsmData = produced.value();
- assertEquals(expectedPsm, odePsmData);
- testConsumer.close();
+ kafkaTemplate.send(rawEncodedJsonTopics.getPsm(), psmJson);
+
+ String actualPayload;
+ try {
+ actualPayload = future.get(3, TimeUnit.SECONDS);
+ } catch (ExecutionException | TimeoutException e) {
+ throw new AssertionError("PSM message was not received within the timeout period", e);
+ }
+
+ assertEquals(expectedPsm, actualPayload);
+ }
+
+  @KafkaListener(topics = "topic.Asn1DecoderPSMInput", groupId = "test-group")
+ public void receive(String payload) {
+ future.complete(payload);
}
}
\ No newline at end of file
diff --git a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedRSMJsonRouterTest.java b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedRSMJsonRouterTest.java
index 6696d9334..888c52104 100644
--- a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedRSMJsonRouterTest.java
+++ b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedRSMJsonRouterTest.java
@@ -5,20 +5,23 @@
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
-import java.util.Map;
-import org.apache.kafka.clients.consumer.Consumer;
-import org.apache.kafka.common.serialization.StringDeserializer;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
import org.json.JSONException;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
+import org.springframework.boot.kafka.autoconfigure.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.test.context.SpringBootTest;
-import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
+import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
-import org.springframework.kafka.test.utils.KafkaTestUtils;
+import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.test.annotation.DirtiesContext;
-import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.TestPropertySource;
+
import us.dot.its.jpo.ode.config.SerializationConfig;
import us.dot.its.jpo.ode.kafka.KafkaConsumerConfig;
import us.dot.its.jpo.ode.kafka.OdeKafkaProperties;
@@ -28,7 +31,6 @@
import us.dot.its.jpo.ode.kafka.producer.KafkaProducerConfig;
import us.dot.its.jpo.ode.kafka.topics.Asn1CoderTopics;
import us.dot.its.jpo.ode.kafka.topics.RawEncodedJsonTopics;
-import us.dot.its.jpo.ode.test.utilities.EmbeddedKafkaHolder;
import us.dot.its.jpo.ode.udp.controller.UDPReceiverProperties;
/**
@@ -43,38 +45,33 @@
RawEncodedJsonService.class,
SerializationConfig.class,
TestMetricsConfig.class,
+ UDPReceiverProperties.class,
+ OdeKafkaProperties.class,
+ RawEncodedJsonTopics.class,
+ KafkaProperties.class,
+ Asn1CoderTopics.class
},
properties = {
"ode.kafka.topics.raw-encoded-json.rsm=topic.Asn1DecoderTestRSMJSON",
"ode.kafka.topics.asn1.decoder-input=topic.Asn1DecoderRSMInput"
})
+@EmbeddedKafka
+@TestPropertySource(properties = {"spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}"})
@EnableConfigurationProperties
-@ContextConfiguration(classes = {
- UDPReceiverProperties.class, OdeKafkaProperties.class,
- RawEncodedJsonTopics.class, KafkaProperties.class, Asn1CoderTopics.class
-})
@DirtiesContext
public class RawEncodedRSMJsonRouterTest {
-
- @Autowired
- Asn1CoderTopics asn1CoderTopics;
+
@Autowired
RawEncodedJsonTopics rawEncodedJsonTopics;
@Autowired
private KafkaTemplate kafkaTemplate;
+  private CompletableFuture<String> future;
+
@Test
- void testListen() throws JSONException, IOException {
- var embeddedKafka = EmbeddedKafkaHolder.getEmbeddedKafka();
- EmbeddedKafkaHolder.addTopics(asn1CoderTopics.getDecoderInput(), rawEncodedJsonTopics.getRsm());
+ void testListen() throws JSONException, IOException, InterruptedException {
- Map consumerProps =
- KafkaTestUtils.consumerProps("Asn1DecodeRSMJSONTestConsumer", "false", embeddedKafka);
- var cf =
- new DefaultKafkaConsumerFactory<>(consumerProps,
- new StringDeserializer(), new StringDeserializer());
- Consumer testConsumer = cf.createConsumer();
- embeddedKafka.consumeFromAnEmbeddedTopic(testConsumer, asn1CoderTopics.getDecoderInput());
+ future = new CompletableFuture<>();
var classLoader = getClass().getClassLoader();
InputStream inputStream = classLoader
@@ -82,15 +79,26 @@ void testListen() throws JSONException, IOException {
"us/dot/its/jpo/ode/kafka/listeners/asn1/decoder-input-rsm.json");
assert inputStream != null;
var json = new String(inputStream.readAllBytes(), StandardCharsets.UTF_8);
- kafkaTemplate.send(rawEncodedJsonTopics.getRsm(), json);
inputStream = classLoader
.getResourceAsStream("us/dot/its/jpo/ode/kafka/listeners/asn1/expected-rsm.xml");
assert inputStream != null;
var expectedRSM = new String(inputStream.readAllBytes(), StandardCharsets.UTF_8);
- var consumedRSM = KafkaTestUtils.getSingleRecord(testConsumer, asn1CoderTopics.getDecoderInput());
- assertEquals(expectedRSM, consumedRSM.value());
- testConsumer.close();
+ kafkaTemplate.send(rawEncodedJsonTopics.getRsm(), json);
+
+ String actualPayload;
+ try {
+ actualPayload = future.get(3, TimeUnit.SECONDS);
+ } catch (ExecutionException | TimeoutException e) {
+ throw new AssertionError("RSM message was not received within the timeout period", e);
+ }
+
+ assertEquals(expectedRSM, actualPayload);
+ }
+
+  @KafkaListener(topics = "topic.Asn1DecoderRSMInput", groupId = "test-group")
+ public void receive(String payload) {
+ future.complete(payload);
}
}
\ No newline at end of file
diff --git a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedRTCMJsonRouterTest.java b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedRTCMJsonRouterTest.java
index 84f3c088e..5de444b9d 100644
--- a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedRTCMJsonRouterTest.java
+++ b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedRTCMJsonRouterTest.java
@@ -5,20 +5,22 @@
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
-import java.util.Map;
-import org.apache.kafka.clients.consumer.Consumer;
-import org.apache.kafka.common.serialization.StringDeserializer;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
import org.json.JSONException;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
+import org.springframework.boot.kafka.autoconfigure.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.test.context.SpringBootTest;
-import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
+import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
-import org.springframework.kafka.test.utils.KafkaTestUtils;
+import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.test.annotation.DirtiesContext;
-import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.TestPropertySource;
import us.dot.its.jpo.ode.config.SerializationConfig;
import us.dot.its.jpo.ode.kafka.KafkaConsumerConfig;
import us.dot.its.jpo.ode.kafka.OdeKafkaProperties;
@@ -28,7 +30,6 @@
import us.dot.its.jpo.ode.kafka.producer.KafkaProducerConfig;
import us.dot.its.jpo.ode.kafka.topics.Asn1CoderTopics;
import us.dot.its.jpo.ode.kafka.topics.RawEncodedJsonTopics;
-import us.dot.its.jpo.ode.test.utilities.EmbeddedKafkaHolder;
import us.dot.its.jpo.ode.udp.controller.UDPReceiverProperties;
/**
@@ -43,38 +44,32 @@
RawEncodedJsonService.class,
SerializationConfig.class,
TestMetricsConfig.class,
+ UDPReceiverProperties.class,
+ OdeKafkaProperties.class,
+ RawEncodedJsonTopics.class,
+ KafkaProperties.class,
+ Asn1CoderTopics.class
},
properties = {
"ode.kafka.topics.raw-encoded-json.rtcm=topic.Asn1DecoderTestRTCMJSON",
"ode.kafka.topics.asn1.decoder-input=topic.Asn1DecoderRTCMInput"
})
@EnableConfigurationProperties
-@ContextConfiguration(classes = {
- UDPReceiverProperties.class, OdeKafkaProperties.class,
- RawEncodedJsonTopics.class, KafkaProperties.class, Asn1CoderTopics.class
-})
+@EmbeddedKafka
+@TestPropertySource(properties = {"spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}"})
@DirtiesContext
public class RawEncodedRTCMJsonRouterTest {
-
- @Autowired
- Asn1CoderTopics asn1CoderTopics;
+
@Autowired
RawEncodedJsonTopics rawEncodedJsonTopics;
@Autowired
private KafkaTemplate kafkaTemplate;
- @Test
- void testListen() throws JSONException, IOException {
- var embeddedKafka = EmbeddedKafkaHolder.getEmbeddedKafka();
- EmbeddedKafkaHolder.addTopics(asn1CoderTopics.getDecoderInput(), rawEncodedJsonTopics.getRtcm());
+  private CompletableFuture<String> future;
- Map consumerProps =
- KafkaTestUtils.consumerProps("Asn1DecodeRTCMJSONTestConsumer", "false", embeddedKafka);
- var cf =
- new DefaultKafkaConsumerFactory<>(consumerProps,
- new StringDeserializer(), new StringDeserializer());
- Consumer testConsumer = cf.createConsumer();
- embeddedKafka.consumeFromAnEmbeddedTopic(testConsumer, asn1CoderTopics.getDecoderInput());
+ @Test
+ void testListen() throws JSONException, IOException, InterruptedException {
+ future = new CompletableFuture<>();
var classLoader = getClass().getClassLoader();
InputStream inputStream = classLoader
@@ -89,8 +84,18 @@ void testListen() throws JSONException, IOException {
assert inputStream != null;
var expectedRTCM = new String(inputStream.readAllBytes(), StandardCharsets.UTF_8);
- var consumedRTCM = KafkaTestUtils.getSingleRecord(testConsumer, asn1CoderTopics.getDecoderInput());
- assertEquals(expectedRTCM, consumedRTCM.value());
- testConsumer.close();
+ String actualPayload;
+ try {
+ actualPayload = future.get(3, TimeUnit.SECONDS);
+ } catch (ExecutionException | TimeoutException e) {
+ throw new AssertionError("RTCM message was not received within the timeout period", e);
+ }
+
+ assertEquals(expectedRTCM, actualPayload);
+ }
+
+  @KafkaListener(topics = "topic.Asn1DecoderRTCMInput", groupId = "test-group")
+ public void receive(String payload) {
+ future.complete(payload);
}
}
\ No newline at end of file
diff --git a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedSPATJsonRouterTest.java b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedSPATJsonRouterTest.java
index 7efbcbbcf..1f754437a 100644
--- a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedSPATJsonRouterTest.java
+++ b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedSPATJsonRouterTest.java
@@ -5,20 +5,23 @@
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
-import java.util.Map;
-import org.apache.kafka.clients.consumer.Consumer;
-import org.apache.kafka.common.serialization.StringDeserializer;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
import org.json.JSONException;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
+import org.springframework.boot.kafka.autoconfigure.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.test.context.SpringBootTest;
-import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
+import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
-import org.springframework.kafka.test.utils.KafkaTestUtils;
+import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.test.annotation.DirtiesContext;
-import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.TestPropertySource;
+
import us.dot.its.jpo.ode.config.SerializationConfig;
import us.dot.its.jpo.ode.kafka.KafkaConsumerConfig;
import us.dot.its.jpo.ode.kafka.OdeKafkaProperties;
@@ -28,7 +31,6 @@
import us.dot.its.jpo.ode.kafka.producer.KafkaProducerConfig;
import us.dot.its.jpo.ode.kafka.topics.Asn1CoderTopics;
import us.dot.its.jpo.ode.kafka.topics.RawEncodedJsonTopics;
-import us.dot.its.jpo.ode.test.utilities.EmbeddedKafkaHolder;
import us.dot.its.jpo.ode.udp.controller.UDPReceiverProperties;
@SpringBootTest(
@@ -39,39 +41,33 @@
KafkaConsumerConfig.class,
SerializationConfig.class,
TestMetricsConfig.class,
+ UDPReceiverProperties.class,
+ OdeKafkaProperties.class,
+ RawEncodedJsonTopics.class,
+ KafkaProperties.class,
+ Asn1CoderTopics.class
},
properties = {
"ode.kafka.topics.raw-encoded-json.spat=topic.Asn1DecoderTestSPATJSON",
"ode.kafka.topics.asn1.decoder-input=topic.Asn1DecoderSPATInput"
})
+@EmbeddedKafka
+@TestPropertySource(properties = {"spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}"})
@EnableConfigurationProperties
-@ContextConfiguration(classes = {
- UDPReceiverProperties.class, OdeKafkaProperties.class,
- RawEncodedJsonTopics.class, KafkaProperties.class, Asn1CoderTopics.class
-})
@DirtiesContext
class RawEncodedSPATJsonRouterTest {
- @Autowired
- Asn1CoderTopics asn1CoderTopics;
@Autowired
RawEncodedJsonTopics rawEncodedJsonTopics;
@Autowired
KafkaTemplate kafkaTemplate;
+ private CompletableFuture future;
+
@Test
- void testListen() throws JSONException, IOException {
- var embeddedKafka = EmbeddedKafkaHolder.getEmbeddedKafka();
- EmbeddedKafkaHolder.addTopics(asn1CoderTopics.getDecoderInput(),
- rawEncodedJsonTopics.getSpat());
+ void testListen() throws JSONException, IOException, InterruptedException {
- Map consumerProps =
- KafkaTestUtils.consumerProps("Asn1DecodeSPATJSONTestConsumer", "false", embeddedKafka);
- var cf =
- new DefaultKafkaConsumerFactory<>(consumerProps,
- new StringDeserializer(), new StringDeserializer());
- Consumer testConsumer = cf.createConsumer();
- embeddedKafka.consumeFromAnEmbeddedTopic(testConsumer, asn1CoderTopics.getDecoderInput());
+ future = new CompletableFuture<>();
var classLoader = getClass().getClassLoader();
InputStream inputStream = classLoader
@@ -79,16 +75,26 @@ void testListen() throws JSONException, IOException {
"us/dot/its/jpo/ode/kafka/listeners/asn1/decoder-input-spat.json");
assert inputStream != null;
var spatJson = new String(inputStream.readAllBytes(), StandardCharsets.UTF_8);
- kafkaTemplate.send(rawEncodedJsonTopics.getSpat(), spatJson);
- var consumedSpat =
- KafkaTestUtils.getSingleRecord(testConsumer, asn1CoderTopics.getDecoderInput());
inputStream = classLoader
.getResourceAsStream("us/dot/its/jpo/ode/kafka/listeners/asn1/expected-spat.xml");
assert inputStream != null;
var expectedSpat = new String(inputStream.readAllBytes(), StandardCharsets.UTF_8);
- assertEquals(expectedSpat, consumedSpat.value());
- testConsumer.close();
+ kafkaTemplate.send(rawEncodedJsonTopics.getSpat(), spatJson);
+
+ String actualPayload;
+ try {
+ actualPayload = future.get(3, TimeUnit.SECONDS);
+ } catch (ExecutionException | TimeoutException e) {
+ throw new AssertionError("SPAT message was not received within the timeout period", e);
+ }
+
+ assertEquals(expectedSpat, actualPayload);
+ }
+
+ @KafkaListener(topics = "topic.Asn1DecoderSPATInput", groupId = "test-group")
+ public void receive(String payload) {
+ future.complete(payload);
}
}
\ No newline at end of file
diff --git a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedSRMJsonRouterTest.java b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedSRMJsonRouterTest.java
index 60c65da18..63b342fb8 100644
--- a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedSRMJsonRouterTest.java
+++ b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedSRMJsonRouterTest.java
@@ -5,20 +5,22 @@
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
-import java.util.Map;
-import org.apache.kafka.clients.consumer.Consumer;
-import org.apache.kafka.common.serialization.StringDeserializer;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
import org.json.JSONException;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
+import org.springframework.boot.kafka.autoconfigure.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.test.context.SpringBootTest;
-import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
+import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
-import org.springframework.kafka.test.utils.KafkaTestUtils;
+import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.test.annotation.DirtiesContext;
-import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.TestPropertySource;
import us.dot.its.jpo.ode.config.SerializationConfig;
import us.dot.its.jpo.ode.kafka.KafkaConsumerConfig;
import us.dot.its.jpo.ode.kafka.OdeKafkaProperties;
@@ -28,7 +30,6 @@
import us.dot.its.jpo.ode.kafka.producer.KafkaProducerConfig;
import us.dot.its.jpo.ode.kafka.topics.Asn1CoderTopics;
import us.dot.its.jpo.ode.kafka.topics.RawEncodedJsonTopics;
-import us.dot.its.jpo.ode.test.utilities.EmbeddedKafkaHolder;
import us.dot.its.jpo.ode.udp.controller.UDPReceiverProperties;
@SpringBootTest(
@@ -39,38 +40,32 @@
RawEncodedJsonService.class,
SerializationConfig.class,
TestMetricsConfig.class,
+ UDPReceiverProperties.class,
+ OdeKafkaProperties.class,
+ RawEncodedJsonTopics.class,
+ KafkaProperties.class,
+ Asn1CoderTopics.class
},
properties = {
"ode.kafka.topics.raw-encoded-json.srm=topic.Asn1DecoderTestSRMJSON",
"ode.kafka.topics.asn1.decoder-input=topic.Asn1DecoderSRMInput"
})
+@EmbeddedKafka
+@TestPropertySource(properties = {"spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}"})
@EnableConfigurationProperties
-@ContextConfiguration(classes = {
- UDPReceiverProperties.class, OdeKafkaProperties.class,
- RawEncodedJsonTopics.class, KafkaProperties.class, Asn1CoderTopics.class
-})
@DirtiesContext
class RawEncodedSRMJsonRouterTest {
- @Autowired
- Asn1CoderTopics asn1CoderTopics;
@Autowired
RawEncodedJsonTopics rawEncodedJsonTopics;
@Autowired
private KafkaTemplate kafkaTemplate;
- @Test
- void testListen() throws JSONException, IOException {
- var embeddedKafka = EmbeddedKafkaHolder.getEmbeddedKafka();
- EmbeddedKafkaHolder.addTopics(asn1CoderTopics.getDecoderInput(), rawEncodedJsonTopics.getSrm());
+ private CompletableFuture<String> future;
- Map consumerProps =
- KafkaTestUtils.consumerProps("Asn1DecodeSRMJSONTestConsumer", "false", embeddedKafka);
- var cf =
- new DefaultKafkaConsumerFactory<>(consumerProps,
- new StringDeserializer(), new StringDeserializer());
- Consumer testConsumer = cf.createConsumer();
- embeddedKafka.consumeFromAnEmbeddedTopic(testConsumer, asn1CoderTopics.getDecoderInput());
+ @Test
+ void testListen() throws JSONException, IOException, InterruptedException {
+ future = new CompletableFuture<>();
var classLoader = getClass().getClassLoader();
InputStream inputStream = classLoader
@@ -80,13 +75,23 @@ void testListen() throws JSONException, IOException {
var json = new String(inputStream.readAllBytes(), StandardCharsets.UTF_8);
kafkaTemplate.send(rawEncodedJsonTopics.getSrm(), json);
+ String odeSrmData;
+ try {
+ odeSrmData = future.get(3, TimeUnit.SECONDS);
+ } catch (ExecutionException | TimeoutException e) {
+ throw new AssertionError("SRM message was not received within the timeout period", e);
+ }
+
inputStream = classLoader
.getResourceAsStream("us/dot/its/jpo/ode/kafka/listeners/asn1/expected-srm.xml");
assert inputStream != null;
var expectedSrm = new String(inputStream.readAllBytes(), StandardCharsets.UTF_8);
- var producedSrm = KafkaTestUtils.getSingleRecord(testConsumer, asn1CoderTopics.getDecoderInput());
- assertEquals(expectedSrm, producedSrm.value());
- testConsumer.close();
+ assertEquals(expectedSrm, odeSrmData);
}
+
+ @KafkaListener(topics = {"topic.Asn1DecoderSRMInput"}, groupId = "test-group")
+ public void receive(String payload) {
+ future.complete(payload);
+ }
}
\ No newline at end of file
diff --git a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedSSMJsonRouterTest.java b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedSSMJsonRouterTest.java
index ce658c1ab..bb6900e50 100644
--- a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedSSMJsonRouterTest.java
+++ b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedSSMJsonRouterTest.java
@@ -5,20 +5,22 @@
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
-import java.util.Map;
-import org.apache.kafka.clients.consumer.Consumer;
-import org.apache.kafka.common.serialization.StringDeserializer;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
import org.json.JSONException;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
+import org.springframework.boot.kafka.autoconfigure.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.test.context.SpringBootTest;
-import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
+import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
-import org.springframework.kafka.test.utils.KafkaTestUtils;
+import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.test.annotation.DirtiesContext;
-import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.TestPropertySource;
import us.dot.its.jpo.ode.config.SerializationConfig;
import us.dot.its.jpo.ode.kafka.KafkaConsumerConfig;
import us.dot.its.jpo.ode.kafka.OdeKafkaProperties;
@@ -28,7 +30,6 @@
import us.dot.its.jpo.ode.kafka.producer.KafkaProducerConfig;
import us.dot.its.jpo.ode.kafka.topics.Asn1CoderTopics;
import us.dot.its.jpo.ode.kafka.topics.RawEncodedJsonTopics;
-import us.dot.its.jpo.ode.test.utilities.EmbeddedKafkaHolder;
import us.dot.its.jpo.ode.udp.controller.UDPReceiverProperties;
@SpringBootTest(
@@ -39,38 +40,32 @@
RawEncodedJsonService.class,
SerializationConfig.class,
TestMetricsConfig.class,
+ UDPReceiverProperties.class,
+ OdeKafkaProperties.class,
+ RawEncodedJsonTopics.class,
+ KafkaProperties.class,
+ Asn1CoderTopics.class
},
properties = {
"ode.kafka.topics.raw-encoded-json.ssm=topic.Asn1DecoderTestSSMJSON",
"ode.kafka.topics.asn1.decoder-input=topic.Asn1DecoderSSMInput"
})
+@EmbeddedKafka
+@TestPropertySource(properties = {"spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}"})
@EnableConfigurationProperties
-@ContextConfiguration(classes = {
- UDPReceiverProperties.class, OdeKafkaProperties.class,
- RawEncodedJsonTopics.class, KafkaProperties.class, Asn1CoderTopics.class
-})
@DirtiesContext
class RawEncodedSSMJsonRouterTest {
- @Autowired
- Asn1CoderTopics asn1CoderTopics;
@Autowired
RawEncodedJsonTopics rawEncodedJsonTopics;
@Autowired
private KafkaTemplate kafkaTemplate;
- @Test
- void testListen() throws JSONException, IOException {
- var embeddedKafka = EmbeddedKafkaHolder.getEmbeddedKafka();
- EmbeddedKafkaHolder.addTopics(asn1CoderTopics.getDecoderInput(), rawEncodedJsonTopics.getSsm());
+ private CompletableFuture<String> future;
- Map consumerProps =
- KafkaTestUtils.consumerProps("Asn1DecodeSSMJSONTestConsumer", "false", embeddedKafka);
- var cf =
- new DefaultKafkaConsumerFactory<>(consumerProps,
- new StringDeserializer(), new StringDeserializer());
- Consumer testConsumer = cf.createConsumer();
- embeddedKafka.consumeFromAnEmbeddedTopic(testConsumer, asn1CoderTopics.getDecoderInput());
+ @Test
+ void testListen() throws JSONException, IOException, InterruptedException {
+ future = new CompletableFuture<>();
var classLoader = getClass().getClassLoader();
InputStream inputStream = classLoader
@@ -85,8 +80,18 @@ void testListen() throws JSONException, IOException {
assert inputStream != null;
var expectedSSM = new String(inputStream.readAllBytes(), StandardCharsets.UTF_8);
- var consumedSSM = KafkaTestUtils.getSingleRecord(testConsumer, asn1CoderTopics.getDecoderInput());
- assertEquals(expectedSSM, consumedSSM.value());
- testConsumer.close();
+ String odeSsmData;
+ try {
+ odeSsmData = future.get(3, TimeUnit.SECONDS);
+ } catch (ExecutionException | TimeoutException e) {
+ throw new AssertionError("SSM message was not received within the timeout period", e);
+ }
+
+ assertEquals(expectedSSM, odeSsmData);
}
+
+ @KafkaListener(topics = {"topic.Asn1DecoderSSMInput"}, groupId = "test-group")
+ public void receive(String payload) {
+ future.complete(payload);
+ }
}
\ No newline at end of file
diff --git a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedTIMJsonRouterTest.java b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedTIMJsonRouterTest.java
index 87cd97d17..6b4532168 100644
--- a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedTIMJsonRouterTest.java
+++ b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/kafka/listeners/asn1/RawEncodedTIMJsonRouterTest.java
@@ -5,20 +5,22 @@
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
-import java.util.Map;
-import org.apache.kafka.clients.consumer.Consumer;
-import org.apache.kafka.common.serialization.StringDeserializer;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
import org.json.JSONException;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
+import org.springframework.boot.kafka.autoconfigure.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.test.context.SpringBootTest;
-import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
+import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
-import org.springframework.kafka.test.utils.KafkaTestUtils;
+import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.test.annotation.DirtiesContext;
-import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.TestPropertySource;
import us.dot.its.jpo.ode.config.SerializationConfig;
import us.dot.its.jpo.ode.kafka.KafkaConsumerConfig;
import us.dot.its.jpo.ode.kafka.OdeKafkaProperties;
@@ -28,7 +30,6 @@
import us.dot.its.jpo.ode.kafka.producer.KafkaProducerConfig;
import us.dot.its.jpo.ode.kafka.topics.Asn1CoderTopics;
import us.dot.its.jpo.ode.kafka.topics.RawEncodedJsonTopics;
-import us.dot.its.jpo.ode.test.utilities.EmbeddedKafkaHolder;
import us.dot.its.jpo.ode.udp.controller.UDPReceiverProperties;
@@ -40,38 +41,32 @@
RawEncodedTIMJsonRouter.class,
RawEncodedJsonService.class,
TestMetricsConfig.class,
+ UDPReceiverProperties.class,
+ OdeKafkaProperties.class,
+ RawEncodedJsonTopics.class,
+ KafkaProperties.class,
+ Asn1CoderTopics.class
},
properties = {
"ode.kafka.topics.raw-encoded-json.tim=topic.Asn1DecoderTestTIMJSON",
"ode.kafka.topics.asn1.decoder-input=topic.Asn1DecoderTIMInput"
})
+@EmbeddedKafka
+@TestPropertySource(properties = {"spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}"})
@EnableConfigurationProperties
-@ContextConfiguration(classes = {
- UDPReceiverProperties.class, OdeKafkaProperties.class,
- RawEncodedJsonTopics.class, KafkaProperties.class, Asn1CoderTopics.class
-})
@DirtiesContext
class RawEncodedTIMJsonRouterTest {
- @Autowired
- Asn1CoderTopics asn1CoderTopics;
@Autowired
RawEncodedJsonTopics rawEncodedJsonTopics;
@Autowired
private KafkaTemplate kafkaTemplate;
- @Test
- void testListen() throws JSONException, IOException {
- var embeddedKafka = EmbeddedKafkaHolder.getEmbeddedKafka();
- EmbeddedKafkaHolder.addTopics(asn1CoderTopics.getDecoderInput(), rawEncodedJsonTopics.getTim());
+ private CompletableFuture<String> future;
- Map consumerProps =
- KafkaTestUtils.consumerProps("Asn1DecodeTIMJSONTestConsumer", "false", embeddedKafka);
- var cf =
- new DefaultKafkaConsumerFactory<>(consumerProps,
- new StringDeserializer(), new StringDeserializer());
- Consumer testConsumer = cf.createConsumer();
- embeddedKafka.consumeFromAnEmbeddedTopic(testConsumer, asn1CoderTopics.getDecoderInput());
+ @Test
+ void testListen() throws JSONException, IOException, InterruptedException {
+ future = new CompletableFuture<>();
var classLoader = getClass().getClassLoader();
InputStream inputStream = classLoader
@@ -86,9 +81,18 @@ void testListen() throws JSONException, IOException {
assert inputStream != null;
var expectedTim = new String(inputStream.readAllBytes(), StandardCharsets.UTF_8);
- var produced =
- KafkaTestUtils.getSingleRecord(testConsumer, asn1CoderTopics.getDecoderInput());
- assertEquals(expectedTim, produced.value());
- testConsumer.close();
+ String odeTimData;
+ try {
+ odeTimData = future.get(3, TimeUnit.SECONDS);
+ } catch (ExecutionException | TimeoutException e) {
+ throw new AssertionError("TIM message was not received within the timeout period", e);
+ }
+
+ assertEquals(expectedTim, odeTimData);
+ }
+
+ @KafkaListener(topics = {"topic.Asn1DecoderTIMInput"}, groupId = "test-group")
+ public void receive(String payload) {
+ future.complete(payload);
}
}
\ No newline at end of file
diff --git a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/services/asn1/Asn1EncodedDataRouterTest.java b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/services/asn1/Asn1EncodedDataRouterTest.java
index 7a790f559..f5296b116 100644
--- a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/services/asn1/Asn1EncodedDataRouterTest.java
+++ b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/services/asn1/Asn1EncodedDataRouterTest.java
@@ -37,7 +37,7 @@
import org.mockito.Mockito;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
-import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
+import org.springframework.boot.kafka.autoconfigure.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.context.annotation.Profile;
@@ -47,6 +47,7 @@
import org.springframework.kafka.listener.KafkaMessageListenerContainer;
import org.springframework.kafka.listener.MessageListener;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
+import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.kafka.test.utils.ContainerTestUtils;
import org.springframework.kafka.test.utils.KafkaTestUtils;
import org.springframework.stereotype.Service;
@@ -71,7 +72,6 @@
import us.dot.its.jpo.ode.security.SecurityServicesClient;
import us.dot.its.jpo.ode.security.SecurityServicesProperties;
import us.dot.its.jpo.ode.security.models.SignatureResultModel;
-import us.dot.its.jpo.ode.test.utilities.EmbeddedKafkaHolder;
@Slf4j
@SpringBootTest(
@@ -82,7 +82,11 @@
"ode.kafka.topics.json.tim-tmc-filtered=topic.Asn1EncodedDataRouterTestTimTmcFiltered",
"ode.kafka.topics.asn1.encoder-input=topic.Asn1EncodedDataRouterTestEncoderInput",
"ode.kafka.topics.asn1.encoder-output=topic.Asn1EncodedDataRouterTestEncoderOutput",
- "ode.kafka.topics.sdx-depositor.input=topic.Asn1EncodedDataRouterTestSDXDepositor"
+ "ode.kafka.topics.sdx-depositor.input=topic.Asn1EncodedDataRouterTestSDXDepositor",
+ "ode.kafka.topics.json.tim=topic.OdeTimJson",
+ "spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}",
+ "ode.kafka.brokers=${spring.embedded.kafka.brokers}",
+ "ode.kafka.topics.json.tim-ktable=topic.OdeTimJsonKTable"
},
classes = {
OdeKafkaProperties.class,
@@ -102,9 +106,22 @@
@EnableConfigurationProperties
@DirtiesContext
@ActiveProfiles("test")
+@EmbeddedKafka(
+ partitions = 1,
+ topics = {
+ "topic.Asn1EncodedDataRouterTestTimCertExpiration",
+ "topic.Asn1EncodedDataRouterTestTimTmcFiltered",
+ "topic.Asn1EncodedDataRouterTestEncoderInput",
+ "topic.Asn1EncodedDataRouterTestEncoderOutput",
+ "topic.Asn1EncodedDataRouterTestSDXDepositor",
+ "topic.OdeTimJson",
+ "topic.OdeTimJsonKTable"
+ }
+)
class Asn1EncodedDataRouterTest {
- private final EmbeddedKafkaBroker embeddedKafka = EmbeddedKafkaHolder.getEmbeddedKafka();
+ @Autowired
+ private EmbeddedKafkaBroker embeddedKafka;
@Autowired
Asn1CoderTopics asn1CoderTopics;
@Autowired
@@ -156,7 +173,6 @@ void processDoubleEncodedMessage() throws IOException {
jsonTopics.getTimTmcFiltered(),
sdxDepositorTopic
};
- EmbeddedKafkaHolder.addTopics(topicsForConsumption);
securityServicesProperties.setIsSdwSigningEnabled(true);
Asn1EncodedDataRouter encoderRouter = new Asn1EncodedDataRouter(
@@ -206,12 +222,6 @@ void processDoubleEncodedMessage() throws IOException {
@Test
void processUnsignedMessageSDWOnly() throws IOException {
- String[] topicsForConsumption = {
- asn1CoderTopics.getEncoderInput(),
- jsonTopics.getTimCertExpiration(),
- jsonTopics.getTimTmcFiltered()
- };
- EmbeddedKafkaHolder.addTopics(topicsForConsumption);
securityServicesProperties.setIsSdwSigningEnabled(true);
securityServicesProperties.setIsRsuSigningEnabled(true);
@@ -243,7 +253,7 @@ void processUnsignedMessageSDWOnly() throws IOException {
Awaitility.await().until(completableFuture::isDone);
var consumerProps = KafkaTestUtils.consumerProps(
- "processUnsignedMessageSDWOnly", "false", embeddedKafka);
+ embeddedKafka, "processUnsignedMessageSDWOnly", false);
var consumerFactory = new DefaultKafkaConsumerFactory<>(consumerProps,
new StringDeserializer(), new StringDeserializer());
@@ -297,12 +307,6 @@ void processUnsignedMessageSDWOnly() throws IOException {
@Test
void processUnsignedMessageWithRsus() throws IOException {
- String[] topicsForConsumption = {
- asn1CoderTopics.getEncoderInput(),
- jsonTopics.getTimCertExpiration(),
- jsonTopics.getTimTmcFiltered()
- };
- EmbeddedKafkaHolder.addTopics(topicsForConsumption);
securityServicesProperties.setIsSdwSigningEnabled(true);
securityServicesProperties.setIsRsuSigningEnabled(true);
@@ -334,7 +338,7 @@ void processUnsignedMessageWithRsus() throws IOException {
Awaitility.await().until(completableFuture::isDone);
var consumerProps = KafkaTestUtils.consumerProps(
- "processUnsignedMessage", "false", embeddedKafka);
+ embeddedKafka, "processUnsignedMessage", false);
var consumerFactory = new DefaultKafkaConsumerFactory<>(consumerProps,
new StringDeserializer(), new StringDeserializer());
@@ -377,7 +381,7 @@ void processUnsignedMessageWithRsus() throws IOException {
private KafkaMessageListenerContainer setupListenerContainer(
Asn1EncodedDataRouter encoderRouter,
String containerName) {
- var consumerProps = KafkaTestUtils.consumerProps(containerName, "false", embeddedKafka);
+ var consumerProps = KafkaTestUtils.consumerProps(embeddedKafka, containerName, false);
DefaultKafkaConsumerFactory consumerFactory =
new DefaultKafkaConsumerFactory<>(consumerProps, new StringDeserializer(), new StringDeserializer());
ContainerProperties containerProperties = new ContainerProperties(asn1CoderTopics.getEncoderOutput());
@@ -400,7 +404,7 @@ private KafkaMessageListenerContainer setupListenerContainer(
private Consumer createTestConsumer(String group) {
var consumerProps = KafkaTestUtils.consumerProps(
- group, "false", embeddedKafka);
+ embeddedKafka, group, false);
var consumerFactory = new DefaultKafkaConsumerFactory<>(consumerProps,
new StringDeserializer(), new StringDeserializer());
return consumerFactory.createConsumer();
diff --git a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/test/utilities/EmbeddedKafkaHolder.java b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/test/utilities/EmbeddedKafkaHolder.java
index aecd57099..4121e320a 100644
--- a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/test/utilities/EmbeddedKafkaHolder.java
+++ b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/test/utilities/EmbeddedKafkaHolder.java
@@ -2,9 +2,9 @@
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.admin.NewTopic;
-import org.springframework.kafka.KafkaException;
+import org.apache.kafka.common.KafkaException;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
-import org.springframework.kafka.test.EmbeddedKafkaZKBroker;
+import org.springframework.kafka.test.EmbeddedKafkaKraftBroker;
/**
* The EmbeddedKafkaHolder class is a utility for managing a singleton instance of an embedded Kafka
@@ -26,8 +26,8 @@
@Slf4j
public final class EmbeddedKafkaHolder {
- private static EmbeddedKafkaBroker embeddedKafka =
- new EmbeddedKafkaZKBroker(1, false).brokerListProperty("spring.kafka.bootstrap-servers");
+ private static final EmbeddedKafkaBroker embeddedKafka =
+ new EmbeddedKafkaKraftBroker(1, 1).brokerListProperty("spring.kafka.bootstrap-servers");
private static boolean started;
diff --git a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/traveler/TimDepositControllerTest.java b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/traveler/TimDepositControllerTest.java
index eba22af98..439deee57 100644
--- a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/traveler/TimDepositControllerTest.java
+++ b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/traveler/TimDepositControllerTest.java
@@ -16,7 +16,6 @@
package us.dot.its.jpo.ode.traveler;
- import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.dataformat.xml.XmlMapper;
@@ -36,16 +35,17 @@
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
- import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
+ import org.springframework.boot.kafka.autoconfigure.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.http.ResponseEntity;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
+ import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.kafka.test.utils.KafkaTestUtils;
import org.springframework.test.annotation.DirtiesContext;
- import org.springframework.test.context.ContextConfiguration;
+ import org.springframework.test.context.TestPropertySource;
import us.dot.its.jpo.ode.kafka.KafkaConsumerConfig;
import us.dot.its.jpo.ode.kafka.OdeKafkaProperties;
import us.dot.its.jpo.ode.kafka.TestMetricsConfig;
@@ -53,25 +53,22 @@
import us.dot.its.jpo.ode.kafka.topics.Asn1CoderTopics;
import us.dot.its.jpo.ode.kafka.topics.JsonTopics;
import us.dot.its.jpo.ode.model.OdeMsgMetadata;
- import us.dot.its.jpo.ode.model.OdeObject;
import us.dot.its.jpo.ode.model.SerialId;
import us.dot.its.jpo.ode.plugin.j2735.DdsAdvisorySituationData;
import us.dot.its.jpo.ode.plugin.j2735.builders.TravelerMessageFromHumanToAsnConverter;
import us.dot.its.jpo.ode.security.SecurityServicesProperties;
- import us.dot.its.jpo.ode.test.utilities.EmbeddedKafkaHolder;
import us.dot.its.jpo.ode.util.DateTimeUtils;
import us.dot.its.jpo.ode.util.JsonUtils.JsonUtilsException;
import us.dot.its.jpo.ode.util.XmlUtils;
-
-
+
@EnableConfigurationProperties
@SpringBootTest(classes = {KafkaProducerConfig.class, KafkaConsumerConfig.class,
OdeKafkaProperties.class, Asn1CoderTopics.class, JsonTopics.class,
SecurityServicesProperties.class, KafkaProperties.class, TimIngestTrackerProperties.class,
- XmlMapper.class, TestMetricsConfig.class}, properties = {"ode.kafka.brokers=localhost:4242"})
- @ContextConfiguration(classes = {TimDepositController.class, Asn1CoderTopics.class,
- JsonTopics.class, TimIngestTrackerProperties.class,
- SecurityServicesProperties.class, OdeKafkaProperties.class})
+ XmlMapper.class, TestMetricsConfig.class, TimDepositController.class}, properties = {"ode.kafka.brokers=localhost:4242"})
+@EmbeddedKafka
+@TestPropertySource(properties = {"spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}",
+ "ode.kafka.brokers=${spring.embedded.kafka.brokers}"})
@DirtiesContext
class TimDepositControllerTest {
@@ -92,19 +89,15 @@ class TimDepositControllerTest {
@Autowired
KafkaTemplate kafkaTemplate;
-
- @Autowired
- KafkaTemplate timDataKafkaTemplate;
-
+
@Autowired
private XmlMapper simpleXmlMapper;
-
- EmbeddedKafkaBroker embeddedKafka = EmbeddedKafkaHolder.getEmbeddedKafka();
-
- int consumerCount = 0;
+
+ @Autowired
+ EmbeddedKafkaBroker embeddedKafka;
@Test
- void nullRequestShouldReturnEmptyError() throws com.fasterxml.jackson.core.JsonProcessingException {
+ void nullRequestShouldReturnEmptyError() {
TimDepositController testTimDepositController =
new TimDepositController(asn1CoderTopics, jsonTopics,
timIngestTrackerProperties, securityServicesProperties, kafkaTemplate,
@@ -114,7 +107,7 @@ void nullRequestShouldReturnEmptyError() throws com.fasterxml.jackson.core.JsonP
}
@Test
- void emptyRequestShouldReturnEmptyError() throws com.fasterxml.jackson.core.JsonProcessingException {
+ void emptyRequestShouldReturnEmptyError() {
TimDepositController testTimDepositController =
new TimDepositController(asn1CoderTopics, jsonTopics,
timIngestTrackerProperties, securityServicesProperties, kafkaTemplate,
@@ -124,7 +117,7 @@ void emptyRequestShouldReturnEmptyError() throws com.fasterxml.jackson.core.Json
}
@Test
- void invalidJsonSyntaxShouldReturnJsonSyntaxError() throws com.fasterxml.jackson.core.JsonProcessingException {
+ void invalidJsonSyntaxShouldReturnJsonSyntaxError() {
TimDepositController testTimDepositController =
new TimDepositController(asn1CoderTopics, jsonTopics,
timIngestTrackerProperties, securityServicesProperties, kafkaTemplate,
@@ -135,7 +128,7 @@ void invalidJsonSyntaxShouldReturnJsonSyntaxError() throws com.fasterxml.jackson
}
@Test
- void missingRequestElementShouldReturnMissingRequestError() throws com.fasterxml.jackson.core.JsonProcessingException {
+ void missingRequestElementShouldReturnMissingRequestError() {
TimDepositController testTimDepositController =
new TimDepositController(asn1CoderTopics, jsonTopics,
timIngestTrackerProperties, securityServicesProperties, kafkaTemplate,
@@ -147,7 +140,7 @@ void missingRequestElementShouldReturnMissingRequestError() throws com.fasterxml
}
@Test
- void invalidTimestampShouldReturnInvalidTimestampError() throws com.fasterxml.jackson.core.JsonProcessingException {
+ void invalidTimestampShouldReturnInvalidTimestampError() {
TimDepositController testTimDepositController =
new TimDepositController(asn1CoderTopics, jsonTopics,
timIngestTrackerProperties, securityServicesProperties, kafkaTemplate,
@@ -161,7 +154,7 @@ void invalidTimestampShouldReturnInvalidTimestampError() throws com.fasterxml.ja
}
@Test
- void messageWithNoRSUsOrSDWShouldReturnWarning() throws IOException {
+ void messageWithNoRSUsOrSDWShouldReturnWarning() {
// prepare
odeKafkaProperties.setDisabledTopics(Set.of());
TimDepositController testTimDepositController =
@@ -218,7 +211,7 @@ void failedObjectNodeConversionShouldReturnConvertingError(
@Test
void failedXmlConversionShouldReturnConversionError(
@Capturing TimTransmogrifier capturingTimTransmogrifier)
- throws XmlUtils.XmlUtilsException, JsonUtilsException, JsonProcessingException {
+ throws XmlUtils.XmlUtilsException, JsonUtilsException {
// prepare
odeKafkaProperties.setDisabledTopics(Set.of());
final Clock prevClock = DateTimeUtils.setClock(
@@ -254,7 +247,14 @@ void testSuccessfulMessageReturnsSuccessMessagePost() throws IOException {
odeKafkaProperties.setDisabledTopics(Set.of());
jsonTopics.setTim("test.successfulMessageReturnsSuccessMessagePost.tim.json");
asn1CoderTopics.setEncoderInput("test.successfulMessageReturnsSuccessMessagePost.encoderInput");
- EmbeddedKafkaHolder.addTopics(jsonTopics.getTim(), asn1CoderTopics.getEncoderInput());
+ String[] topics = {jsonTopics.getTim(), asn1CoderTopics.getEncoderInput()};
+ embeddedKafka.addTopics(topics);
+
+ var jsonTimConsumer = createConsumer("postSuccessJsonTimGroup");
+ var asn1CoderEncoderInputConsumer = createConsumer("postSuccessEncoderInputGroup");
+ embeddedKafka.consumeFromAnEmbeddedTopic(jsonTimConsumer, jsonTopics.getTim());
+ embeddedKafka.consumeFromAnEmbeddedTopic(asn1CoderEncoderInputConsumer, asn1CoderTopics.getEncoderInput());
+
final Clock prevClock = DateTimeUtils.setClock(
Clock.fixed(Instant.parse("2018-03-13T01:07:11.120Z"), ZoneId.of("UTC")));
TimDepositController testTimDepositController =
@@ -272,8 +272,6 @@ void testSuccessfulMessageReturnsSuccessMessagePost() throws IOException {
Assertions.assertEquals(expectedResponseBody, actualResponse.getBody());
// verify JSON tim message
- var jsonTimConsumer = createStr2StrConsumer();
- embeddedKafka.consumeFromAnEmbeddedTopic(jsonTimConsumer, jsonTopics.getTim());
var jsonTimRecord = KafkaTestUtils.getSingleRecord(jsonTimConsumer, jsonTopics.getTim());
var actualTimJson = new JSONObject(jsonTimRecord.value());
var expectedTimJson = new JSONObject(
@@ -286,9 +284,6 @@ void testSuccessfulMessageReturnsSuccessMessagePost() throws IOException {
Assertions.assertEquals(expectedTimJson.toString(2), actualTimJson.toString(2));
// verify ASN.1 coder encoder input message
- var asn1CoderEncoderInputConsumer = createStr2StrConsumer();
- embeddedKafka.consumeFromAnEmbeddedTopic(asn1CoderEncoderInputConsumer,
- asn1CoderTopics.getEncoderInput());
var asn1CoderEncoderInputRecord = KafkaTestUtils.getSingleRecord(asn1CoderEncoderInputConsumer,
asn1CoderTopics.getEncoderInput());
var actualXml = asn1CoderEncoderInputRecord.value();
@@ -314,7 +309,14 @@ void testSuccessfulSdwRequestMessageReturnsSuccessMessagePost() throws Exception
jsonTopics.setTim("test.successfulSdwRequestMessageReturnsSuccessMessagePost.tim.json");
asn1CoderTopics.setEncoderInput(
"test.successfulSdwRequestMessageReturnsSuccessMessagePost.encoderInput");
- EmbeddedKafkaHolder.addTopics(jsonTopics.getTim(), asn1CoderTopics.getEncoderInput());
+ String[] topics = {jsonTopics.getTim(), asn1CoderTopics.getEncoderInput()};
+ embeddedKafka.addTopics(topics);
+
+ var jsonTimConsumer = createConsumer("sdwPostSuccessJsonTimGroup");
+ var asn1CoderEncoderInputConsumer = createConsumer("sdwPostSuccessEncoderInputGroup");
+ embeddedKafka.consumeFromAnEmbeddedTopic(jsonTimConsumer, jsonTopics.getTim());
+ embeddedKafka.consumeFromAnEmbeddedTopic(asn1CoderEncoderInputConsumer, asn1CoderTopics.getEncoderInput());
+
final Clock prevClock = DateTimeUtils.setClock(
Clock.fixed(Instant.parse("2018-03-13T01:07:11.120Z"), ZoneId.of("UTC")));
TimDepositController testTimDepositController =
@@ -333,8 +335,6 @@ void testSuccessfulSdwRequestMessageReturnsSuccessMessagePost() throws Exception
Assertions.assertEquals(expectedResponseBody, actualResponse.getBody());
// verify JSON tim message
- var jsonTimConsumer = createStr2StrConsumer();
- embeddedKafka.consumeFromAnEmbeddedTopic(jsonTimConsumer, jsonTopics.getTim());
var jsonTimRecord = KafkaTestUtils.getSingleRecord(jsonTimConsumer, jsonTopics.getTim());
var actualTimJson = new JSONObject(jsonTimRecord.value());
var expectedTimJson = new JSONObject(
@@ -347,9 +347,6 @@ void testSuccessfulSdwRequestMessageReturnsSuccessMessagePost() throws Exception
Assertions.assertEquals(expectedTimJson.toString(2), actualTimJson.toString(2));
// verify ASN.1 coder encoder input message
- var asn1CoderEncoderInputConsumer = createStr2StrConsumer();
- embeddedKafka.consumeFromAnEmbeddedTopic(asn1CoderEncoderInputConsumer,
- asn1CoderTopics.getEncoderInput());
var asn1CoderEncoderInputRecord = KafkaTestUtils.getSingleRecord(asn1CoderEncoderInputConsumer,
asn1CoderTopics.getEncoderInput());
var actualXml = asn1CoderEncoderInputRecord.value();
@@ -375,7 +372,14 @@ void testSuccessfulMessageReturnsSuccessMessagePostWithOde() throws IOException
jsonTopics.setTim("test.successfulMessageReturnsSuccessMessagePostWithOde.tim.json");
asn1CoderTopics.setEncoderInput(
"test.successfulMessageReturnsSuccessMessagePostWithOde.encoderInput");
- EmbeddedKafkaHolder.addTopics(jsonTopics.getTim(), asn1CoderTopics.getEncoderInput());
+ String[] topics = {jsonTopics.getTim(), asn1CoderTopics.getEncoderInput()};
+ embeddedKafka.addTopics(topics);
+
+ var jsonTimConsumer = createConsumer("postWithOdeJsonTimGroup");
+ var asn1CoderEncoderInputConsumer = createConsumer("postWithOdeEncoderInputGroup");
+ embeddedKafka.consumeFromAnEmbeddedTopic(jsonTimConsumer, jsonTopics.getTim());
+ embeddedKafka.consumeFromAnEmbeddedTopic(asn1CoderEncoderInputConsumer, asn1CoderTopics.getEncoderInput());
+
final Clock prevClock = DateTimeUtils.setClock(
Clock.fixed(Instant.parse("2018-03-13T01:07:11.120Z"), ZoneId.of("UTC")));
TimDepositController testTimDepositController =
@@ -393,8 +397,6 @@ void testSuccessfulMessageReturnsSuccessMessagePostWithOde() throws IOException
Assertions.assertEquals(expectedResponseBody, actualResponse.getBody());
// verify JSON tim message
- var jsonTimConsumer = createStr2StrConsumer();
- embeddedKafka.consumeFromAnEmbeddedTopic(jsonTimConsumer, jsonTopics.getTim());
var jsonTimRecord = KafkaTestUtils.getSingleRecord(jsonTimConsumer, jsonTopics.getTim());
var actualTimJson = new JSONObject(jsonTimRecord.value());
var expectedTimJson = new JSONObject(
@@ -407,9 +409,6 @@ void testSuccessfulMessageReturnsSuccessMessagePostWithOde() throws IOException
Assertions.assertEquals(expectedTimJson.toString(2), actualTimJson.toString(2));
// verify ASN.1 coder encoder input message
- var asn1CoderEncoderInputConsumer = createStr2StrConsumer();
- embeddedKafka.consumeFromAnEmbeddedTopic(asn1CoderEncoderInputConsumer,
- asn1CoderTopics.getEncoderInput());
var asn1CoderEncoderInputRecord = KafkaTestUtils.getSingleRecord(asn1CoderEncoderInputConsumer,
asn1CoderTopics.getEncoderInput());
var actualXml = asn1CoderEncoderInputRecord.value();
@@ -434,7 +433,14 @@ void testSuccessfulMessageReturnsSuccessMessagePut() throws IOException {
odeKafkaProperties.setDisabledTopics(Set.of());
jsonTopics.setTim("test.successfulMessageReturnsSuccessMessagePut.tim.json");
asn1CoderTopics.setEncoderInput("test.successfulMessageReturnsSuccessMessagePut.encoderInput");
- EmbeddedKafkaHolder.addTopics(jsonTopics.getTim(), asn1CoderTopics.getEncoderInput());
+ String[] topics = {jsonTopics.getTim(), asn1CoderTopics.getEncoderInput()};
+ embeddedKafka.addTopics(topics);
+
+ var jsonTimConsumer = createConsumer("putSuccessJsonTimGroup");
+ var asn1CoderEncoderInputConsumer = createConsumer("putSuccessEncoderInputGroup");
+ embeddedKafka.consumeFromAnEmbeddedTopic(jsonTimConsumer, jsonTopics.getTim());
+ embeddedKafka.consumeFromAnEmbeddedTopic(asn1CoderEncoderInputConsumer, asn1CoderTopics.getEncoderInput());
+
final Clock prevClock = DateTimeUtils.setClock(
Clock.fixed(Instant.parse("2018-03-13T01:07:11.120Z"), ZoneId.of("UTC")));
TimDepositController testTimDepositController =
@@ -452,8 +458,6 @@ void testSuccessfulMessageReturnsSuccessMessagePut() throws IOException {
Assertions.assertEquals(expectedResponseBody, actualResponse.getBody());
// verify JSON tim message
- var jsonTimConsumer = createStr2StrConsumer();
- embeddedKafka.consumeFromAnEmbeddedTopic(jsonTimConsumer, jsonTopics.getTim());
var jsonTimRecord = KafkaTestUtils.getSingleRecord(jsonTimConsumer, jsonTopics.getTim());
var actualTimJson = new JSONObject(jsonTimRecord.value());
var expectedTimJson = new JSONObject(
@@ -466,9 +470,6 @@ void testSuccessfulMessageReturnsSuccessMessagePut() throws IOException {
Assertions.assertEquals(expectedTimJson.toString(2), actualTimJson.toString(2));
// verify ASN.1 coder encoder input message
- var asn1CoderEncoderInputConsumer = createStr2StrConsumer();
- embeddedKafka.consumeFromAnEmbeddedTopic(asn1CoderEncoderInputConsumer,
- asn1CoderTopics.getEncoderInput());
var asn1CoderEncoderInputRecord = KafkaTestUtils.getSingleRecord(asn1CoderEncoderInputConsumer,
asn1CoderTopics.getEncoderInput());
var actualXml = asn1CoderEncoderInputRecord.value();
@@ -493,7 +494,14 @@ void testDepositingTimWithExtraProperties() throws IOException {
odeKafkaProperties.setDisabledTopics(Set.of());
jsonTopics.setTim("test.depositingTimWithExtraProperties.tim.json");
asn1CoderTopics.setEncoderInput("test.depositingTimWithExtraProperties.encoderInput");
- EmbeddedKafkaHolder.addTopics(jsonTopics.getTim(), asn1CoderTopics.getEncoderInput());
+ String[] topics = {jsonTopics.getTim(), asn1CoderTopics.getEncoderInput()};
+ embeddedKafka.addTopics(topics);
+
+ var jsonTimConsumer = createConsumer("extraPropsJsonTimGroup");
+ var asn1CoderEncoderInputConsumer = createConsumer("extraPropsEncoderInputGroup");
+ embeddedKafka.consumeFromAnEmbeddedTopic(jsonTimConsumer, jsonTopics.getTim());
+ embeddedKafka.consumeFromAnEmbeddedTopic(asn1CoderEncoderInputConsumer, asn1CoderTopics.getEncoderInput());
+
final Clock prevClock = DateTimeUtils.setClock(
Clock.fixed(Instant.parse("2018-03-13T01:07:11.120Z"), ZoneId.of("UTC")));
TimDepositController testTimDepositController =
@@ -511,8 +519,6 @@ void testDepositingTimWithExtraProperties() throws IOException {
Assertions.assertEquals(expectedResponseBody, actualResponse.getBody());
// verify JSON tim message
- var jsonTimConsumer = createStr2StrConsumer();
- embeddedKafka.consumeFromAnEmbeddedTopic(jsonTimConsumer, jsonTopics.getTim());
var jsonTimRecord = KafkaTestUtils.getSingleRecord(jsonTimConsumer, jsonTopics.getTim());
var actualTimJson = new JSONObject(jsonTimRecord.value());
var expectedTimJson =
@@ -525,9 +531,6 @@ void testDepositingTimWithExtraProperties() throws IOException {
Assertions.assertEquals(expectedTimJson.toString(2), actualTimJson.toString(2));
// verify ASN.1 coder encoder input message
- var asn1CoderEncoderInputConsumer = createStr2StrConsumer();
- embeddedKafka.consumeFromAnEmbeddedTopic(asn1CoderEncoderInputConsumer,
- asn1CoderTopics.getEncoderInput());
var asn1CoderEncoderInputRecord = KafkaTestUtils.getSingleRecord(asn1CoderEncoderInputConsumer,
asn1CoderTopics.getEncoderInput());
var actualXml = asn1CoderEncoderInputRecord.value();
@@ -552,7 +555,14 @@ void testSuccessfulTimIngestIsTracked() throws IOException {
odeKafkaProperties.setDisabledTopics(Set.of());
jsonTopics.setTim("test.successfulTimIngestIsTracked.tim.json");
asn1CoderTopics.setEncoderInput("test.successfulTimIngestIsTracked.encoderInput");
- EmbeddedKafkaHolder.addTopics(jsonTopics.getTim(), asn1CoderTopics.getEncoderInput());
+ String[] topics = {jsonTopics.getTim(), asn1CoderTopics.getEncoderInput()};
+ embeddedKafka.addTopics(topics);
+
+ var jsonTimConsumer = createConsumer("ingestTrackedJsonTimGroup");
+ var asn1CoderEncoderInputConsumer = createConsumer("ingestTrackedEncoderInputGroup");
+ embeddedKafka.consumeFromAnEmbeddedTopic(jsonTimConsumer, jsonTopics.getTim());
+ embeddedKafka.consumeFromAnEmbeddedTopic(asn1CoderEncoderInputConsumer, asn1CoderTopics.getEncoderInput());
+
final Clock prevClock = DateTimeUtils.setClock(
Clock.fixed(Instant.parse("2018-03-13T01:07:11.120Z"), ZoneId.of("UTC")));
TimDepositController testTimDepositController =
@@ -573,8 +583,6 @@ void testSuccessfulTimIngestIsTracked() throws IOException {
TimIngestTracker.getInstance().getTotalMessagesReceived());
// verify JSON tim message
- var jsonTimConsumer = createStr2StrConsumer();
- embeddedKafka.consumeFromAnEmbeddedTopic(jsonTimConsumer, jsonTopics.getTim());
var jsonTimRecord = KafkaTestUtils.getSingleRecord(jsonTimConsumer, jsonTopics.getTim());
var actualTimJson = new JSONObject(jsonTimRecord.value());
var expectedTimJson =
@@ -587,9 +595,6 @@ void testSuccessfulTimIngestIsTracked() throws IOException {
Assertions.assertEquals(expectedTimJson.toString(2), actualTimJson.toString(2));
// verify ASN.1 coder encoder input message
- var asn1CoderEncoderInputConsumer = createStr2StrConsumer();
- embeddedKafka.consumeFromAnEmbeddedTopic(asn1CoderEncoderInputConsumer,
- asn1CoderTopics.getEncoderInput());
var asn1CoderEncoderInputRecord = KafkaTestUtils.getSingleRecord(asn1CoderEncoderInputConsumer,
asn1CoderTopics.getEncoderInput());
var actualXml = asn1CoderEncoderInputRecord.value();
@@ -615,7 +620,14 @@ void testSuccessfulRsuMessageReturnsSuccessMessagePost() throws IOException {
jsonTopics.setTim("test.successfulRsuMessageReturnsSuccessMessagePost.tim.json");
asn1CoderTopics.setEncoderInput(
"test.successfulRsuMessageReturnsSuccessMessagePost.encoderInput");
- EmbeddedKafkaHolder.addTopics(jsonTopics.getTim(), asn1CoderTopics.getEncoderInput());
+ String[] topics = {jsonTopics.getTim(), asn1CoderTopics.getEncoderInput()};
+ embeddedKafka.addTopics(topics);
+
+ var jsonTimConsumer = createConsumer("rsuPostSuccessJsonTimGroup");
+ var asn1CoderEncoderInputConsumer = createConsumer("rsuPostSuccessEncoderInputGroup");
+ embeddedKafka.consumeFromAnEmbeddedTopic(jsonTimConsumer, jsonTopics.getTim());
+ embeddedKafka.consumeFromAnEmbeddedTopic(asn1CoderEncoderInputConsumer, asn1CoderTopics.getEncoderInput());
+
final Clock prevClock = DateTimeUtils.setClock(
Clock.fixed(Instant.parse("2018-03-13T01:07:11.120Z"), ZoneId.of("UTC")));
TimDepositController testTimDepositController =
@@ -633,8 +645,6 @@ void testSuccessfulRsuMessageReturnsSuccessMessagePost() throws IOException {
Assertions.assertEquals(expectedResponseBody, actualResponse.getBody());
// verify JSON tim message
- var jsonTimConsumer = createStr2StrConsumer();
- embeddedKafka.consumeFromAnEmbeddedTopic(jsonTimConsumer, jsonTopics.getTim());
var jsonTimRecord = KafkaTestUtils.getSingleRecord(jsonTimConsumer, jsonTopics.getTim());
var actualTimJson = new JSONObject(jsonTimRecord.value());
var expectedTimJson = new JSONObject(
@@ -647,9 +657,6 @@ void testSuccessfulRsuMessageReturnsSuccessMessagePost() throws IOException {
Assertions.assertEquals(expectedTimJson.toString(2), actualTimJson.toString(2));
// verify ASN.1 coder encoder input message
- var asn1CoderEncoderInputConsumer = createStr2StrConsumer();
- embeddedKafka.consumeFromAnEmbeddedTopic(asn1CoderEncoderInputConsumer,
- asn1CoderTopics.getEncoderInput());
var asn1CoderEncoderInputRecord = KafkaTestUtils.getSingleRecord(asn1CoderEncoderInputConsumer,
asn1CoderTopics.getEncoderInput());
var actualXml = asn1CoderEncoderInputRecord.value();
@@ -665,19 +672,14 @@ void testSuccessfulRsuMessageReturnsSuccessMessagePost() throws IOException {
asn1CoderEncoderInputConsumer.close();
DateTimeUtils.setClock(prevClock);
}
-
- /**
- * Helper method to create a consumer for String messages with String keys.
- */
- private Consumer createStr2StrConsumer() {
- consumerCount++;
- var consumerProps =
- KafkaTestUtils.consumerProps("TimDepositControllerTest", "true", embeddedKafka);
- DefaultKafkaConsumerFactory stringConsumerFactory =
- new DefaultKafkaConsumerFactory<>(consumerProps, new StringDeserializer(),
- new StringDeserializer());
- return stringConsumerFactory.createConsumer(String.format("groupid%d", consumerCount),
- String.format("clientidsuffix%d", consumerCount));
+
+ /**
+ * Helper method to create a consumer with an explicit, stable group ID for String messages with String keys.
+ */
+ private Consumer<String, String> createConsumer(String groupId) {
+ java.util.Map<String, Object> consumerProps =
+ KafkaTestUtils.consumerProps(embeddedKafka, groupId, true);
+ return new DefaultKafkaConsumerFactory<>(consumerProps, new StringDeserializer(), new StringDeserializer()).createConsumer();
}
/**
@@ -721,7 +723,7 @@ private static String getStreamId(String xmlString) {
private static void removeStreamId(JSONObject jsonObject) {
jsonObject.getJSONObject("metadata").getJSONObject("serialId").remove("streamId");
}
-
+
/**
* Helper method to remove the stream id from an XML string.
*
@@ -731,5 +733,5 @@ private static void removeStreamId(JSONObject jsonObject) {
private static String removeStreamId(String xmlString, String streamId) {
return xmlString.replace(streamId, "");
}
-
+
}
\ No newline at end of file
diff --git a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/bsm/BsmReceiverTest.java b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/bsm/BsmReceiverTest.java
index 0a0238c7d..c237b9c94 100644
--- a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/bsm/BsmReceiverTest.java
+++ b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/bsm/BsmReceiverTest.java
@@ -1,5 +1,6 @@
package us.dot.its.jpo.ode.udp.bsm;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
@@ -8,27 +9,28 @@
import java.time.Clock;
import java.time.Instant;
import java.time.ZoneId;
+import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
-import org.apache.kafka.clients.consumer.Consumer;
+import java.util.concurrent.TimeUnit;
+
import org.json.JSONObject;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
+import org.springframework.boot.kafka.autoconfigure.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.test.context.SpringBootTest;
-import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
+import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
-import org.springframework.kafka.test.EmbeddedKafkaBroker;
-import org.springframework.kafka.test.utils.KafkaTestUtils;
+import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.test.annotation.DirtiesContext;
-import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.TestPropertySource;
import us.dot.its.jpo.ode.config.SerializationConfig;
+import us.dot.its.jpo.ode.kafka.KafkaConsumerConfig;
import us.dot.its.jpo.ode.kafka.OdeKafkaProperties;
import us.dot.its.jpo.ode.kafka.TestMetricsConfig;
import us.dot.its.jpo.ode.kafka.producer.KafkaProducerConfig;
import us.dot.its.jpo.ode.kafka.topics.RawEncodedJsonTopics;
-import us.dot.its.jpo.ode.test.utilities.EmbeddedKafkaHolder;
import us.dot.its.jpo.ode.test.utilities.TestUDPClient;
import us.dot.its.jpo.ode.udp.controller.UDPReceiverProperties;
import us.dot.its.jpo.ode.util.DateTimeUtils;
@@ -36,11 +38,12 @@
@EnableConfigurationProperties
@SpringBootTest(
classes = { OdeKafkaProperties.class, UDPReceiverProperties.class, KafkaProducerConfig.class,
- SerializationConfig.class, TestMetricsConfig.class, },
+ KafkaConsumerConfig.class, SerializationConfig.class, TestMetricsConfig.class,
+ RawEncodedJsonTopics.class, KafkaProperties.class},
properties = {"ode.receivers.bsm.receiver-port=15352",
"ode.kafka.topics.raw-encoded-json.bsm=topic.BsmReceiverTest"})
-@ContextConfiguration(
- classes = {UDPReceiverProperties.class, RawEncodedJsonTopics.class, KafkaProperties.class})
+@EmbeddedKafka
+@TestPropertySource(properties = {"spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}"})
@DirtiesContext
class BsmReceiverTest {
@@ -53,12 +56,13 @@ class BsmReceiverTest {
@Autowired
KafkaTemplate kafkaTemplate;
- EmbeddedKafkaBroker embeddedKafka = EmbeddedKafkaHolder.getEmbeddedKafka();
+ private volatile CountDownLatch latch;
+ private volatile String actualPayload;
@Test
void testRun() throws Exception {
- EmbeddedKafkaHolder.addTopics(rawEncodedJsonTopics.getBsm());
-
+ latch = new CountDownLatch(1);
+ actualPayload = null;
final Clock prevClock = DateTimeUtils
.setClock(Clock.fixed(Instant.parse("2024-11-26T23:53:21.120Z"), ZoneId.of("UTC")));
// create the BsmReceiver and submit it to a runner
@@ -75,18 +79,12 @@ void testRun() throws Exception {
TestUDPClient udpClient = new TestUDPClient(udpReceiverProperties.getBsm().getReceiverPort());
udpClient.send(fileContent);
- var consumerProps = KafkaTestUtils.consumerProps("BsmReceiverTest", "true", embeddedKafka);
- DefaultKafkaConsumerFactory cf =
- new DefaultKafkaConsumerFactory<>(consumerProps);
- Consumer consumer = cf.createConsumer();
- embeddedKafka.consumeFromAnEmbeddedTopic(consumer, rawEncodedJsonTopics.getBsm());
+ assertThat(latch.await(10, TimeUnit.SECONDS)).isTrue();
- // read record from produce topic
- var singleRecord = KafkaTestUtils.getSingleRecord(consumer, rawEncodedJsonTopics.getBsm());
// confirm the stream-id is different, then remove it from both so that we can test equality
// of all other fields
- assertNotEquals(expected, singleRecord.value());
- JSONObject producedJson = new JSONObject(singleRecord.value());
+ assertNotEquals(expected, actualPayload);
+ JSONObject producedJson = new JSONObject(actualPayload);
JSONObject expectedJson = new JSONObject(expected);
// assert that the UUIDs are different, then remove them so that the rest of the JSON can be
@@ -100,4 +98,10 @@ void testRun() throws Exception {
DateTimeUtils.setClock(prevClock);
}
+
+ @KafkaListener(topics = "topic.BsmReceiverTest")
+ public void receive(String payload) {
+ this.actualPayload = payload;
+ latch.countDown();
+ }
}
\ No newline at end of file
diff --git a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/generic/GenericReceiverTest.java b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/generic/GenericReceiverTest.java
index 005616723..7c6201e85 100644
--- a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/generic/GenericReceiverTest.java
+++ b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/generic/GenericReceiverTest.java
@@ -14,7 +14,7 @@
import org.json.JSONObject;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
+import org.springframework.boot.kafka.autoconfigure.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
@@ -22,7 +22,6 @@
import org.springframework.kafka.test.EmbeddedKafkaBroker;
import org.springframework.kafka.test.utils.KafkaTestUtils;
import org.springframework.test.annotation.DirtiesContext;
-import org.springframework.test.context.ContextConfiguration;
import us.dot.its.jpo.ode.config.SerializationConfig;
import us.dot.its.jpo.ode.kafka.OdeKafkaProperties;
import us.dot.its.jpo.ode.kafka.TestMetricsConfig;
@@ -36,7 +35,7 @@
@EnableConfigurationProperties
@SpringBootTest(
classes = { OdeKafkaProperties.class, UDPReceiverProperties.class, KafkaProducerConfig.class,
- SerializationConfig.class, TestMetricsConfig.class, },
+ SerializationConfig.class, TestMetricsConfig.class, RawEncodedJsonTopics.class, KafkaProperties.class },
properties = {"ode.receivers.generic.receiver-port=15460",
"ode.kafka.topics.raw-encoded-json.bsm=topic.GenericReceiverTestBSM",
"ode.kafka.topics.raw-encoded-json.map=topic.GenericReceiverTestMAP",
@@ -48,8 +47,6 @@
"ode.kafka.topics.raw-encoded-json.sdsm=topic.GenericReceiverTestSDSM",
"ode.kafka.topics.raw-encoded-json.rtcm=topic.GenericReceiverTestRTCM",
"ode.kafka.topics.raw-encoded-json.rsm=topic.GenericReceiverTestRSM"})
-@ContextConfiguration(classes = {UDPReceiverProperties.class, OdeKafkaProperties.class,
- RawEncodedJsonTopics.class, KafkaProperties.class})
@DirtiesContext
class GenericReceiverTest {
@@ -81,7 +78,7 @@ void testRun() throws Exception {
TestUDPClient udpClient =
new TestUDPClient(udpReceiverProperties.getGeneric().getReceiverPort());
- var consumerProps = KafkaTestUtils.consumerProps("GenericReceiverTest", "true", embeddedKafka);
+ var consumerProps = KafkaTestUtils.consumerProps(embeddedKafka, "GenericReceiverTest", true);
var cf = new DefaultKafkaConsumerFactory<>(consumerProps, new StringDeserializer(),
new StringDeserializer());
var consumer = cf.createConsumer();
diff --git a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/map/MapReceiverTest.java b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/map/MapReceiverTest.java
index 7fac81a8f..8ea924134 100644
--- a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/map/MapReceiverTest.java
+++ b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/map/MapReceiverTest.java
@@ -1,5 +1,6 @@
package us.dot.its.jpo.ode.udp.map;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static us.dot.its.jpo.ode.test.utilities.ApprovalTestCase.deserializeTestCases;
@@ -8,57 +9,53 @@
import java.time.Clock;
import java.time.Instant;
import java.util.List;
-import java.util.Map;
+import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
-import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.consumer.Consumer;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
+import java.util.concurrent.TimeUnit;
+
import org.json.JSONObject;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
+import org.springframework.boot.kafka.autoconfigure.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.test.context.SpringBootTest;
-import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
+import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
-import org.springframework.kafka.test.EmbeddedKafkaBroker;
-import org.springframework.kafka.test.utils.KafkaTestUtils;
+import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.test.annotation.DirtiesContext;
-import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.TestPropertySource;
import us.dot.its.jpo.ode.config.SerializationConfig;
+import us.dot.its.jpo.ode.kafka.KafkaConsumerConfig;
import us.dot.its.jpo.ode.kafka.OdeKafkaProperties;
import us.dot.its.jpo.ode.kafka.TestMetricsConfig;
import us.dot.its.jpo.ode.kafka.producer.KafkaProducerConfig;
import us.dot.its.jpo.ode.kafka.topics.RawEncodedJsonTopics;
import us.dot.its.jpo.ode.test.utilities.ApprovalTestCase;
-import us.dot.its.jpo.ode.test.utilities.EmbeddedKafkaHolder;
import us.dot.its.jpo.ode.test.utilities.TestUDPClient;
import us.dot.its.jpo.ode.udp.controller.UDPReceiverProperties;
import us.dot.its.jpo.ode.util.DateTimeUtils;
-@Slf4j
@SpringBootTest(
classes = {
- OdeKafkaProperties.class,
- UDPReceiverProperties.class,
+ KafkaConsumerConfig.class,
KafkaProducerConfig.class,
SerializationConfig.class,
TestMetricsConfig.class,
+ UDPReceiverProperties.class,
+ OdeKafkaProperties.class,
+ RawEncodedJsonTopics.class,
+ KafkaProperties.class
},
properties = {"ode.kafka.topics.raw-encoded-json.map=topic.MapReceiverTestMAPJSON",
"ode.receivers.map.receiver-port=12412"}
)
@EnableConfigurationProperties
-@ContextConfiguration(classes = {
- UDPReceiverProperties.class,
- RawEncodedJsonTopics.class, KafkaProperties.class
-})
+@EmbeddedKafka
+@TestPropertySource(properties = {"spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}"})
@DirtiesContext
class MapReceiverTest {
- private final EmbeddedKafkaBroker embeddedKafka = EmbeddedKafkaHolder.getEmbeddedKafka();
-
@Autowired
UDPReceiverProperties udpReceiverProperties;
@@ -68,9 +65,11 @@ class MapReceiverTest {
@Autowired
RawEncodedJsonTopics rawEncodedJsonTopics;
- @Test
- void testMapReceiver() throws IOException {
+ private volatile CountDownLatch latch;
+ private volatile String actualPayload;
+ @Test
+ void testMapReceiver() throws IOException, InterruptedException {
// Set the clock to a fixed time so that the MapReceiver will produce the same output every time
final Clock prevClock = DateTimeUtils.setClock(
Clock.fixed(Instant.parse("2020-01-01T00:00:00Z"), Clock.systemUTC().getZone()));
@@ -80,16 +79,6 @@ void testMapReceiver() throws IOException {
ExecutorService executorService = Executors.newCachedThreadPool();
executorService.submit(mapReceiver);
- EmbeddedKafkaHolder.addTopics(rawEncodedJsonTopics.getMap());
-
- // Set up a Kafka consumer
- Map consumerProps =
- KafkaTestUtils.consumerProps("test-group", "false", embeddedKafka);
- DefaultKafkaConsumerFactory cf =
- new DefaultKafkaConsumerFactory<>(consumerProps);
- Consumer consumer = cf.createConsumer();
- embeddedKafka.consumeFromAnEmbeddedTopic(consumer, rawEncodedJsonTopics.getMap());
-
TestUDPClient udpClient = new TestUDPClient(udpReceiverProperties.getMap().getReceiverPort());
String path =
@@ -97,12 +86,11 @@ void testMapReceiver() throws IOException {
List approvalTestCases = deserializeTestCases(path);
for (ApprovalTestCase approvalTestCase : approvalTestCases) {
+ latch = new CountDownLatch(1);
+ actualPayload = null;
udpClient.send(approvalTestCase.getInput());
-
- ConsumerRecord produced =
- KafkaTestUtils.getSingleRecord(consumer, rawEncodedJsonTopics.getMap());
-
- JSONObject producedJson = new JSONObject(produced.value());
+ assertThat(latch.await(10, TimeUnit.SECONDS)).isTrue();
+ JSONObject producedJson = new JSONObject(actualPayload);
JSONObject expectedJson = new JSONObject(approvalTestCase.getExpected());
// assert that the UUIDs are different, then remove them so that the rest of the JSON can be compared
@@ -120,4 +108,10 @@ void testMapReceiver() throws IOException {
DateTimeUtils.setClock(prevClock);
}
+
+ @KafkaListener(topics = "topic.MapReceiverTestMAPJSON")
+ public void receive(String payload) {
+ this.actualPayload = payload;
+ latch.countDown();
+ }
}
diff --git a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/psm/PsmReceiverTest.java b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/psm/PsmReceiverTest.java
index fc7b7765d..a32a2922f 100644
--- a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/psm/PsmReceiverTest.java
+++ b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/psm/PsmReceiverTest.java
@@ -1,5 +1,6 @@
package us.dot.its.jpo.ode.udp.psm;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
@@ -8,52 +9,51 @@
import java.time.Clock;
import java.time.Instant;
import java.time.ZoneId;
+import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
-import org.apache.kafka.clients.admin.NewTopic;
+import java.util.concurrent.TimeUnit;
+
import org.json.JSONObject;
import org.junit.jupiter.api.Test;
-import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
+import org.springframework.boot.kafka.autoconfigure.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.test.context.SpringBootTest;
-import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
+import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
-import org.springframework.kafka.test.EmbeddedKafkaBroker;
-import org.springframework.kafka.test.utils.KafkaTestUtils;
+import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.test.annotation.DirtiesContext;
-import org.springframework.test.context.ContextConfiguration;
-import org.springframework.test.context.junit4.SpringRunner;
+import org.springframework.test.context.TestPropertySource;
import us.dot.its.jpo.ode.config.SerializationConfig;
+import us.dot.its.jpo.ode.kafka.KafkaConsumerConfig;
import us.dot.its.jpo.ode.kafka.OdeKafkaProperties;
import us.dot.its.jpo.ode.kafka.TestMetricsConfig;
import us.dot.its.jpo.ode.kafka.producer.KafkaProducerConfig;
import us.dot.its.jpo.ode.kafka.topics.RawEncodedJsonTopics;
-import us.dot.its.jpo.ode.test.utilities.EmbeddedKafkaHolder;
import us.dot.its.jpo.ode.test.utilities.TestUDPClient;
import us.dot.its.jpo.ode.udp.controller.UDPReceiverProperties;
import us.dot.its.jpo.ode.util.DateTimeUtils;
-@RunWith(SpringRunner.class)
@EnableConfigurationProperties
@SpringBootTest(
classes = {
- OdeKafkaProperties.class,
- UDPReceiverProperties.class,
+ KafkaConsumerConfig.class,
KafkaProducerConfig.class,
SerializationConfig.class,
TestMetricsConfig.class,
+ UDPReceiverProperties.class,
+ OdeKafkaProperties.class,
+ RawEncodedJsonTopics.class,
+ KafkaProperties.class
},
properties = {
"ode.receivers.psm.receiver-port=15456",
"ode.kafka.topics.raw-encoded-json.psm=topic.PsmReceiverTest"
}
)
-@ContextConfiguration(classes = {
- UDPReceiverProperties.class,
- RawEncodedJsonTopics.class, KafkaProperties.class
-})
+@EmbeddedKafka
+@TestPropertySource(properties = {"spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}"})
@DirtiesContext
class PsmReceiverTest {
@@ -66,15 +66,13 @@ class PsmReceiverTest {
@Autowired
KafkaTemplate kafkaTemplate;
- EmbeddedKafkaBroker embeddedKafka = EmbeddedKafkaHolder.getEmbeddedKafka();
+ private volatile CountDownLatch latch;
+ private volatile String actualPayload;
@Test
void testRun() throws Exception {
- try {
- embeddedKafka.addTopics(new NewTopic(rawEncodedJsonTopics.getPsm(), 1, (short) 1));
- } catch (Exception e) {
- // Ignore as we're only ensuring topics exist
- }
+ latch = new CountDownLatch(1);
+ actualPayload = null;
final Clock prevClock = DateTimeUtils.setClock(
Clock.fixed(Instant.parse("2024-11-26T23:53:21.120Z"), ZoneId.of("UTC")));
@@ -93,16 +91,11 @@ void testRun() throws Exception {
TestUDPClient udpClient = new TestUDPClient(udpReceiverProperties.getPsm().getReceiverPort());
udpClient.send(fileContent);
- var consumerProps = KafkaTestUtils.consumerProps(
- "PsmReceiverTest", "true", embeddedKafka);
- var cf = new DefaultKafkaConsumerFactory(consumerProps);
- var consumer = cf.createConsumer();
- embeddedKafka.consumeFromAnEmbeddedTopic(consumer, rawEncodedJsonTopics.getPsm());
+ assertThat(latch.await(10, TimeUnit.SECONDS)).isTrue();
- var singleRecord = KafkaTestUtils.getSingleRecord(consumer, rawEncodedJsonTopics.getPsm());
- assertNotEquals(expected, singleRecord.value());
+ assertNotEquals(expected, actualPayload);
- JSONObject producedJson = new JSONObject(singleRecord.value());
+ JSONObject producedJson = new JSONObject(actualPayload);
JSONObject expectedJson = new JSONObject(expected);
assertNotEquals(expectedJson.getJSONObject("metadata").get("serialId"),
@@ -114,4 +107,10 @@ void testRun() throws Exception {
DateTimeUtils.setClock(prevClock);
}
+
+ @KafkaListener(topics = "topic.PsmReceiverTest")
+ public void receive(String payload) {
+ this.actualPayload = payload;
+ latch.countDown();
+ }
}
\ No newline at end of file
diff --git a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/rsm/RsmReceiverTest.java b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/rsm/RsmReceiverTest.java
index 46a51a0a6..b57381bdc 100644
--- a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/rsm/RsmReceiverTest.java
+++ b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/rsm/RsmReceiverTest.java
@@ -1,5 +1,6 @@
package us.dot.its.jpo.ode.udp.rsm;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
@@ -8,27 +9,27 @@
import java.time.Clock;
import java.time.Instant;
import java.time.ZoneId;
+import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
-import org.apache.kafka.clients.consumer.Consumer;
+import java.util.concurrent.TimeUnit;
import org.json.JSONObject;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
+import org.springframework.boot.kafka.autoconfigure.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.test.context.SpringBootTest;
-import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
+import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
-import org.springframework.kafka.test.EmbeddedKafkaBroker;
-import org.springframework.kafka.test.utils.KafkaTestUtils;
+import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.test.annotation.DirtiesContext;
-import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.TestPropertySource;
import us.dot.its.jpo.ode.config.SerializationConfig;
+import us.dot.its.jpo.ode.kafka.KafkaConsumerConfig;
import us.dot.its.jpo.ode.kafka.OdeKafkaProperties;
import us.dot.its.jpo.ode.kafka.TestMetricsConfig;
import us.dot.its.jpo.ode.kafka.producer.KafkaProducerConfig;
import us.dot.its.jpo.ode.kafka.topics.RawEncodedJsonTopics;
-import us.dot.its.jpo.ode.test.utilities.EmbeddedKafkaHolder;
import us.dot.its.jpo.ode.test.utilities.TestUDPClient;
import us.dot.its.jpo.ode.udp.controller.UDPReceiverProperties;
import us.dot.its.jpo.ode.util.DateTimeUtils;
@@ -36,14 +37,18 @@
/**
* Unit test for the RsmReceiver class, verifying UDP reception, Kafka publishing and output.
*/
-@EnableConfigurationProperties
@SpringBootTest(
- classes = { OdeKafkaProperties.class, UDPReceiverProperties.class, KafkaProducerConfig.class,
- SerializationConfig.class, TestMetricsConfig.class, },
+ classes = {KafkaProducerConfig.class, KafkaConsumerConfig.class,
+ SerializationConfig.class, TestMetricsConfig.class, UDPReceiverProperties.class, OdeKafkaProperties.class,
+ RawEncodedJsonTopics.class,
+ KafkaProperties.class },
properties = {"ode.receivers.rsm.receiver-port=12759",
- "ode.kafka.topics.raw-encoded-json.rsm=topic.RsmReceiverTest"})
-@ContextConfiguration(
- classes = {UDPReceiverProperties.class, RawEncodedJsonTopics.class, KafkaProperties.class})
+ "ode.kafka.topics.raw-encoded-json.rsm=topic.RsmReceiverTest"
+ }
+)
+@EnableConfigurationProperties
+@EmbeddedKafka
+@TestPropertySource(properties = {"spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}"})
@DirtiesContext
public class RsmReceiverTest {
@@ -56,46 +61,36 @@ public class RsmReceiverTest {
@Autowired
KafkaTemplate kafkaTemplate;
- EmbeddedKafkaBroker embeddedKafka = EmbeddedKafkaHolder.getEmbeddedKafka();
+ private CountDownLatch latch;
+ private String actualPayload;
@Test
void testRun() throws Exception {
- EmbeddedKafkaHolder.addTopics(rawEncodedJsonTopics.getRsm());
+ latch = new CountDownLatch(1);
+ actualPayload = null;
+
+ final Clock prevClock = DateTimeUtils.setClock(
+ Clock.fixed(Instant.parse("2024-11-26T23:53:21.120Z"), ZoneId.of("UTC")));
- final Clock prevClock = DateTimeUtils
- .setClock(Clock.fixed(Instant.parse("2024-11-26T23:53:21.120Z"), ZoneId.of("UTC")));
- // create the RsmReceiver and submit it to a runner
RsmReceiver rsmReceiver = new RsmReceiver(udpReceiverProperties.getRsm(), kafkaTemplate,
- rawEncodedJsonTopics.getRsm());
+ rawEncodedJsonTopics.getRsm());
ExecutorService executorService = Executors.newCachedThreadPool();
executorService.submit(rsmReceiver);
- String fileContent = Files.readString(
- Paths.get("src/test/resources/us/dot/its/jpo/ode/udp/rsm/RsmReceiverTest_ValidRSM.txt"));
- String expected = Files.readString(Paths.get(
- "src/test/resources/us/dot/its/jpo/ode/udp/rsm/RsmReceiverTest_ValidRSM_expected.json"));
+ String fileContent = Files.readString(Paths.get("src/test/resources/us/dot/its/jpo/ode/udp/rsm/RsmReceiverTest_ValidRSM.txt"));
+ String expected = Files.readString(Paths.get("src/test/resources/us/dot/its/jpo/ode/udp/rsm/RsmReceiverTest_ValidRSM_expected.json"));
TestUDPClient udpClient = new TestUDPClient(udpReceiverProperties.getRsm().getReceiverPort());
udpClient.send(fileContent);
- var consumerProps = KafkaTestUtils.consumerProps("RsmReceiverTest", "true", embeddedKafka);
- DefaultKafkaConsumerFactory cf =
- new DefaultKafkaConsumerFactory<>(consumerProps);
- Consumer consumer = cf.createConsumer();
- embeddedKafka.consumeFromAnEmbeddedTopic(consumer, rawEncodedJsonTopics.getRsm());
-
- // read record from produce topic
- var singleRecord = KafkaTestUtils.getSingleRecord(consumer, rawEncodedJsonTopics.getRsm());
- // confirm the stream-id is different, then remove it from both so that we can test equality
- // of all other fields
- assertNotEquals(expected, singleRecord.value());
- JSONObject producedJson = new JSONObject(singleRecord.value());
+ assertThat(latch.await(3, TimeUnit.SECONDS)).isTrue();
+
+ assertNotEquals(expected, actualPayload);
+ JSONObject producedJson = new JSONObject(actualPayload);
JSONObject expectedJson = new JSONObject(expected);
- // assert that the UUIDs are different, then remove them so that the rest of the JSON can be
- // compared
assertNotEquals(expectedJson.getJSONObject("metadata").get("serialId"),
- producedJson.getJSONObject("metadata").get("serialId"));
+ producedJson.getJSONObject("metadata").get("serialId"));
expectedJson.getJSONObject("metadata").remove("serialId");
producedJson.getJSONObject("metadata").remove("serialId");
@@ -103,4 +98,10 @@ void testRun() throws Exception {
DateTimeUtils.setClock(prevClock);
}
+
+ @KafkaListener(topics = "topic.RsmReceiverTest")
+ public void receive(String payload) {
+ this.actualPayload = payload;
+ latch.countDown();
+ }
}
\ No newline at end of file
diff --git a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/rtcm/RtcmReceiverTest.java b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/rtcm/RtcmReceiverTest.java
index 2859a629e..8fe4a58c9 100644
--- a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/rtcm/RtcmReceiverTest.java
+++ b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/rtcm/RtcmReceiverTest.java
@@ -1,5 +1,6 @@
package us.dot.its.jpo.ode.udp.rtcm;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
@@ -8,27 +9,28 @@
import java.time.Clock;
import java.time.Instant;
import java.time.ZoneId;
+import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
-import org.apache.kafka.clients.consumer.Consumer;
+import java.util.concurrent.TimeUnit;
+
import org.json.JSONObject;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
+import org.springframework.boot.kafka.autoconfigure.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.test.context.SpringBootTest;
-import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
+import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
-import org.springframework.kafka.test.EmbeddedKafkaBroker;
-import org.springframework.kafka.test.utils.KafkaTestUtils;
+import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.test.annotation.DirtiesContext;
-import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.TestPropertySource;
import us.dot.its.jpo.ode.config.SerializationConfig;
+import us.dot.its.jpo.ode.kafka.KafkaConsumerConfig;
import us.dot.its.jpo.ode.kafka.OdeKafkaProperties;
import us.dot.its.jpo.ode.kafka.TestMetricsConfig;
import us.dot.its.jpo.ode.kafka.producer.KafkaProducerConfig;
import us.dot.its.jpo.ode.kafka.topics.RawEncodedJsonTopics;
-import us.dot.its.jpo.ode.test.utilities.EmbeddedKafkaHolder;
import us.dot.its.jpo.ode.test.utilities.TestUDPClient;
import us.dot.its.jpo.ode.udp.controller.UDPReceiverProperties;
import us.dot.its.jpo.ode.util.DateTimeUtils;
@@ -38,12 +40,14 @@
*/
@EnableConfigurationProperties
@SpringBootTest(
- classes = { OdeKafkaProperties.class, UDPReceiverProperties.class, KafkaProducerConfig.class,
- SerializationConfig.class, TestMetricsConfig.class, },
+ classes = { KafkaConsumerConfig.class, KafkaProducerConfig.class,
+ SerializationConfig.class, TestMetricsConfig.class,
+ UDPReceiverProperties.class, RawEncodedJsonTopics.class,
+ KafkaProperties.class, OdeKafkaProperties.class},
properties = {"ode.receivers.rtcm.receiver-port=12753",
"ode.kafka.topics.raw-encoded-json.rtcm=topic.RtcmReceiverTest"})
-@ContextConfiguration(
- classes = {UDPReceiverProperties.class, RawEncodedJsonTopics.class, KafkaProperties.class})
+@EmbeddedKafka
+@TestPropertySource(properties = {"spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}"})
@DirtiesContext
public class RtcmReceiverTest {
@@ -56,11 +60,13 @@ public class RtcmReceiverTest {
@Autowired
KafkaTemplate kafkaTemplate;
- EmbeddedKafkaBroker embeddedKafka = EmbeddedKafkaHolder.getEmbeddedKafka();
+ private CountDownLatch latch;
+ private String actualPayload;
@Test
void testRun() throws Exception {
- EmbeddedKafkaHolder.addTopics(rawEncodedJsonTopics.getRtcm());
+ latch = new CountDownLatch(1);
+ actualPayload = null;
final Clock prevClock = DateTimeUtils
.setClock(Clock.fixed(Instant.parse("2024-11-26T23:53:21.120Z"), ZoneId.of("UTC")));
@@ -78,18 +84,12 @@ void testRun() throws Exception {
TestUDPClient udpClient = new TestUDPClient(udpReceiverProperties.getRtcm().getReceiverPort());
udpClient.send(fileContent);
- var consumerProps = KafkaTestUtils.consumerProps("RtcmReceiverTest", "true", embeddedKafka);
- DefaultKafkaConsumerFactory cf =
- new DefaultKafkaConsumerFactory<>(consumerProps);
- Consumer consumer = cf.createConsumer();
- embeddedKafka.consumeFromAnEmbeddedTopic(consumer, rawEncodedJsonTopics.getRtcm());
+ assertThat(latch.await(3, TimeUnit.SECONDS)).isTrue();
- // read record from produce topic
- var singleRecord = KafkaTestUtils.getSingleRecord(consumer, rawEncodedJsonTopics.getRtcm());
- // confirm the stream-id is different, then remove it from both so that we can test equality
+ // confirm the stream-id is different, then remove it from both so that we can test equality
// of all other fields
- assertNotEquals(expected, singleRecord.value());
- JSONObject producedJson = new JSONObject(singleRecord.value());
+ assertNotEquals(expected, actualPayload);
+ JSONObject producedJson = new JSONObject(actualPayload);
JSONObject expectedJson = new JSONObject(expected);
// assert that the UUIDs are different, then remove them so that the rest of the JSON can be
@@ -103,4 +103,10 @@ void testRun() throws Exception {
DateTimeUtils.setClock(prevClock);
}
+
+ @KafkaListener(topics = "topic.RtcmReceiverTest")
+ public void receive(String payload) {
+ this.actualPayload = payload;
+ latch.countDown();
+ }
}
\ No newline at end of file
diff --git a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/sdsm/SdsmReceiverTest.java b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/sdsm/SdsmReceiverTest.java
index a8fb13dc8..cc35d5e0c 100644
--- a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/sdsm/SdsmReceiverTest.java
+++ b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/sdsm/SdsmReceiverTest.java
@@ -9,44 +9,43 @@
import java.time.Clock;
import java.time.Instant;
import java.time.ZoneOffset;
+import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
-import org.apache.kafka.clients.admin.NewTopic;
+import java.util.concurrent.TimeUnit;
+
+import org.assertj.core.api.Assertions;
import org.json.JSONObject;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
-import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
+import org.springframework.boot.kafka.autoconfigure.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.test.context.SpringBootTest;
-import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
+import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
-import org.springframework.kafka.test.EmbeddedKafkaBroker;
-import org.springframework.kafka.test.utils.KafkaTestUtils;
+import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.test.annotation.DirtiesContext;
-import org.springframework.test.context.ContextConfiguration;
-import org.springframework.test.context.junit4.SpringRunner;
+import org.springframework.test.context.TestPropertySource;
import us.dot.its.jpo.ode.config.SerializationConfig;
+import us.dot.its.jpo.ode.kafka.KafkaConsumerConfig;
import us.dot.its.jpo.ode.kafka.OdeKafkaProperties;
import us.dot.its.jpo.ode.kafka.TestMetricsConfig;
import us.dot.its.jpo.ode.kafka.producer.KafkaProducerConfig;
import us.dot.its.jpo.ode.kafka.topics.RawEncodedJsonTopics;
-import us.dot.its.jpo.ode.test.utilities.EmbeddedKafkaHolder;
import us.dot.its.jpo.ode.test.utilities.TestUDPClient;
import us.dot.its.jpo.ode.udp.controller.UDPReceiverProperties;
import us.dot.its.jpo.ode.util.DateTimeUtils;
import us.dot.its.jpo.ode.util.JsonUtils;
-@RunWith(SpringRunner.class)
@EnableConfigurationProperties
@SpringBootTest(
- classes = {OdeKafkaProperties.class, UDPReceiverProperties.class, KafkaProducerConfig.class,
- SerializationConfig.class, TestMetricsConfig.class},
+ classes = {KafkaConsumerConfig.class, KafkaProducerConfig.class,
+ SerializationConfig.class, TestMetricsConfig.class, UDPReceiverProperties.class, RawEncodedJsonTopics.class, KafkaProperties.class, OdeKafkaProperties.class},
properties = {"ode.receivers.sdsm.receiver-port=12413",
"ode.kafka.topics.raw-encoded-json.sdsm=topic.SdsmReceiverTest"})
-@ContextConfiguration(
- classes = {UDPReceiverProperties.class, RawEncodedJsonTopics.class, KafkaProperties.class})
+@EmbeddedKafka
+@TestPropertySource(properties = {"spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}"})
@DirtiesContext
class SdsmReceiverTest {
@@ -59,11 +58,12 @@ class SdsmReceiverTest {
@Autowired
RawEncodedJsonTopics rawEncodedJsonTopics;
- EmbeddedKafkaBroker embeddedKafka = EmbeddedKafkaHolder.getEmbeddedKafka();
-
private ExecutorService executorService;
private SdsmReceiver sdsmReceiver;
+ private CountDownLatch latch;
+ private String actualPayload;
+
@AfterEach
void cleanup() {
if (executorService != null) {
@@ -76,11 +76,8 @@ void cleanup() {
@Test
void testRun() throws Exception {
- try {
- embeddedKafka.addTopics(new NewTopic(rawEncodedJsonTopics.getSdsm(), 1, (short) 1));
- } catch (Exception e) {
- // Ignore as we're only ensuring topics exist
- }
+ latch = new CountDownLatch(1);
+ actualPayload = null;
final Clock prevClock = DateTimeUtils
.setClock(Clock.fixed(Instant.parse("2024-11-26T23:53:21.120Z"), ZoneOffset.UTC));
@@ -100,15 +97,11 @@ void testRun() throws Exception {
new TestUDPClient(udpReceiverProperties.getSdsm().getReceiverPort());
udpClient.send(fileContent);
- var consumerProps = KafkaTestUtils.consumerProps("SdsmReceiverTest", "true", embeddedKafka);
- var cf = new DefaultKafkaConsumerFactory(consumerProps);
- var consumer = cf.createConsumer();
- embeddedKafka.consumeFromAnEmbeddedTopic(consumer, rawEncodedJsonTopics.getSdsm());
+ Assertions.assertThat(latch.await(3, TimeUnit.SECONDS)).isTrue();
- var singleRecord = KafkaTestUtils.getSingleRecord(consumer, rawEncodedJsonTopics.getSdsm());
- assertNotEquals(expected, singleRecord.value());
+ assertNotEquals(expected, actualPayload);
- JSONObject producedJson = new JSONObject(singleRecord.value());
+ JSONObject producedJson = new JSONObject(actualPayload);
JSONObject expectedJson = new JSONObject(expected);
assertNotEquals(expectedJson.getJSONObject("metadata").get("serialId"),
@@ -122,4 +115,10 @@ void testRun() throws Exception {
DateTimeUtils.setClock(prevClock);
}
+
+  @KafkaListener(topics = "topic.SdsmReceiverTest")
+  public void receive(String payload) {
+    this.actualPayload = payload;
+    latch.countDown();
+  }
}
\ No newline at end of file
diff --git a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/spat/SpatReceiverTest.java b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/spat/SpatReceiverTest.java
index 6c9dce3bd..609d11c0e 100644
--- a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/spat/SpatReceiverTest.java
+++ b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/spat/SpatReceiverTest.java
@@ -1,5 +1,6 @@
package us.dot.its.jpo.ode.udp.spat;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
@@ -8,27 +9,28 @@
import java.time.Clock;
import java.time.Instant;
import java.time.ZoneId;
+import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
-import org.apache.kafka.clients.consumer.Consumer;
+import java.util.concurrent.TimeUnit;
+
import org.json.JSONObject;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
+import org.springframework.boot.kafka.autoconfigure.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.test.context.SpringBootTest;
-import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
+import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
-import org.springframework.kafka.test.EmbeddedKafkaBroker;
-import org.springframework.kafka.test.utils.KafkaTestUtils;
+import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.test.annotation.DirtiesContext;
-import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.TestPropertySource;
import us.dot.its.jpo.ode.config.SerializationConfig;
+import us.dot.its.jpo.ode.kafka.KafkaConsumerConfig;
import us.dot.its.jpo.ode.kafka.OdeKafkaProperties;
import us.dot.its.jpo.ode.kafka.TestMetricsConfig;
import us.dot.its.jpo.ode.kafka.producer.KafkaProducerConfig;
import us.dot.its.jpo.ode.kafka.topics.RawEncodedJsonTopics;
-import us.dot.its.jpo.ode.test.utilities.EmbeddedKafkaHolder;
import us.dot.its.jpo.ode.test.utilities.TestUDPClient;
import us.dot.its.jpo.ode.udp.controller.UDPReceiverProperties;
import us.dot.its.jpo.ode.util.DateTimeUtils;
@@ -36,21 +38,22 @@
@EnableConfigurationProperties
@SpringBootTest(
classes = {
- OdeKafkaProperties.class,
- UDPReceiverProperties.class,
+ KafkaConsumerConfig.class,
KafkaProducerConfig.class,
SerializationConfig.class,
TestMetricsConfig.class,
+ UDPReceiverProperties.class,
+ OdeKafkaProperties.class,
+ RawEncodedJsonTopics.class,
+ KafkaProperties.class
},
properties = {
"ode.receivers.spat.receiver-port=15356",
"ode.kafka.topics.raw-encoded-json.spat=topic.SpatReceiverTest"
}
)
-@ContextConfiguration(classes = {
- UDPReceiverProperties.class,
- RawEncodedJsonTopics.class, KafkaProperties.class
-})
+@EmbeddedKafka
+@TestPropertySource(properties = {"spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}"})
@DirtiesContext
class SpatReceiverTest {
@@ -63,11 +66,13 @@ class SpatReceiverTest {
@Autowired
private KafkaTemplate kafkaTemplate;
- EmbeddedKafkaBroker embeddedKafka = EmbeddedKafkaHolder.getEmbeddedKafka();
+ private CountDownLatch latch;
+ private String actualPayload;
@Test
void testRun() throws Exception {
- EmbeddedKafkaHolder.addTopics(rawEncodedJsonTopics.getSpat());
+ latch = new CountDownLatch(1);
+ actualPayload = null;
final Clock prevClock = DateTimeUtils.setClock(
Clock.fixed(Instant.parse("2024-11-26T23:53:21.120Z"), ZoneId.of("UTC")));
@@ -87,16 +92,10 @@ void testRun() throws Exception {
TestUDPClient udpClient = new TestUDPClient(udpReceiverProperties.getSpat().getReceiverPort());
udpClient.send(fileContent);
- var consumerProps = KafkaTestUtils.consumerProps(
- "SpatReceiverTest", "true", embeddedKafka);
- DefaultKafkaConsumerFactory cf =
- new DefaultKafkaConsumerFactory<>(consumerProps);
- Consumer consumer = cf.createConsumer();
- embeddedKafka.consumeFromAnEmbeddedTopic(consumer, rawEncodedJsonTopics.getSpat());
+ assertThat(latch.await(3, TimeUnit.SECONDS)).isTrue();
- var singleRecord = KafkaTestUtils.getSingleRecord(consumer, rawEncodedJsonTopics.getSpat());
- assertNotEquals(expected, singleRecord.value());
- JSONObject producedJson = new JSONObject(singleRecord.value());
+ assertNotEquals(expected, actualPayload);
+ JSONObject producedJson = new JSONObject(actualPayload);
JSONObject expectedJson = new JSONObject(expected);
assertNotEquals(expectedJson.getJSONObject("metadata").get("serialId"),
@@ -110,4 +109,9 @@ void testRun() throws Exception {
DateTimeUtils.setClock(prevClock);
}
+ @KafkaListener(topics = "topic.SpatReceiverTest")
+ public void receive(String payload) {
+ this.actualPayload = payload;
+ latch.countDown();
+ }
}
\ No newline at end of file
diff --git a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/srm/SrmReceiverTest.java b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/srm/SrmReceiverTest.java
index ff0e964a5..f092703e8 100644
--- a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/srm/SrmReceiverTest.java
+++ b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/srm/SrmReceiverTest.java
@@ -1,5 +1,6 @@
package us.dot.its.jpo.ode.udp.srm;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
@@ -8,51 +9,51 @@
import java.time.Clock;
import java.time.Instant;
import java.time.ZoneId;
+import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+
import org.json.JSONObject;
import org.junit.jupiter.api.Test;
-import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
+import org.springframework.boot.kafka.autoconfigure.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.test.context.SpringBootTest;
-import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
+import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
-import org.springframework.kafka.test.EmbeddedKafkaBroker;
-import org.springframework.kafka.test.utils.KafkaTestUtils;
+import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.test.annotation.DirtiesContext;
-import org.springframework.test.context.ContextConfiguration;
-import org.springframework.test.context.junit4.SpringRunner;
+import org.springframework.test.context.TestPropertySource;
import us.dot.its.jpo.ode.config.SerializationConfig;
+import us.dot.its.jpo.ode.kafka.KafkaConsumerConfig;
import us.dot.its.jpo.ode.kafka.OdeKafkaProperties;
import us.dot.its.jpo.ode.kafka.TestMetricsConfig;
import us.dot.its.jpo.ode.kafka.producer.KafkaProducerConfig;
import us.dot.its.jpo.ode.kafka.topics.RawEncodedJsonTopics;
-import us.dot.its.jpo.ode.test.utilities.EmbeddedKafkaHolder;
import us.dot.its.jpo.ode.test.utilities.TestUDPClient;
import us.dot.its.jpo.ode.udp.controller.UDPReceiverProperties;
import us.dot.its.jpo.ode.util.DateTimeUtils;
-@RunWith(SpringRunner.class)
@EnableConfigurationProperties
@SpringBootTest(
classes = {
- OdeKafkaProperties.class,
- UDPReceiverProperties.class,
+ KafkaConsumerConfig.class,
KafkaProducerConfig.class,
SerializationConfig.class,
TestMetricsConfig.class,
+ UDPReceiverProperties.class,
+ OdeKafkaProperties.class,
+ RawEncodedJsonTopics.class,
+ KafkaProperties.class
},
properties = {
"ode.receivers.srm.receiver-port=15459",
"ode.kafka.topics.raw-encoded-json.srm=topic.SrmReceiverTest"
}
)
-@ContextConfiguration(classes = {
- UDPReceiverProperties.class,
- RawEncodedJsonTopics.class, KafkaProperties.class
-})
+@EmbeddedKafka
+@TestPropertySource(properties = {"spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}"})
@DirtiesContext
class SrmReceiverTest {
@@ -65,11 +66,13 @@ class SrmReceiverTest {
@Autowired
KafkaTemplate kafkaTemplate;
- EmbeddedKafkaBroker embeddedKafka = EmbeddedKafkaHolder.getEmbeddedKafka();
+ private CountDownLatch latch;
+ private String actualPayload;
@Test
void testRun() throws Exception {
- EmbeddedKafkaHolder.addTopics(rawEncodedJsonTopics.getSrm());
+ latch = new CountDownLatch(1);
+ actualPayload = null;
final Clock prevClock = DateTimeUtils.setClock(
Clock.fixed(Instant.parse("2024-11-26T23:53:21.120Z"), ZoneId.of("UTC")));
@@ -89,17 +92,11 @@ void testRun() throws Exception {
TestUDPClient udpClient = new TestUDPClient(udpReceiverProperties.getSrm().getReceiverPort());
udpClient.send(fileContent);
- var consumerProps = KafkaTestUtils.consumerProps(
- "SrmReceiverTest", "true", embeddedKafka);
- var cf = new DefaultKafkaConsumerFactory(consumerProps);
- var consumer = cf.createConsumer();
- embeddedKafka.consumeFromAnEmbeddedTopic(consumer, rawEncodedJsonTopics.getSrm());
+ assertThat(latch.await(3, TimeUnit.SECONDS)).isTrue();
- var singleRecord = KafkaTestUtils.getSingleRecord(consumer, rawEncodedJsonTopics.getSrm());
- String receivedValue = singleRecord.value();
- assertNotEquals(expected, receivedValue);
+ assertNotEquals(expected, actualPayload);
- JSONObject producedJson = new JSONObject(receivedValue);
+ JSONObject producedJson = new JSONObject(actualPayload);
JSONObject expectedJson = new JSONObject(expected);
assertNotEquals(expectedJson.getJSONObject("metadata").get("serialId"),
@@ -111,4 +108,10 @@ void testRun() throws Exception {
DateTimeUtils.setClock(prevClock);
}
+
+ @KafkaListener(topics = "topic.SrmReceiverTest")
+ public void receive(String payload) {
+ this.actualPayload = payload;
+ latch.countDown();
+ }
}
\ No newline at end of file
diff --git a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/ssm/SsmReceiverTest.java b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/ssm/SsmReceiverTest.java
index 760ab07e3..d8702bc6d 100644
--- a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/ssm/SsmReceiverTest.java
+++ b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/ssm/SsmReceiverTest.java
@@ -1,5 +1,6 @@
package us.dot.its.jpo.ode.udp.ssm;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
@@ -8,27 +9,28 @@
import java.time.Clock;
import java.time.Instant;
import java.time.ZoneId;
+import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
-import org.apache.kafka.clients.consumer.Consumer;
+import java.util.concurrent.TimeUnit;
+
import org.json.JSONObject;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
+import org.springframework.boot.kafka.autoconfigure.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.test.context.SpringBootTest;
-import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
+import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
-import org.springframework.kafka.test.EmbeddedKafkaBroker;
-import org.springframework.kafka.test.utils.KafkaTestUtils;
+import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.test.annotation.DirtiesContext;
-import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.TestPropertySource;
import us.dot.its.jpo.ode.config.SerializationConfig;
+import us.dot.its.jpo.ode.kafka.KafkaConsumerConfig;
import us.dot.its.jpo.ode.kafka.OdeKafkaProperties;
import us.dot.its.jpo.ode.kafka.TestMetricsConfig;
import us.dot.its.jpo.ode.kafka.producer.KafkaProducerConfig;
import us.dot.its.jpo.ode.kafka.topics.RawEncodedJsonTopics;
-import us.dot.its.jpo.ode.test.utilities.EmbeddedKafkaHolder;
import us.dot.its.jpo.ode.test.utilities.TestUDPClient;
import us.dot.its.jpo.ode.udp.controller.UDPReceiverProperties;
import us.dot.its.jpo.ode.util.DateTimeUtils;
@@ -36,21 +38,22 @@
@EnableConfigurationProperties
@SpringBootTest(
classes = {
- OdeKafkaProperties.class,
- UDPReceiverProperties.class,
+ KafkaConsumerConfig.class,
KafkaProducerConfig.class,
SerializationConfig.class,
TestMetricsConfig.class,
+ UDPReceiverProperties.class,
+ OdeKafkaProperties.class,
+ RawEncodedJsonTopics.class,
+ KafkaProperties.class
},
properties = {
"ode.receivers.ssm.receiver-port=15358",
"ode.kafka.topics.raw-encoded-json.ssm=topic.SsmReceiverTest"
}
)
-@ContextConfiguration(classes = {
- UDPReceiverProperties.class,
- RawEncodedJsonTopics.class, KafkaProperties.class
-})
+@EmbeddedKafka
+@TestPropertySource(properties = {"spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}"})
@DirtiesContext
class SsmReceiverTest {
@@ -63,11 +66,13 @@ class SsmReceiverTest {
@Autowired
KafkaTemplate kafkaTemplate;
- EmbeddedKafkaBroker embeddedKafka = EmbeddedKafkaHolder.getEmbeddedKafka();
+ private CountDownLatch latch;
+ private String actualPayload;
@Test
void testRun() throws Exception {
- EmbeddedKafkaHolder.addTopics(rawEncodedJsonTopics.getSsm());
+ latch = new CountDownLatch(1);
+ actualPayload = null;
final Clock prevClock = DateTimeUtils.setClock(
Clock.fixed(Instant.parse("2024-11-26T23:53:21.120Z"), ZoneId.of("UTC")));
@@ -86,16 +91,10 @@ void testRun() throws Exception {
TestUDPClient udpClient = new TestUDPClient(udpReceiverProperties.getSsm().getReceiverPort());
udpClient.send(fileContent);
- var consumerProps = KafkaTestUtils.consumerProps(
- "SsmReceiverTest", "true", embeddedKafka);
- DefaultKafkaConsumerFactory cf =
- new DefaultKafkaConsumerFactory<>(consumerProps);
- Consumer consumer = cf.createConsumer();
- embeddedKafka.consumeFromAnEmbeddedTopic(consumer, rawEncodedJsonTopics.getSsm());
+ assertThat(latch.await(3, TimeUnit.SECONDS)).isTrue();
- var singleRecord = KafkaTestUtils.getSingleRecord(consumer, rawEncodedJsonTopics.getSsm());
- assertNotEquals(expected, singleRecord.value());
- JSONObject producedJson = new JSONObject(singleRecord.value());
+ assertNotEquals(expected, actualPayload);
+ JSONObject producedJson = new JSONObject(actualPayload);
JSONObject expectedJson = new JSONObject(expected);
assertNotEquals(expectedJson.getJSONObject("metadata").get("serialId"),
@@ -109,4 +108,10 @@ void testRun() throws Exception {
DateTimeUtils.setClock(prevClock);
}
+
+ @KafkaListener(topics = "topic.SsmReceiverTest")
+ public void receive(String payload) {
+ this.actualPayload = payload;
+ latch.countDown();
+ }
}
\ No newline at end of file
diff --git a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/tim/TimReceiverTest.java b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/tim/TimReceiverTest.java
index 23ede9af4..a0be16f46 100644
--- a/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/tim/TimReceiverTest.java
+++ b/jpo-ode-svcs/src/test/java/us/dot/its/jpo/ode/udp/tim/TimReceiverTest.java
@@ -8,50 +8,51 @@
import java.time.Clock;
import java.time.Instant;
import java.time.ZoneId;
+import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
-import org.apache.kafka.clients.consumer.Consumer;
-import org.apache.kafka.common.serialization.StringDeserializer;
+import java.util.concurrent.TimeUnit;
import org.json.JSONObject;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
+import org.springframework.boot.kafka.autoconfigure.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.test.context.SpringBootTest;
-import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
+import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
-import org.springframework.kafka.test.EmbeddedKafkaBroker;
-import org.springframework.kafka.test.utils.KafkaTestUtils;
+import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.test.annotation.DirtiesContext;
-import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.TestPropertySource;
import us.dot.its.jpo.ode.config.SerializationConfig;
+import us.dot.its.jpo.ode.kafka.KafkaConsumerConfig;
import us.dot.its.jpo.ode.kafka.OdeKafkaProperties;
import us.dot.its.jpo.ode.kafka.TestMetricsConfig;
import us.dot.its.jpo.ode.kafka.producer.KafkaProducerConfig;
import us.dot.its.jpo.ode.kafka.topics.RawEncodedJsonTopics;
-import us.dot.its.jpo.ode.test.utilities.EmbeddedKafkaHolder;
import us.dot.its.jpo.ode.test.utilities.TestUDPClient;
import us.dot.its.jpo.ode.udp.controller.UDPReceiverProperties;
import us.dot.its.jpo.ode.util.DateTimeUtils;
+import static org.assertj.core.api.Assertions.assertThat;
@EnableConfigurationProperties
@SpringBootTest(
classes = {
- OdeKafkaProperties.class,
- UDPReceiverProperties.class,
KafkaProducerConfig.class,
+ KafkaConsumerConfig.class,
SerializationConfig.class,
TestMetricsConfig.class,
+ UDPReceiverProperties.class,
+ OdeKafkaProperties.class,
+ RawEncodedJsonTopics.class,
+ KafkaProperties.class
},
properties = {
"ode.receivers.tim.receiver-port=15353",
"ode.kafka.topics.raw-encoded-json.tim=topic.TimReceiverTest"
}
)
-@ContextConfiguration(classes = {
- UDPReceiverProperties.class, OdeKafkaProperties.class,
- RawEncodedJsonTopics.class, KafkaProperties.class
-})
+@EmbeddedKafka
+@TestPropertySource(properties = {"spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}"})
@DirtiesContext
class TimReceiverTest {
@@ -64,11 +65,13 @@ class TimReceiverTest {
@Autowired
KafkaTemplate kafkaTemplate;
- EmbeddedKafkaBroker embeddedKafka = EmbeddedKafkaHolder.getEmbeddedKafka();
+ private CountDownLatch latch;
+ private String actualPayload;
@Test
void testRun() throws Exception {
- EmbeddedKafkaHolder.addTopics(rawEncodedJsonTopics.getTim());
+ latch = new CountDownLatch(1);
+ actualPayload = null;
final Clock prevClock = DateTimeUtils.setClock(
Clock.fixed(Instant.parse("2024-11-26T23:53:21.120Z"), ZoneId.of("UTC")));
@@ -87,20 +90,12 @@ void testRun() throws Exception {
TestUDPClient udpClient = new TestUDPClient(udpReceiverProperties.getTim().getReceiverPort());
udpClient.send(fileContent);
- var consumerProps = KafkaTestUtils.consumerProps(
- "TimReceiverTest", "true", embeddedKafka);
- DefaultKafkaConsumerFactory cf =
- new DefaultKafkaConsumerFactory<>(consumerProps, new StringDeserializer(), new StringDeserializer());
- Consumer consumer = cf.createConsumer();
- embeddedKafka.consumeFromAnEmbeddedTopic(consumer, rawEncodedJsonTopics.getTim());
-
- var singleRecord = KafkaTestUtils.getSingleRecord(consumer, rawEncodedJsonTopics.getTim());
- // confirm the stream-id is different, then remove it from both so that we can test equality of all other fields
- assertNotEquals(expected, singleRecord.value());
- JSONObject producedJson = new JSONObject(singleRecord.value());
+ assertThat(latch.await(3, TimeUnit.SECONDS)).isTrue();
+
+ assertNotEquals(expected, actualPayload);
+ JSONObject producedJson = new JSONObject(actualPayload);
JSONObject expectedJson = new JSONObject(expected);
- // assert that the UUIDs are different, then remove them so that the rest of the JSON can be compared
assertNotEquals(expectedJson.getJSONObject("metadata").get("serialId"),
producedJson.getJSONObject("metadata").get("serialId"));
expectedJson.getJSONObject("metadata").remove("serialId");
@@ -112,4 +107,10 @@ void testRun() throws Exception {
DateTimeUtils.setClock(prevClock);
}
+
+ @KafkaListener(topics = "topic.TimReceiverTest")
+ public void receive(String payload) {
+ this.actualPayload = payload;
+ latch.countDown();
+ }
}
\ No newline at end of file
diff --git a/pom.xml b/pom.xml
index a345c8266..8088632c2 100644
--- a/pom.xml
+++ b/pom.xml
@@ -7,18 +7,18 @@
org.springframework.boot
spring-boot-starter-parent
- 3.5.4
+ 4.0.3
scm:git:https://github.com/usdot-jpo-ode/jpo-ode.git
- jpo-ode-5.1.0
+ jpo-ode-6.0.0
usdot.jpo.ode
jpo-ode
- 5.1.0
+ 6.0.0
pom
jpo-ode-common
@@ -60,7 +60,7 @@
org.springframework.boot
- spring-boot-starter-web
+ spring-boot-starter-webmvc
org.springframework.boot
@@ -83,12 +83,18 @@
org.junit.jupiter
junit-jupiter-api
- 5.12.2
test
org.projectlombok
lombok
+
1.18.30
provided
@@ -109,7 +115,7 @@
org.springframework.cloud
spring-cloud-dependencies
- 2025.0.0
+ 2025.0.1
pom
import
@@ -149,7 +155,6 @@
org.apache.maven.plugins
maven-surefire-plugin
- 3.2.5
-javaagent:${user.home}/.m2/repository/org/jmockit/jmockit/${jmockit.version}/jmockit-${jmockit.version}.jar
-Xshare:off