From f3d75103eda0c7d8d73e5516996905984960c35b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?U=C4=9Fur=20=C3=96zy=C4=B1lmazel?= Date: Sat, 25 Jan 2025 00:02:49 +0300 Subject: [PATCH 01/13] wip - consumer group --- .golangci.yml | 1 + cmd/githubconsumergroup/main.go | 101 +++++++++++ .../kafkacp/kafkaconsumer/kafkaconsumer.go | 2 + .../kafkaconsumergroup/kafkaconsumergroup.go | 166 ++++++++++++++++++ .../kafkaconsumergroup_test.go | 11 ++ 5 files changed, 281 insertions(+) create mode 100644 cmd/githubconsumergroup/main.go create mode 100644 internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup.go create mode 100644 internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go diff --git a/.golangci.yml b/.golangci.yml index d176115..4bedcf7 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -35,6 +35,7 @@ linters-settings: - h - d - p + - l # --------------------------------------------------------------------------- errcheck: check-type-assertions: true diff --git a/cmd/githubconsumergroup/main.go b/cmd/githubconsumergroup/main.go new file mode 100644 index 0000000..a36e1bc --- /dev/null +++ b/cmd/githubconsumergroup/main.go @@ -0,0 +1,101 @@ +package main + +import ( + "context" + "fmt" + "log" + + "github.com/devchain-network/cauldron/internal/kafkacp/kafkaconsumergroup" + "github.com/devchain-network/cauldron/internal/slogger" + "github.com/devchain-network/cauldron/internal/storage" + "github.com/devchain-network/cauldron/internal/storage/githubstorage" + "github.com/vigo/getenv" +) + +// const ( +// defaultKafkaConsumerTopic = "github" +// ) + +// Run ... 
+func Run() error { + logLevel := getenv.String("LOG_LEVEL", slogger.DefaultLogLevel) + // kafkaTopic := getenv.String("KC_TOPIC_GITHUB", defaultKafkaConsumerTopic) + databaseURL := getenv.String("DATABASE_URL", "") + + if err := getenv.Parse(); err != nil { + return fmt.Errorf("environment variable parse error: [%w]", err) + } + + logger, err := slogger.New( + slogger.WithLogLevelName(*logLevel), + ) + if err != nil { + return fmt.Errorf("logger instantiate error: [%w]", err) + } + + ctx, cancel := context.WithTimeout(context.Background(), storage.DefaultDBPingTimeout) + defer cancel() + + db, err := githubstorage.New( + ctx, + githubstorage.WithDatabaseDSN(*databaseURL), + githubstorage.WithLogger(logger), + ) + if err != nil { + return fmt.Errorf("github storage instantiate error: [%w]", err) + } + + if err = db.Ping(ctx, storage.DefaultDBPingMaxRetries, storage.DefaultDBPingBackoff); err != nil { + return fmt.Errorf("github storage ping error: [%w]", err) + } + defer func() { + logger.Info("github storage - closing pgx pool") + db.Pool.Close() + }() + + kafkaGitHubConsumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + kafkaconsumergroup.WithStorage(db), + ) + if err != nil { + return fmt.Errorf("github kafka group consumer instantiate error: [%w]", err) + } + + defer func() { _ = kafkaGitHubConsumer.SaramaConsumerGroup.Close() }() + + if err = kafkaGitHubConsumer.StartConsume(); err != nil { + return fmt.Errorf("github kafka group consumer start consume error: [%w]", err) + } + + return nil +} + +func main() { + if err := Run(); err != nil { + log.Fatal(err) + } +} + +// import ( + +// kafkaConsumer, err := kafkaconsumergroup.New( +// kafkaconsumergroup.WithLogger(logger), +// kafkaconsumergroup.WithStorage(db), +// ) +// if err != nil { +// return fmt.Errorf("kafka consumer instantiate error: [%w]", err) +// } +// +// for { +// if err = kafkaConsumer.SaramaConsumerGroup.Consume( +// ctx, []string{*kafkaTopic}, 
kafkaConsumer.ConsumerGroupHandler, +// ); err != nil { +// logger.Error("error", "error", err) +// +// continue +// } +// } +// +// // return nil +// } +// diff --git a/internal/kafkacp/kafkaconsumer/kafkaconsumer.go b/internal/kafkacp/kafkaconsumer/kafkaconsumer.go index 2173533..1ff47ed 100644 --- a/internal/kafkacp/kafkaconsumer/kafkaconsumer.go +++ b/internal/kafkacp/kafkaconsumer/kafkaconsumer.go @@ -27,6 +27,8 @@ const ( DefaultMaxRetries = 10 ) +var _ KafkaConsumer = (*Consumer)(nil) // compile time proof + // KafkaConsumer defines kafka consumer behaviours. type KafkaConsumer interface { Consume() error diff --git a/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup.go b/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup.go new file mode 100644 index 0000000..6337026 --- /dev/null +++ b/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup.go @@ -0,0 +1,166 @@ +package kafkaconsumergroup + +import ( + "context" + "fmt" + "log/slog" + "os" + "os/signal" + "sync" + + "github.com/IBM/sarama" + "github.com/devchain-network/cauldron/internal/cerrors" + "github.com/devchain-network/cauldron/internal/kafkacp" + "github.com/devchain-network/cauldron/internal/storage" +) + +var _ sarama.ConsumerGroupHandler = (*Consumer)(nil) // compile time proof + +// Consumer ... +type Consumer struct { + Logger *slog.Logger + Storage storage.PingStorer + SaramaConsumerGroup sarama.ConsumerGroup + Topic kafkacp.KafkaTopicIdentifier + KafkaBrokers kafkacp.KafkaBrokers + NumberOfWorkers int +} + +// Setup ... +func (Consumer) Setup(_ sarama.ConsumerGroupSession) error { return nil } + +// Cleanup ... +func (Consumer) Cleanup(_ sarama.ConsumerGroupSession) error { return nil } + +// ConsumeClaim ... 
+func (Consumer) ConsumeClaim(sess sarama.ConsumerGroupSession, claim sarama.ConsumerGroupClaim) error { + for msg := range claim.Messages() { + fmt.Println(msg.Topic, msg.Partition, msg.Offset, string(msg.Key), string(msg.Value)) + + sess.MarkMessage(msg, "") + } + + return nil +} + +// StartConsume ... +func (c Consumer) StartConsume() error { + ctx, cancel := signal.NotifyContext(context.Background(), os.Interrupt, os.Kill) + defer cancel() + + var wg sync.WaitGroup + + wg.Add(1) + go func() { + defer wg.Done() + + for { + select { + case err, ok := <-c.SaramaConsumerGroup.Errors(): + if !ok { + c.Logger.Info("error chan closed, exiting error handler") + + return + } + c.Logger.Error("group errrrrrrrrr", "error", err) + case <-ctx.Done(): + c.Logger.Info("context canceled, stopping error handler") + + return + } + } + }() + + wg.Add(1) + go func() { + defer wg.Done() + topics := []string{c.Topic.String()} + + for { + if ctx.Err() != nil { + c.Logger.Info("context canceled, stopping consumer loop") + + return + } + + if err := c.SaramaConsumerGroup.Consume(ctx, topics, c); err != nil { + if ctx.Err() != nil { + c.Logger.Info("consume stopped due to context cancellation") + + return + } + + c.Logger.Error("erroooo", "error", err) + } + } + }() + + <-ctx.Done() + wg.Wait() + + if err := c.SaramaConsumerGroup.Close(); err != nil { + return fmt.Errorf("failed to close consumer group, error: [%w]", err) + } + + c.Logger.Info("all workers are stopped") + + return nil +} + +// Option represents option function type. +type Option func(*Consumer) error + +// WithLogger sets logger. +func WithLogger(l *slog.Logger) Option { + return func(c *Consumer) error { + if l == nil { + return fmt.Errorf("kafka consumer WithLogger error: [%w]", cerrors.ErrValueRequired) + } + c.Logger = l + + return nil + } +} + +// WithStorage sets storage value. 
+func WithStorage(st storage.PingStorer) Option { + return func(c *Consumer) error { + if st == nil { + return fmt.Errorf("kafka consumer WithStorage error: [%w]", cerrors.ErrValueRequired) + } + c.Storage = st + + return nil + } +} + +// New ... +func New(options ...Option) (*Consumer, error) { + consumer := new(Consumer) + + var kafkaBrokers kafkacp.KafkaBrokers + kafkaBrokers.AddFromString(kafkacp.DefaultKafkaBrokers) + + consumer.KafkaBrokers = kafkaBrokers + consumer.Topic = kafkacp.KafkaTopicIdentifier("github") + consumer.NumberOfWorkers = 5 + + for _, option := range options { + if err := option(consumer); err != nil { + return nil, fmt.Errorf("kafka consumer group option error: [%w]", err) + } + } + + config := sarama.NewConfig() + config.Version = sarama.V3_9_0_0 + config.Consumer.Return.Errors = true + + group, err := sarama.NewConsumerGroup(consumer.KafkaBrokers.ToStringSlice(), "my-group", config) + if err != nil { + return nil, fmt.Errorf("kafka group consumer group error: [%w]", err) + } + + consumer.SaramaConsumerGroup = group + + return consumer, nil +} diff --git a/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go b/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go new file mode 100644 index 0000000..ac155bf --- /dev/null +++ b/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go @@ -0,0 +1,11 @@ +package kafkaconsumergroup_test + +// func TestNew(t *testing.T) { +// // kversion, err := sarama.ParseKafkaVersion("V2_0_0_0") +// // fmt.Println("err", err) +// // fmt.Println("kversion", kversion) +// fmt.Println(sarama.V3_9_0_0) +// cg, err := kafkaconsumergroup.New() +// assert.Nil(t, err) +// assert.NotNil(t, cg) +// } From d3ebe3de7f6b1b1c4a81765ba926420608002261 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?U=C4=9Fur=20=C3=96zy=C4=B1lmazel?= Date: Sat, 25 Jan 2025 19:00:33 +0300 Subject: [PATCH 02/13] wip --- Rakefile | 14 + cmd/githubconsumer/main.go | 6 +- cmd/githubconsumergroup/main.go | 48 +- go.mod | 2 - 
.../kafkacp/kafkaconsumer/kafkaconsumer.go | 5 +- .../kafkaconsumergroup/kafkaconsumergroup.go | 250 ++++++++++- .../kafkaconsumergroup_test.go | 415 ++++++++++++++++++ 7 files changed, 686 insertions(+), 54 deletions(-) diff --git a/Rakefile b/Rakefile index af5b31e..01c7d4a 100644 --- a/Rakefile +++ b/Rakefile @@ -36,6 +36,7 @@ namespace :run do namespace :kafka do namespace :github do + desc 'run kafka github consumer' task :consumer do run = %{ go run -race cmd/githubconsumer/main.go } @@ -47,6 +48,19 @@ namespace :run do Process.kill('KILL', pid) 0 end + + desc 'run kafka github consumer group' + task :consumer_group do + run = %{ go run -race cmd/githubconsumergroup/main.go } + pid = Process.spawn(run) + Process.wait(pid) + $CHILD_STATUS&.exitstatus || 1 + rescue Interrupt + Process.getpgid(pid) + Process.kill('KILL', pid) + 0 + end + end end end diff --git a/cmd/githubconsumer/main.go b/cmd/githubconsumer/main.go index dfd6082..db07e29 100644 --- a/cmd/githubconsumer/main.go +++ b/cmd/githubconsumer/main.go @@ -13,16 +13,12 @@ import ( "github.com/vigo/getenv" ) -const ( - defaultKafkaConsumerTopic = "github" -) - // Run runs kafa github consumer. 
func Run() error { logLevel := getenv.String("LOG_LEVEL", slogger.DefaultLogLevel) brokersList := getenv.String("KCP_BROKERS", kafkacp.DefaultKafkaBrokers) - kafkaTopic := getenv.String("KC_TOPIC_GITHUB", defaultKafkaConsumerTopic) + kafkaTopic := getenv.String("KC_TOPIC_GITHUB", "") kafkaPartition := getenv.Int("KC_PARTITION", kafkaconsumer.DefaultPartition) kafkaDialTimeout := getenv.Duration("KC_DIAL_TIMEOUT", kafkaconsumer.DefaultDialTimeout) kafkaReadTimeout := getenv.Duration("KC_READ_TIMEOUT", kafkaconsumer.DefaultReadTimeout) diff --git a/cmd/githubconsumergroup/main.go b/cmd/githubconsumergroup/main.go index a36e1bc..f2449ba 100644 --- a/cmd/githubconsumergroup/main.go +++ b/cmd/githubconsumergroup/main.go @@ -5,6 +5,7 @@ import ( "fmt" "log" + "github.com/devchain-network/cauldron/internal/kafkacp" "github.com/devchain-network/cauldron/internal/kafkacp/kafkaconsumergroup" "github.com/devchain-network/cauldron/internal/slogger" "github.com/devchain-network/cauldron/internal/storage" @@ -12,14 +13,17 @@ import ( "github.com/vigo/getenv" ) -// const ( -// defaultKafkaConsumerTopic = "github" -// ) - -// Run ... +// Run runs kafa github consumer group. 
func Run() error { logLevel := getenv.String("LOG_LEVEL", slogger.DefaultLogLevel) - // kafkaTopic := getenv.String("KC_TOPIC_GITHUB", defaultKafkaConsumerTopic) + brokersList := getenv.String("KCP_BROKERS", kafkacp.DefaultKafkaBrokers) + kafkaTopic := getenv.String("KC_TOPIC_GITHUB", "") + kafkaConsumerGroup := getenv.String("KCG_NAME", "github-group") + kafkaDialTimeout := getenv.Duration("KC_DIAL_TIMEOUT", kafkaconsumergroup.DefaultDialTimeout) + kafkaReadTimeout := getenv.Duration("KC_READ_TIMEOUT", kafkaconsumergroup.DefaultReadTimeout) + kafkaWriteTimeout := getenv.Duration("KC_WRITE_TIMEOUT", kafkaconsumergroup.DefaultWriteTimeout) + kafkaBackoff := getenv.Duration("KC_BACKOFF", kafkaconsumergroup.DefaultBackoff) + kafkaMaxRetries := getenv.Int("KC_MAX_RETRIES", kafkaconsumergroup.DefaultMaxRetries) databaseURL := getenv.String("DATABASE_URL", "") if err := getenv.Parse(); err != nil { @@ -56,6 +60,14 @@ func Run() error { kafkaGitHubConsumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), kafkaconsumergroup.WithStorage(db), + kafkaconsumergroup.WithKafkaBrokers(*brokersList), + kafkaconsumergroup.WithDialTimeout(*kafkaDialTimeout), + kafkaconsumergroup.WithReadTimeout(*kafkaReadTimeout), + kafkaconsumergroup.WithWriteTimeout(*kafkaWriteTimeout), + kafkaconsumergroup.WithBackoff(*kafkaBackoff), + kafkaconsumergroup.WithMaxRetries(*kafkaMaxRetries), + kafkaconsumergroup.WithTopic(*kafkaTopic), + kafkaconsumergroup.WithKafkaGroupName(*kafkaConsumerGroup), ) if err != nil { return fmt.Errorf("github kafka group consumer instantiate error: [%w]", err) @@ -75,27 +87,3 @@ func main() { log.Fatal(err) } } - -// import ( - -// kafkaConsumer, err := kafkaconsumergroup.New( -// kafkaconsumergroup.WithLogger(logger), -// kafkaconsumergroup.WithStorage(db), -// ) -// if err != nil { -// return fmt.Errorf("kafka consumer instantiate error: [%w]", err) -// } -// -// for { -// if err = kafkaConsumer.SaramaConsumerGroup.Consume( -// ctx, 
[]string{*kafkaTopic}, kafkaConsumer.ConsumerGroupHandler, -// ); err != nil { -// logger.Error("error", "error", err) -// -// continue -// } -// } -// -// // return nil -// } -// diff --git a/go.mod b/go.mod index 7ad02b9..ee5d343 100644 --- a/go.mod +++ b/go.mod @@ -46,5 +46,3 @@ require ( golang.org/x/text v0.21.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) - -replace github.com/go-playground/webhooks/v6 => github.com/devchain-network/webhooks/v6 v6.0.0-20250113163359-43c20e82b17e diff --git a/internal/kafkacp/kafkaconsumer/kafkaconsumer.go b/internal/kafkacp/kafkaconsumer/kafkaconsumer.go index 1ff47ed..30e3eda 100644 --- a/internal/kafkacp/kafkaconsumer/kafkaconsumer.go +++ b/internal/kafkacp/kafkaconsumer/kafkaconsumer.go @@ -337,7 +337,10 @@ func New(options ...Option) (*Consumer, error) { backoff := consumer.Backoff for i := range consumer.MaxRetries { - sconsumer, sconsumerErr = consumer.SaramaConsumerFactoryFunc(consumer.KafkaBrokers.ToStringSlice(), config) + sconsumer, sconsumerErr = consumer.SaramaConsumerFactoryFunc( + consumer.KafkaBrokers.ToStringSlice(), + config, + ) if sconsumerErr == nil { break } diff --git a/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup.go b/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup.go index 6337026..de4281e 100644 --- a/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup.go +++ b/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup.go @@ -4,9 +4,12 @@ import ( "context" "fmt" "log/slog" + "math" "os" "os/signal" + "runtime" "sync" + "time" "github.com/IBM/sarama" "github.com/devchain-network/cauldron/internal/cerrors" @@ -14,16 +17,57 @@ import ( "github.com/devchain-network/cauldron/internal/storage" ) +// defaults. 
+const ( + DefaultDialTimeout = 30 * time.Second + DefaultReadTimeout = 30 * time.Second + DefaultWriteTimeout = 30 * time.Second + DefaultBackoff = 2 * time.Second + DefaultMaxRetries = 10 +) + var _ sarama.ConsumerGroupHandler = (*Consumer)(nil) // compile time proof -// Consumer ... +// Consumer represents kafa group consumer setup. type Consumer struct { - Logger *slog.Logger - Storage storage.PingStorer - SaramaConsumerGroup sarama.ConsumerGroup - Topic kafkacp.KafkaTopicIdentifier - KafkaBrokers kafkacp.KafkaBrokers - NumberOfWorkers int + KafkaGroupName string + Logger *slog.Logger + Storage storage.PingStorer + // SaramaConfig *sarama.Config + SaramaConsumerGroupFactoryFunc SaramaConsumerGroupFactoryFunc + SaramaConsumerGroup sarama.ConsumerGroup + Topic kafkacp.KafkaTopicIdentifier + KafkaBrokers kafkacp.KafkaBrokers + KafkaVersion sarama.KafkaVersion + DialTimeout time.Duration + ReadTimeout time.Duration + WriteTimeout time.Duration + Backoff time.Duration + MaxRetries uint8 + NumberOfWorkers int +} + +// SaramaConsumerGroupFactoryFunc is a factory function. +type SaramaConsumerGroupFactoryFunc func([]string, string, *sarama.Config) (sarama.ConsumerGroup, error) + +func (c *Consumer) checkRequired() error { + if c.Logger == nil { + return fmt.Errorf("kafka consumer group check required, Logger error: [%w]", cerrors.ErrValueRequired) + } + + if c.Storage == nil { + return fmt.Errorf("kafka consumer group check required, Storage error: [%w]", cerrors.ErrValueRequired) + } + + if c.KafkaGroupName == "" { + return fmt.Errorf("kafka consumer group check required, KafkaGroupName error: [%w]", cerrors.ErrValueRequired) + } + + if !c.Topic.Valid() { + return fmt.Errorf("kafka consumer group check required, Topic error: [%w]", cerrors.ErrInvalid) + } + + return nil } // Setup ... 
@@ -114,7 +158,7 @@ type Option func(*Consumer) error func WithLogger(l *slog.Logger) Option { return func(c *Consumer) error { if l == nil { - return fmt.Errorf("kafka consumer WithLogger error: [%w]", cerrors.ErrValueRequired) + return fmt.Errorf("kafka consumer group WithLogger error: [%w]", cerrors.ErrValueRequired) } c.Logger = l @@ -126,7 +170,7 @@ func WithLogger(l *slog.Logger) Option { func WithStorage(st storage.PingStorer) Option { return func(c *Consumer) error { if st == nil { - return fmt.Errorf("kafka consumer WithStorage error: [%w]", cerrors.ErrValueRequired) + return fmt.Errorf("kafka consumer group WithStorage error: [%w]", cerrors.ErrValueRequired) } c.Storage = st @@ -134,7 +178,142 @@ func WithStorage(st storage.PingStorer) Option { } } -// New ... +// WithTopic sets topic name to consume. +func WithTopic(s string) Option { + return func(c *Consumer) error { + kt := kafkacp.KafkaTopicIdentifier(s) + if !kt.Valid() { + return fmt.Errorf("kafka consumer group WithTopic error: [%w]", cerrors.ErrInvalid) + } + c.Topic = kt + + return nil + } +} + +// WithKafkaVersion sets kafka version. +func WithKafkaVersion(s string) Option { + return func(c *Consumer) error { + version, err := sarama.ParseKafkaVersion(s) + if err != nil { + return fmt.Errorf("kafka consumer group WithKafkaVersion error: [%w][%w]", err, cerrors.ErrInvalid) + } + + c.KafkaVersion = version + + return nil + } +} + +// WithKafkaBrokers sets kafka brokers list. +func WithKafkaBrokers(brokers string) Option { + return func(c *Consumer) error { + var kafkaBrokers kafkacp.KafkaBrokers + kafkaBrokers.AddFromString(brokers) + if !kafkaBrokers.Valid() { + return fmt.Errorf("kafka consumer group WithKafkaBrokers error: [%w]", cerrors.ErrInvalid) + } + + c.KafkaBrokers = kafkaBrokers + + return nil + } +} + +// WithDialTimeout sets dial timeout. 
+func WithDialTimeout(d time.Duration) Option { + return func(c *Consumer) error { + if d < 0 { + return fmt.Errorf("kafka consumer WithDialTimeout error: [%w]", cerrors.ErrInvalid) + } + c.DialTimeout = d + + return nil + } +} + +// WithReadTimeout sets read timeout. +func WithReadTimeout(d time.Duration) Option { + return func(c *Consumer) error { + if d < 0 { + return fmt.Errorf("kafka consumer group WithReadTimeout error: [%w]", cerrors.ErrInvalid) + } + c.ReadTimeout = d + + return nil + } +} + +// WithWriteTimeout sets write timeout. +func WithWriteTimeout(d time.Duration) Option { + return func(c *Consumer) error { + if d < 0 { + return fmt.Errorf("kafka consumer group WithWriteTimeout error: [%w]", cerrors.ErrInvalid) + } + c.WriteTimeout = d + + return nil + } +} + +// WithBackoff sets backoff duration. +func WithBackoff(d time.Duration) Option { + return func(c *Consumer) error { + if d == 0 { + return fmt.Errorf("kafka consumer group WithBackoff error: [%w]", cerrors.ErrValueRequired) + } + + if d < 0 || d > time.Minute { + return fmt.Errorf("kafka consumer group WithBackoff error: [%w]", cerrors.ErrInvalid) + } + + c.Backoff = d + + return nil + } +} + +// WithMaxRetries sets max retries value. +func WithMaxRetries(i int) Option { + return func(c *Consumer) error { + if i > math.MaxUint8 || i < 0 { + return fmt.Errorf("kafka consumer group WithMaxRetries error: [%w]", cerrors.ErrInvalid) + } + c.MaxRetries = uint8(i) + + return nil + } +} + +// WithKafkaGroupName sets kafka consumer group name. +func WithKafkaGroupName(s string) Option { + return func(c *Consumer) error { + if s == "" { + return fmt.Errorf("kafka consumer group WithKafkaGroupName error: [%w]", cerrors.ErrValueRequired) + } + c.KafkaGroupName = s + + return nil + } +} + +// WithSaramaConsumerGroupFactoryFunc sets a custom factory function for Sarama consumer group. 
+func WithSaramaConsumerGroupFactoryFunc(factory SaramaConsumerGroupFactoryFunc) Option { + return func(c *Consumer) error { + if factory == nil { + return fmt.Errorf( + "kafka consumer group WithSaramaConsumerGroupFactoryFunc error: [%w]", + cerrors.ErrValueRequired, + ) + } + + c.SaramaConsumerGroupFactoryFunc = factory + + return nil + } +} + +// New instantiates new kafka github consumer group instance. func New(options ...Option) (*Consumer, error) { consumer := new(Consumer) @@ -142,8 +321,14 @@ func New(options ...Option) (*Consumer, error) { kafkaBrokers.AddFromString(kafkacp.DefaultKafkaBrokers) consumer.KafkaBrokers = kafkaBrokers - consumer.Topic = kafkacp.KafkaTopicIdentifier("github") - consumer.NumberOfWorkers = 5 + consumer.DialTimeout = DefaultDialTimeout + consumer.ReadTimeout = DefaultReadTimeout + consumer.WriteTimeout = DefaultWriteTimeout + consumer.Backoff = DefaultBackoff + consumer.MaxRetries = DefaultMaxRetries + consumer.NumberOfWorkers = runtime.NumCPU() + consumer.KafkaVersion = sarama.V3_9_0_0 + consumer.SaramaConsumerGroupFactoryFunc = sarama.NewConsumerGroup for _, option := range options { if err := option(consumer); err != nil { @@ -151,16 +336,49 @@ func New(options ...Option) (*Consumer, error) { } } + if err := consumer.checkRequired(); err != nil { + return nil, err + } + config := sarama.NewConfig() + config.Net.DialTimeout = consumer.DialTimeout + config.Net.ReadTimeout = consumer.ReadTimeout + config.Net.WriteTimeout = consumer.WriteTimeout config.Version = sarama.V3_9_0_0 config.Consumer.Return.Errors = true - group, err := sarama.NewConsumerGroup(consumer.KafkaBrokers.ToStringSlice(), "my-group", config) - if err != nil { - return nil, fmt.Errorf("kafka group consumer group error: [%w]", err) + var saramaConsumerGroup sarama.ConsumerGroup + var saramaConsumerGroupErr error + backoff := consumer.Backoff + + for i := range consumer.MaxRetries { + saramaConsumerGroup, saramaConsumerGroupErr = 
consumer.SaramaConsumerGroupFactoryFunc( + consumer.KafkaBrokers.ToStringSlice(), + consumer.KafkaGroupName, + config, + ) + + if saramaConsumerGroupErr == nil { + break + } + + consumer.Logger.Error( + "can not connect to", + "brokers", consumer.KafkaBrokers, + "error", saramaConsumerGroupErr, + "retry", fmt.Sprintf("%d/%d", i, consumer.MaxRetries), + "backoff", backoff.String(), + ) + time.Sleep(backoff) + backoff *= 2 + } + if saramaConsumerGroupErr != nil { + return nil, fmt.Errorf("kafka consumer group, group error: [%w]", saramaConsumerGroupErr) } - consumer.SaramaConsumerGroup = group + consumer.Logger.Info("successfully connected to", "broker", consumer.KafkaBrokers) + + consumer.SaramaConsumerGroup = saramaConsumerGroup return consumer, nil } diff --git a/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go b/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go index ac155bf..f3db284 100644 --- a/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go +++ b/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go @@ -1,6 +1,414 @@ package kafkaconsumergroup_test +import ( + "context" + "log/slog" + "testing" + "time" + + "github.com/IBM/sarama" + "github.com/devchain-network/cauldron/internal/cerrors" + "github.com/devchain-network/cauldron/internal/kafkacp/kafkaconsumergroup" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +type mockLogger struct{} + +func (h *mockLogger) Enabled(_ context.Context, _ slog.Level) bool { + return true +} + +func (h *mockLogger) Handle(_ context.Context, record slog.Record) error { + return nil +} + +func (h *mockLogger) WithAttrs(attrs []slog.Attr) slog.Handler { + return h +} + +func (h *mockLogger) WithGroup(name string) slog.Handler { + return h +} + +type mockStorage struct { + mock.Mock +} + +func (m *mockStorage) MessageStore(ctx context.Context, msg *sarama.ConsumerMessage) error { + args := m.Called(ctx, msg) + return args.Error(0) +} + +func (m 
*mockStorage) Ping(ctx context.Context, maxRetries uint8, backoff time.Duration) error { + args := m.Called(ctx, maxRetries, backoff) + return args.Error(0) +} + +func TestNew_MissingRequiredFields(t *testing.T) { + consumer, err := kafkaconsumergroup.New() + + assert.ErrorIs(t, err, cerrors.ErrValueRequired) + assert.Nil(t, consumer) +} + +func TestNew_NilLogger(t *testing.T) { + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(nil), + ) + + assert.ErrorIs(t, err, cerrors.ErrValueRequired) + assert.Nil(t, consumer) +} + +func TestNew_NoStorage(t *testing.T) { + logger := slog.New(new(mockLogger)) + + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + ) + + assert.ErrorIs(t, err, cerrors.ErrValueRequired) + assert.Nil(t, consumer) +} + +func TestNew_NilStorage(t *testing.T) { + logger := slog.New(new(mockLogger)) + + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + kafkaconsumergroup.WithStorage(nil), + ) + + assert.ErrorIs(t, err, cerrors.ErrValueRequired) + assert.Nil(t, consumer) +} + +func TestNew_NoGroupName(t *testing.T) { + logger := slog.New(new(mockLogger)) + storage := new(mockStorage) + + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + kafkaconsumergroup.WithStorage(storage), + ) + + assert.ErrorIs(t, err, cerrors.ErrValueRequired) + assert.Nil(t, consumer) +} + +func TestNew_EmptyGroupName(t *testing.T) { + logger := slog.New(new(mockLogger)) + storage := new(mockStorage) + + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + kafkaconsumergroup.WithStorage(storage), + kafkaconsumergroup.WithKafkaGroupName(""), + ) + + assert.ErrorIs(t, err, cerrors.ErrValueRequired) + assert.Nil(t, consumer) +} + +func TestNew_EmptyTopic(t *testing.T) { + logger := slog.New(new(mockLogger)) + storage := new(mockStorage) + + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + 
kafkaconsumergroup.WithStorage(storage), + kafkaconsumergroup.WithKafkaGroupName("github-group"), + ) + + assert.ErrorIs(t, err, cerrors.ErrInvalid) + assert.Nil(t, consumer) +} + +func TestNew_InvalidTopic(t *testing.T) { + logger := slog.New(new(mockLogger)) + storage := new(mockStorage) + + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + kafkaconsumergroup.WithStorage(storage), + kafkaconsumergroup.WithKafkaGroupName("github-group"), + kafkaconsumergroup.WithTopic("invalid"), + ) + + assert.ErrorIs(t, err, cerrors.ErrInvalid) + assert.Nil(t, consumer) +} + +func TestNew_InvalidBrokers(t *testing.T) { + logger := slog.New(new(mockLogger)) + storage := new(mockStorage) + + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + kafkaconsumergroup.WithStorage(storage), + kafkaconsumergroup.WithKafkaGroupName("github-group"), + kafkaconsumergroup.WithTopic("github"), + kafkaconsumergroup.WithKafkaBrokers("invalid"), + ) + + assert.ErrorIs(t, err, cerrors.ErrInvalid) + assert.Nil(t, consumer) +} + +func TestNew_InvalidDialTimeout(t *testing.T) { + logger := slog.New(new(mockLogger)) + storage := new(mockStorage) + + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + kafkaconsumergroup.WithStorage(storage), + kafkaconsumergroup.WithKafkaGroupName("github-group"), + kafkaconsumergroup.WithTopic("github"), + kafkaconsumergroup.WithKafkaBrokers("127.0.0.1:9094"), + kafkaconsumergroup.WithDialTimeout(-1*time.Second), + ) + + assert.ErrorIs(t, err, cerrors.ErrInvalid) + assert.Nil(t, consumer) +} + +func TestNew_InvalidReadTimeout(t *testing.T) { + logger := slog.New(new(mockLogger)) + storage := new(mockStorage) + + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + kafkaconsumergroup.WithStorage(storage), + kafkaconsumergroup.WithKafkaGroupName("github-group"), + kafkaconsumergroup.WithTopic("github"), + 
kafkaconsumergroup.WithKafkaBrokers("127.0.0.1:9094"), + kafkaconsumergroup.WithReadTimeout(-1*time.Second), + ) + + assert.ErrorIs(t, err, cerrors.ErrInvalid) + assert.Nil(t, consumer) +} + +func TestNew_InvalidWriteTimeout(t *testing.T) { + logger := slog.New(new(mockLogger)) + storage := new(mockStorage) + + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + kafkaconsumergroup.WithStorage(storage), + kafkaconsumergroup.WithKafkaGroupName("github-group"), + kafkaconsumergroup.WithTopic("github"), + kafkaconsumergroup.WithKafkaBrokers("127.0.0.1:9094"), + kafkaconsumergroup.WithWriteTimeout(-1*time.Second), + ) + + assert.ErrorIs(t, err, cerrors.ErrInvalid) + assert.Nil(t, consumer) +} + +func TestNew_ZeroBackoff(t *testing.T) { + logger := slog.New(new(mockLogger)) + storage := new(mockStorage) + + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + kafkaconsumergroup.WithStorage(storage), + kafkaconsumergroup.WithKafkaGroupName("github-group"), + kafkaconsumergroup.WithTopic("github"), + kafkaconsumergroup.WithKafkaBrokers("127.0.0.1:9094"), + kafkaconsumergroup.WithBackoff(0), + ) + + assert.ErrorIs(t, err, cerrors.ErrValueRequired) + assert.Nil(t, consumer) +} + +func TestNew_InvalidBackoff(t *testing.T) { + logger := slog.New(new(mockLogger)) + storage := new(mockStorage) + + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + kafkaconsumergroup.WithStorage(storage), + kafkaconsumergroup.WithKafkaGroupName("github-group"), + kafkaconsumergroup.WithTopic("github"), + kafkaconsumergroup.WithKafkaBrokers("127.0.0.1:9094"), + kafkaconsumergroup.WithBackoff(2*time.Minute), + ) + + assert.ErrorIs(t, err, cerrors.ErrInvalid) + assert.Nil(t, consumer) +} + +func TestNew_InvalidMaxRetries(t *testing.T) { + logger := slog.New(new(mockLogger)) + storage := new(mockStorage) + + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + 
kafkaconsumergroup.WithStorage(storage), + kafkaconsumergroup.WithKafkaGroupName("github-group"), + kafkaconsumergroup.WithTopic("github"), + kafkaconsumergroup.WithKafkaBrokers("127.0.0.1:9094"), + kafkaconsumergroup.WithMaxRetries(256), + ) + + assert.ErrorIs(t, err, cerrors.ErrInvalid) + assert.Nil(t, consumer) +} + +func TestNew_InvalidKafkaVersion(t *testing.T) { + logger := slog.New(new(mockLogger)) + storage := new(mockStorage) + + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + kafkaconsumergroup.WithStorage(storage), + kafkaconsumergroup.WithKafkaGroupName("github-group"), + kafkaconsumergroup.WithTopic("github"), + kafkaconsumergroup.WithKafkaBrokers("127.0.0.1:9094"), + kafkaconsumergroup.WithKafkaVersion("1111"), + ) + + assert.ErrorIs(t, err, cerrors.ErrInvalid) + assert.Nil(t, consumer) +} + +func TestNew_NilSaramaConsumerGroupFactoryFunc(t *testing.T) { + logger := slog.New(new(mockLogger)) + storage := new(mockStorage) + + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + kafkaconsumergroup.WithStorage(storage), + kafkaconsumergroup.WithKafkaGroupName("github-group"), + kafkaconsumergroup.WithTopic("github"), + kafkaconsumergroup.WithKafkaBrokers("127.0.0.1:9094"), + kafkaconsumergroup.WithKafkaVersion("3.9.0"), + kafkaconsumergroup.WithDialTimeout(5*time.Second), + kafkaconsumergroup.WithReadTimeout(5*time.Second), + kafkaconsumergroup.WithWriteTimeout(5*time.Second), + kafkaconsumergroup.WithBackoff(1*time.Second), + kafkaconsumergroup.WithMaxRetries(2), + kafkaconsumergroup.WithSaramaConsumerGroupFactoryFunc(nil), + ) + + assert.ErrorIs(t, err, cerrors.ErrValueRequired) + assert.Nil(t, consumer) +} + +// type mockConsumerGroup struct { +// mock.Mock +// } +// +// func (m *mockConsumerGroup) NewConsumerGroup( +// brokers []string, +// groupName string, +// config *sarama.Config, +// ) (sarama.ConsumerGroup, error) { +// args := m.Called(brokers, groupName, config) +// return 
args.Get(0).(sarama.ConsumerGroup), args.Error(1) +// } + +func TestNew_NilSaramaConsumerGroupFactoryFunc_Error(t *testing.T) { + logger := slog.New(new(mockLogger)) + storage := new(mockStorage) + + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + kafkaconsumergroup.WithStorage(storage), + kafkaconsumergroup.WithKafkaGroupName("github-group"), + kafkaconsumergroup.WithTopic("github"), + kafkaconsumergroup.WithBackoff(100*time.Millisecond), + kafkaconsumergroup.WithMaxRetries(1), + ) + + assert.Nil(t, consumer) + assert.Error(t, err) +} + +type mockConsumerGroup struct { + mock.Mock +} + +func (m *mockConsumerGroup) Consume(ctx context.Context, topics []string, handler sarama.ConsumerGroupHandler) error { + args := m.Called(ctx, topics, handler) + return args.Error(0) +} + +func (m *mockConsumerGroup) Errors() <-chan error { + args := m.Called() + return args.Get(0).(<-chan error) +} + +func (m *mockConsumerGroup) Close() error { + args := m.Called() + return args.Error(0) +} + +func (m *mockConsumerGroup) Pause(partitions map[string][]int32) { + m.Called(partitions) +} + +func (m *mockConsumerGroup) Resume(partitions map[string][]int32) { + m.Called(partitions) +} + +func (m *mockConsumerGroup) PauseAll() { + m.Called() +} + +func (m *mockConsumerGroup) ResumeAll() { + m.Called() +} + +type mockConsumerGroupFactory struct { + mock.Mock +} + +func (m *mockConsumerGroupFactory) CreateConsumerGroup( + brokers []string, + groupName string, + config *sarama.Config, +) (sarama.ConsumerGroup, error) { + args := m.Called(brokers, groupName, config) + return args.Get(0).(sarama.ConsumerGroup), args.Error(1) +} + +func TestNew_NilSaramaConsumerGroupFactoryFunc_Success(t *testing.T) { + logger := slog.New(new(mockLogger)) + storage := new(mockStorage) + + consumerGroup := &mockConsumerGroup{} + consumerGroupFactory := &mockConsumerGroupFactory{} + consumerGroupFactory.On( + "CreateConsumerGroup", + mock.Anything, + mock.Anything, + 
mock.Anything, + ).Return(consumerGroup, nil).Once() + + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + kafkaconsumergroup.WithStorage(storage), + kafkaconsumergroup.WithKafkaGroupName("github-group"), + kafkaconsumergroup.WithTopic("github"), + kafkaconsumergroup.WithBackoff(100*time.Millisecond), + kafkaconsumergroup.WithMaxRetries(1), + kafkaconsumergroup.WithSaramaConsumerGroupFactoryFunc(consumerGroupFactory.CreateConsumerGroup), + ) + + assert.NotNil(t, consumer) + assert.NoError(t, err) + consumerGroupFactory.AssertNumberOfCalls(t, "CreateConsumerGroup", 1) + consumerGroupFactory.AssertExpectations(t) +} + // func TestNew(t *testing.T) { +// mockConsumerGroup.AssertNumberOfCalls(t, "NewConsumerGroup", 1) +// mockConsumerGroup.AssertExpectations(t) + // // kversion, err := sarama.ParseKafkaVersion("V2_0_0_0") // // fmt.Println("err", err) // // fmt.Println("kversion", kversion) @@ -9,3 +417,10 @@ package kafkaconsumergroup_test // assert.Nil(t, err) // assert.NotNil(t, cg) // } + +// func TestNew_Version(t *testing.T) { +// kversion, err := sarama.ParseKafkaVersion("3.9.0") +// fmt.Println("kversion", kversion) +// fmt.Printf("%T\n", kversion) +// fmt.Println("err", err) +// } From ec380bde2289b9f4bbf063cae1362a5569b55225 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?U=C4=9Fur=20=C3=96zy=C4=B1lmazel?= Date: Sat, 25 Jan 2025 21:46:05 +0300 Subject: [PATCH 03/13] before process message --- .../kafkaconsumergroup/kafkaconsumergroup.go | 69 +- .../kafkaconsumergroup_test.go | 824 +++++++++--------- 2 files changed, 467 insertions(+), 426 deletions(-) diff --git a/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup.go b/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup.go index de4281e..50f9ef3 100644 --- a/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup.go +++ b/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup.go @@ -30,11 +30,11 @@ var _ sarama.ConsumerGroupHandler = (*Consumer)(nil) // compile time proof // 
Consumer represents kafa group consumer setup. type Consumer struct { - KafkaGroupName string - Logger *slog.Logger - Storage storage.PingStorer - // SaramaConfig *sarama.Config + KafkaGroupName string + Logger *slog.Logger + Storage storage.PingStorer SaramaConsumerGroupFactoryFunc SaramaConsumerGroupFactoryFunc + MessageQueue chan *sarama.ConsumerMessage SaramaConsumerGroup sarama.ConsumerGroup Topic kafkacp.KafkaTopicIdentifier KafkaBrokers kafkacp.KafkaBrokers @@ -44,6 +44,7 @@ type Consumer struct { WriteTimeout time.Duration Backoff time.Duration MaxRetries uint8 + MessageBufferSize int NumberOfWorkers int } @@ -70,16 +71,16 @@ func (c *Consumer) checkRequired() error { return nil } -// Setup ... +// Setup implements sarama ConsumerGroupHandler interface. func (Consumer) Setup(_ sarama.ConsumerGroupSession) error { return nil } -// Cleanup ... +// Cleanup implements sarama ConsumerGroupHandler interface. func (Consumer) Cleanup(_ sarama.ConsumerGroupSession) error { return nil } -// ConsumeClaim ... -func (Consumer) ConsumeClaim(sess sarama.ConsumerGroupSession, claim sarama.ConsumerGroupClaim) error { +// ConsumeClaim implements sarama ConsumerGroupHandler interface. +func (c Consumer) ConsumeClaim(sess sarama.ConsumerGroupSession, claim sarama.ConsumerGroupClaim) error { for msg := range claim.Messages() { - fmt.Println(msg.Topic, msg.Partition, msg.Offset, string(msg.Key), string(msg.Value)) + c.MessageQueue <- msg sess.MarkMessage(msg, "") } @@ -87,11 +88,13 @@ func (Consumer) ConsumeClaim(sess sarama.ConsumerGroupSession, claim sarama.Cons return nil } -// StartConsume ... +// StartConsume consumes message from kafka. 
func (c Consumer) StartConsume() error { ctx, cancel := signal.NotifyContext(context.Background(), os.Interrupt, os.Kill) defer cancel() + topics := []string{c.Topic.String()} + var wg sync.WaitGroup wg.Add(1) @@ -106,7 +109,7 @@ func (c Consumer) StartConsume() error { return } - c.Logger.Error("group errrrrrrrrr", "error", err) + c.Logger.Error("kafka consumer group error", "error", err) case <-ctx.Done(): c.Logger.Info("context canceled, stopping error handler") @@ -115,10 +118,46 @@ func (c Consumer) StartConsume() error { } }() + c.Logger.Info("starting workers", "count", c.NumberOfWorkers) + for i := range c.NumberOfWorkers { + wg.Add(1) + + go func() { + defer func() { + wg.Done() + c.Logger.Info("worker stopped", "id", i) + }() + + for { + select { + case msg, ok := <-c.MessageQueue: + if !ok { + return + } + + c.Logger.Info( + "store message here", + "worker", i, + "topic", msg.Topic, + "partition", msg.Partition, + "offset", msg.Offset, + "key", string(msg.Key), + // "value", string(msg.Value), + ) + + case <-ctx.Done(): + return + } + } + }() + } + wg.Add(1) go func() { - defer wg.Done() - topics := []string{c.Topic.String()} + defer func() { + wg.Done() + close(c.MessageQueue) + }() for { if ctx.Err() != nil { @@ -134,7 +173,7 @@ func (c Consumer) StartConsume() error { return } - c.Logger.Error("erroooo", "error", err) + c.Logger.Error("kafka consume group consume", "error", err) } } }() @@ -327,8 +366,10 @@ func New(options ...Option) (*Consumer, error) { consumer.Backoff = DefaultBackoff consumer.MaxRetries = DefaultMaxRetries consumer.NumberOfWorkers = runtime.NumCPU() + consumer.MessageBufferSize = consumer.NumberOfWorkers * 10 consumer.KafkaVersion = sarama.V3_9_0_0 consumer.SaramaConsumerGroupFactoryFunc = sarama.NewConsumerGroup + consumer.MessageQueue = make(chan *sarama.ConsumerMessage, consumer.MessageBufferSize) for _, option := range options { if err := option(consumer); err != nil { diff --git 
a/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go b/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go index f3db284..26135e7 100644 --- a/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go +++ b/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go @@ -1,308 +1,373 @@ package kafkaconsumergroup_test -import ( - "context" - "log/slog" - "testing" - "time" - - "github.com/IBM/sarama" - "github.com/devchain-network/cauldron/internal/cerrors" - "github.com/devchain-network/cauldron/internal/kafkacp/kafkaconsumergroup" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/mock" -) - -type mockLogger struct{} - -func (h *mockLogger) Enabled(_ context.Context, _ slog.Level) bool { - return true -} - -func (h *mockLogger) Handle(_ context.Context, record slog.Record) error { - return nil -} - -func (h *mockLogger) WithAttrs(attrs []slog.Attr) slog.Handler { - return h -} - -func (h *mockLogger) WithGroup(name string) slog.Handler { - return h -} - -type mockStorage struct { - mock.Mock -} - -func (m *mockStorage) MessageStore(ctx context.Context, msg *sarama.ConsumerMessage) error { - args := m.Called(ctx, msg) - return args.Error(0) -} - -func (m *mockStorage) Ping(ctx context.Context, maxRetries uint8, backoff time.Duration) error { - args := m.Called(ctx, maxRetries, backoff) - return args.Error(0) -} - -func TestNew_MissingRequiredFields(t *testing.T) { - consumer, err := kafkaconsumergroup.New() - - assert.ErrorIs(t, err, cerrors.ErrValueRequired) - assert.Nil(t, consumer) -} - -func TestNew_NilLogger(t *testing.T) { - consumer, err := kafkaconsumergroup.New( - kafkaconsumergroup.WithLogger(nil), - ) - - assert.ErrorIs(t, err, cerrors.ErrValueRequired) - assert.Nil(t, consumer) -} - -func TestNew_NoStorage(t *testing.T) { - logger := slog.New(new(mockLogger)) - - consumer, err := kafkaconsumergroup.New( - kafkaconsumergroup.WithLogger(logger), - ) - - assert.ErrorIs(t, err, cerrors.ErrValueRequired) - 
assert.Nil(t, consumer) -} - -func TestNew_NilStorage(t *testing.T) { - logger := slog.New(new(mockLogger)) - - consumer, err := kafkaconsumergroup.New( - kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithStorage(nil), - ) - - assert.ErrorIs(t, err, cerrors.ErrValueRequired) - assert.Nil(t, consumer) -} - -func TestNew_NoGroupName(t *testing.T) { - logger := slog.New(new(mockLogger)) - storage := new(mockStorage) - - consumer, err := kafkaconsumergroup.New( - kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithStorage(storage), - ) - - assert.ErrorIs(t, err, cerrors.ErrValueRequired) - assert.Nil(t, consumer) -} - -func TestNew_EmptyGroupName(t *testing.T) { - logger := slog.New(new(mockLogger)) - storage := new(mockStorage) - - consumer, err := kafkaconsumergroup.New( - kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithStorage(storage), - kafkaconsumergroup.WithKafkaGroupName(""), - ) - - assert.ErrorIs(t, err, cerrors.ErrValueRequired) - assert.Nil(t, consumer) -} - -func TestNew_EmptyTopic(t *testing.T) { - logger := slog.New(new(mockLogger)) - storage := new(mockStorage) - - consumer, err := kafkaconsumergroup.New( - kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithStorage(storage), - kafkaconsumergroup.WithKafkaGroupName("github-group"), - ) - - assert.ErrorIs(t, err, cerrors.ErrInvalid) - assert.Nil(t, consumer) -} - -func TestNew_InvalidTopic(t *testing.T) { - logger := slog.New(new(mockLogger)) - storage := new(mockStorage) - - consumer, err := kafkaconsumergroup.New( - kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithStorage(storage), - kafkaconsumergroup.WithKafkaGroupName("github-group"), - kafkaconsumergroup.WithTopic("invalid"), - ) - - assert.ErrorIs(t, err, cerrors.ErrInvalid) - assert.Nil(t, consumer) -} - -func TestNew_InvalidBrokers(t *testing.T) { - logger := slog.New(new(mockLogger)) - storage := new(mockStorage) - - consumer, err := kafkaconsumergroup.New( - 
kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithStorage(storage), - kafkaconsumergroup.WithKafkaGroupName("github-group"), - kafkaconsumergroup.WithTopic("github"), - kafkaconsumergroup.WithKafkaBrokers("invalid"), - ) - - assert.ErrorIs(t, err, cerrors.ErrInvalid) - assert.Nil(t, consumer) -} - -func TestNew_InvalidDialTimeout(t *testing.T) { - logger := slog.New(new(mockLogger)) - storage := new(mockStorage) - - consumer, err := kafkaconsumergroup.New( - kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithStorage(storage), - kafkaconsumergroup.WithKafkaGroupName("github-group"), - kafkaconsumergroup.WithTopic("github"), - kafkaconsumergroup.WithKafkaBrokers("127.0.0.1:9094"), - kafkaconsumergroup.WithDialTimeout(-1*time.Second), - ) - - assert.ErrorIs(t, err, cerrors.ErrInvalid) - assert.Nil(t, consumer) -} - -func TestNew_InvalidReadTimeout(t *testing.T) { - logger := slog.New(new(mockLogger)) - storage := new(mockStorage) - - consumer, err := kafkaconsumergroup.New( - kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithStorage(storage), - kafkaconsumergroup.WithKafkaGroupName("github-group"), - kafkaconsumergroup.WithTopic("github"), - kafkaconsumergroup.WithKafkaBrokers("127.0.0.1:9094"), - kafkaconsumergroup.WithReadTimeout(-1*time.Second), - ) - - assert.ErrorIs(t, err, cerrors.ErrInvalid) - assert.Nil(t, consumer) -} - -func TestNew_InvalidWriteTimeout(t *testing.T) { - logger := slog.New(new(mockLogger)) - storage := new(mockStorage) - - consumer, err := kafkaconsumergroup.New( - kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithStorage(storage), - kafkaconsumergroup.WithKafkaGroupName("github-group"), - kafkaconsumergroup.WithTopic("github"), - kafkaconsumergroup.WithKafkaBrokers("127.0.0.1:9094"), - kafkaconsumergroup.WithWriteTimeout(-1*time.Second), - ) - - assert.ErrorIs(t, err, cerrors.ErrInvalid) - assert.Nil(t, consumer) -} - -func TestNew_ZeroBackoff(t *testing.T) { - logger := 
slog.New(new(mockLogger)) - storage := new(mockStorage) - - consumer, err := kafkaconsumergroup.New( - kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithStorage(storage), - kafkaconsumergroup.WithKafkaGroupName("github-group"), - kafkaconsumergroup.WithTopic("github"), - kafkaconsumergroup.WithKafkaBrokers("127.0.0.1:9094"), - kafkaconsumergroup.WithBackoff(0), - ) - - assert.ErrorIs(t, err, cerrors.ErrValueRequired) - assert.Nil(t, consumer) -} - -func TestNew_InvalidBackoff(t *testing.T) { - logger := slog.New(new(mockLogger)) - storage := new(mockStorage) - - consumer, err := kafkaconsumergroup.New( - kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithStorage(storage), - kafkaconsumergroup.WithKafkaGroupName("github-group"), - kafkaconsumergroup.WithTopic("github"), - kafkaconsumergroup.WithKafkaBrokers("127.0.0.1:9094"), - kafkaconsumergroup.WithBackoff(2*time.Minute), - ) - - assert.ErrorIs(t, err, cerrors.ErrInvalid) - assert.Nil(t, consumer) -} - -func TestNew_InvalidMaxRetries(t *testing.T) { - logger := slog.New(new(mockLogger)) - storage := new(mockStorage) - - consumer, err := kafkaconsumergroup.New( - kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithStorage(storage), - kafkaconsumergroup.WithKafkaGroupName("github-group"), - kafkaconsumergroup.WithTopic("github"), - kafkaconsumergroup.WithKafkaBrokers("127.0.0.1:9094"), - kafkaconsumergroup.WithMaxRetries(256), - ) - - assert.ErrorIs(t, err, cerrors.ErrInvalid) - assert.Nil(t, consumer) -} - -func TestNew_InvalidKafkaVersion(t *testing.T) { - logger := slog.New(new(mockLogger)) - storage := new(mockStorage) - - consumer, err := kafkaconsumergroup.New( - kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithStorage(storage), - kafkaconsumergroup.WithKafkaGroupName("github-group"), - kafkaconsumergroup.WithTopic("github"), - kafkaconsumergroup.WithKafkaBrokers("127.0.0.1:9094"), - kafkaconsumergroup.WithKafkaVersion("1111"), - ) - - assert.ErrorIs(t, 
err, cerrors.ErrInvalid) - assert.Nil(t, consumer) -} - -func TestNew_NilSaramaConsumerGroupFactoryFunc(t *testing.T) { - logger := slog.New(new(mockLogger)) - storage := new(mockStorage) - - consumer, err := kafkaconsumergroup.New( - kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithStorage(storage), - kafkaconsumergroup.WithKafkaGroupName("github-group"), - kafkaconsumergroup.WithTopic("github"), - kafkaconsumergroup.WithKafkaBrokers("127.0.0.1:9094"), - kafkaconsumergroup.WithKafkaVersion("3.9.0"), - kafkaconsumergroup.WithDialTimeout(5*time.Second), - kafkaconsumergroup.WithReadTimeout(5*time.Second), - kafkaconsumergroup.WithWriteTimeout(5*time.Second), - kafkaconsumergroup.WithBackoff(1*time.Second), - kafkaconsumergroup.WithMaxRetries(2), - kafkaconsumergroup.WithSaramaConsumerGroupFactoryFunc(nil), - ) - - assert.ErrorIs(t, err, cerrors.ErrValueRequired) - assert.Nil(t, consumer) -} - +// import ( +// "context" +// "log/slog" +// "testing" +// "time" +// +// "github.com/IBM/sarama" +// "github.com/devchain-network/cauldron/internal/cerrors" +// "github.com/devchain-network/cauldron/internal/kafkacp/kafkaconsumergroup" +// "github.com/stretchr/testify/assert" +// "github.com/stretchr/testify/mock" +// ) +// +// type mockLogger struct{} +// +// func (h *mockLogger) Enabled(_ context.Context, _ slog.Level) bool { +// return true +// } +// +// func (h *mockLogger) Handle(_ context.Context, record slog.Record) error { +// return nil +// } +// +// func (h *mockLogger) WithAttrs(attrs []slog.Attr) slog.Handler { +// return h +// } +// +// func (h *mockLogger) WithGroup(name string) slog.Handler { +// return h +// } +// +// type mockStorage struct { +// mock.Mock +// } +// +// func (m *mockStorage) MessageStore(ctx context.Context, msg *sarama.ConsumerMessage) error { +// args := m.Called(ctx, msg) +// return args.Error(0) +// } +// +// func (m *mockStorage) Ping(ctx context.Context, maxRetries uint8, backoff time.Duration) error { +// args := 
m.Called(ctx, maxRetries, backoff) +// return args.Error(0) +// } +// +// func TestNew_MissingRequiredFields(t *testing.T) { +// consumer, err := kafkaconsumergroup.New() +// +// assert.ErrorIs(t, err, cerrors.ErrValueRequired) +// assert.Nil(t, consumer) +// } +// +// func TestNew_NilLogger(t *testing.T) { +// consumer, err := kafkaconsumergroup.New( +// kafkaconsumergroup.WithLogger(nil), +// ) +// +// assert.ErrorIs(t, err, cerrors.ErrValueRequired) +// assert.Nil(t, consumer) +// } +// +// func TestNew_NoStorage(t *testing.T) { +// logger := slog.New(new(mockLogger)) +// +// consumer, err := kafkaconsumergroup.New( +// kafkaconsumergroup.WithLogger(logger), +// ) +// +// assert.ErrorIs(t, err, cerrors.ErrValueRequired) +// assert.Nil(t, consumer) +// } +// +// func TestNew_NilStorage(t *testing.T) { +// logger := slog.New(new(mockLogger)) +// +// consumer, err := kafkaconsumergroup.New( +// kafkaconsumergroup.WithLogger(logger), +// kafkaconsumergroup.WithStorage(nil), +// ) +// +// assert.ErrorIs(t, err, cerrors.ErrValueRequired) +// assert.Nil(t, consumer) +// } +// +// func TestNew_NoGroupName(t *testing.T) { +// logger := slog.New(new(mockLogger)) +// storage := new(mockStorage) +// +// consumer, err := kafkaconsumergroup.New( +// kafkaconsumergroup.WithLogger(logger), +// kafkaconsumergroup.WithStorage(storage), +// ) +// +// assert.ErrorIs(t, err, cerrors.ErrValueRequired) +// assert.Nil(t, consumer) +// } +// +// func TestNew_EmptyGroupName(t *testing.T) { +// logger := slog.New(new(mockLogger)) +// storage := new(mockStorage) +// +// consumer, err := kafkaconsumergroup.New( +// kafkaconsumergroup.WithLogger(logger), +// kafkaconsumergroup.WithStorage(storage), +// kafkaconsumergroup.WithKafkaGroupName(""), +// ) +// +// assert.ErrorIs(t, err, cerrors.ErrValueRequired) +// assert.Nil(t, consumer) +// } +// +// func TestNew_EmptyTopic(t *testing.T) { +// logger := slog.New(new(mockLogger)) +// storage := new(mockStorage) +// +// consumer, err := 
kafkaconsumergroup.New( +// kafkaconsumergroup.WithLogger(logger), +// kafkaconsumergroup.WithStorage(storage), +// kafkaconsumergroup.WithKafkaGroupName("github-group"), +// ) +// +// assert.ErrorIs(t, err, cerrors.ErrInvalid) +// assert.Nil(t, consumer) +// } +// +// func TestNew_InvalidTopic(t *testing.T) { +// logger := slog.New(new(mockLogger)) +// storage := new(mockStorage) +// +// consumer, err := kafkaconsumergroup.New( +// kafkaconsumergroup.WithLogger(logger), +// kafkaconsumergroup.WithStorage(storage), +// kafkaconsumergroup.WithKafkaGroupName("github-group"), +// kafkaconsumergroup.WithTopic("invalid"), +// ) +// +// assert.ErrorIs(t, err, cerrors.ErrInvalid) +// assert.Nil(t, consumer) +// } +// +// func TestNew_InvalidBrokers(t *testing.T) { +// logger := slog.New(new(mockLogger)) +// storage := new(mockStorage) +// +// consumer, err := kafkaconsumergroup.New( +// kafkaconsumergroup.WithLogger(logger), +// kafkaconsumergroup.WithStorage(storage), +// kafkaconsumergroup.WithKafkaGroupName("github-group"), +// kafkaconsumergroup.WithTopic("github"), +// kafkaconsumergroup.WithKafkaBrokers("invalid"), +// ) +// +// assert.ErrorIs(t, err, cerrors.ErrInvalid) +// assert.Nil(t, consumer) +// } +// +// func TestNew_InvalidDialTimeout(t *testing.T) { +// logger := slog.New(new(mockLogger)) +// storage := new(mockStorage) +// +// consumer, err := kafkaconsumergroup.New( +// kafkaconsumergroup.WithLogger(logger), +// kafkaconsumergroup.WithStorage(storage), +// kafkaconsumergroup.WithKafkaGroupName("github-group"), +// kafkaconsumergroup.WithTopic("github"), +// kafkaconsumergroup.WithKafkaBrokers("127.0.0.1:9094"), +// kafkaconsumergroup.WithDialTimeout(-1*time.Second), +// ) +// +// assert.ErrorIs(t, err, cerrors.ErrInvalid) +// assert.Nil(t, consumer) +// } +// +// func TestNew_InvalidReadTimeout(t *testing.T) { +// logger := slog.New(new(mockLogger)) +// storage := new(mockStorage) +// +// consumer, err := kafkaconsumergroup.New( +// 
kafkaconsumergroup.WithLogger(logger), +// kafkaconsumergroup.WithStorage(storage), +// kafkaconsumergroup.WithKafkaGroupName("github-group"), +// kafkaconsumergroup.WithTopic("github"), +// kafkaconsumergroup.WithKafkaBrokers("127.0.0.1:9094"), +// kafkaconsumergroup.WithReadTimeout(-1*time.Second), +// ) +// +// assert.ErrorIs(t, err, cerrors.ErrInvalid) +// assert.Nil(t, consumer) +// } +// +// func TestNew_InvalidWriteTimeout(t *testing.T) { +// logger := slog.New(new(mockLogger)) +// storage := new(mockStorage) +// +// consumer, err := kafkaconsumergroup.New( +// kafkaconsumergroup.WithLogger(logger), +// kafkaconsumergroup.WithStorage(storage), +// kafkaconsumergroup.WithKafkaGroupName("github-group"), +// kafkaconsumergroup.WithTopic("github"), +// kafkaconsumergroup.WithKafkaBrokers("127.0.0.1:9094"), +// kafkaconsumergroup.WithWriteTimeout(-1*time.Second), +// ) +// +// assert.ErrorIs(t, err, cerrors.ErrInvalid) +// assert.Nil(t, consumer) +// } +// +// func TestNew_ZeroBackoff(t *testing.T) { +// logger := slog.New(new(mockLogger)) +// storage := new(mockStorage) +// +// consumer, err := kafkaconsumergroup.New( +// kafkaconsumergroup.WithLogger(logger), +// kafkaconsumergroup.WithStorage(storage), +// kafkaconsumergroup.WithKafkaGroupName("github-group"), +// kafkaconsumergroup.WithTopic("github"), +// kafkaconsumergroup.WithKafkaBrokers("127.0.0.1:9094"), +// kafkaconsumergroup.WithBackoff(0), +// ) +// +// assert.ErrorIs(t, err, cerrors.ErrValueRequired) +// assert.Nil(t, consumer) +// } +// +// func TestNew_InvalidBackoff(t *testing.T) { +// logger := slog.New(new(mockLogger)) +// storage := new(mockStorage) +// +// consumer, err := kafkaconsumergroup.New( +// kafkaconsumergroup.WithLogger(logger), +// kafkaconsumergroup.WithStorage(storage), +// kafkaconsumergroup.WithKafkaGroupName("github-group"), +// kafkaconsumergroup.WithTopic("github"), +// kafkaconsumergroup.WithKafkaBrokers("127.0.0.1:9094"), +// kafkaconsumergroup.WithBackoff(2*time.Minute), 
+// ) +// +// assert.ErrorIs(t, err, cerrors.ErrInvalid) +// assert.Nil(t, consumer) +// } +// +// func TestNew_InvalidMaxRetries(t *testing.T) { +// logger := slog.New(new(mockLogger)) +// storage := new(mockStorage) +// +// consumer, err := kafkaconsumergroup.New( +// kafkaconsumergroup.WithLogger(logger), +// kafkaconsumergroup.WithStorage(storage), +// kafkaconsumergroup.WithKafkaGroupName("github-group"), +// kafkaconsumergroup.WithTopic("github"), +// kafkaconsumergroup.WithKafkaBrokers("127.0.0.1:9094"), +// kafkaconsumergroup.WithMaxRetries(256), +// ) +// +// assert.ErrorIs(t, err, cerrors.ErrInvalid) +// assert.Nil(t, consumer) +// } +// +// func TestNew_InvalidKafkaVersion(t *testing.T) { +// logger := slog.New(new(mockLogger)) +// storage := new(mockStorage) +// +// consumer, err := kafkaconsumergroup.New( +// kafkaconsumergroup.WithLogger(logger), +// kafkaconsumergroup.WithStorage(storage), +// kafkaconsumergroup.WithKafkaGroupName("github-group"), +// kafkaconsumergroup.WithTopic("github"), +// kafkaconsumergroup.WithKafkaBrokers("127.0.0.1:9094"), +// kafkaconsumergroup.WithKafkaVersion("1111"), +// ) +// +// assert.ErrorIs(t, err, cerrors.ErrInvalid) +// assert.Nil(t, consumer) +// } +// +// func TestNew_NilSaramaConsumerGroupFactoryFunc(t *testing.T) { +// logger := slog.New(new(mockLogger)) +// storage := new(mockStorage) +// +// consumer, err := kafkaconsumergroup.New( +// kafkaconsumergroup.WithLogger(logger), +// kafkaconsumergroup.WithStorage(storage), +// kafkaconsumergroup.WithKafkaGroupName("github-group"), +// kafkaconsumergroup.WithTopic("github"), +// kafkaconsumergroup.WithKafkaBrokers("127.0.0.1:9094"), +// kafkaconsumergroup.WithKafkaVersion("3.9.0"), +// kafkaconsumergroup.WithDialTimeout(5*time.Second), +// kafkaconsumergroup.WithReadTimeout(5*time.Second), +// kafkaconsumergroup.WithWriteTimeout(5*time.Second), +// kafkaconsumergroup.WithBackoff(1*time.Second), +// kafkaconsumergroup.WithMaxRetries(2), +// 
kafkaconsumergroup.WithSaramaConsumerGroupFactoryFunc(nil), +// ) +// +// assert.ErrorIs(t, err, cerrors.ErrValueRequired) +// assert.Nil(t, consumer) +// } +// +// // type mockConsumerGroup struct { +// // mock.Mock +// // } +// // +// // func (m *mockConsumerGroup) NewConsumerGroup( +// // brokers []string, +// // groupName string, +// // config *sarama.Config, +// // ) (sarama.ConsumerGroup, error) { +// // args := m.Called(brokers, groupName, config) +// // return args.Get(0).(sarama.ConsumerGroup), args.Error(1) +// // } +// +// func TestNew_NilSaramaConsumerGroupFactoryFunc_Error(t *testing.T) { +// logger := slog.New(new(mockLogger)) +// storage := new(mockStorage) +// +// consumer, err := kafkaconsumergroup.New( +// kafkaconsumergroup.WithLogger(logger), +// kafkaconsumergroup.WithStorage(storage), +// kafkaconsumergroup.WithKafkaGroupName("github-group"), +// kafkaconsumergroup.WithTopic("github"), +// kafkaconsumergroup.WithBackoff(100*time.Millisecond), +// kafkaconsumergroup.WithMaxRetries(1), +// ) +// +// assert.Nil(t, consumer) +// assert.Error(t, err) +// } +// // type mockConsumerGroup struct { // mock.Mock // } // -// func (m *mockConsumerGroup) NewConsumerGroup( +// func (m *mockConsumerGroup) Consume(ctx context.Context, topics []string, handler sarama.ConsumerGroupHandler) error { +// args := m.Called(ctx, topics, handler) +// return args.Error(0) +// } +// +// func (m *mockConsumerGroup) Errors() <-chan error { +// args := m.Called() +// return args.Get(0).(<-chan error) +// } +// +// func (m *mockConsumerGroup) Close() error { +// args := m.Called() +// return args.Error(0) +// } +// +// func (m *mockConsumerGroup) Pause(partitions map[string][]int32) { +// m.Called(partitions) +// } +// +// func (m *mockConsumerGroup) Resume(partitions map[string][]int32) { +// m.Called(partitions) +// } +// +// func (m *mockConsumerGroup) PauseAll() { +// m.Called() +// } +// +// func (m *mockConsumerGroup) ResumeAll() { +// m.Called() +// } +// +// type 
mockConsumerGroupFactory struct { +// mock.Mock +// } +// +// func (m *mockConsumerGroupFactory) CreateConsumerGroup( // brokers []string, // groupName string, // config *sarama.Config, @@ -310,117 +375,52 @@ func TestNew_NilSaramaConsumerGroupFactoryFunc(t *testing.T) { // args := m.Called(brokers, groupName, config) // return args.Get(0).(sarama.ConsumerGroup), args.Error(1) // } - -func TestNew_NilSaramaConsumerGroupFactoryFunc_Error(t *testing.T) { - logger := slog.New(new(mockLogger)) - storage := new(mockStorage) - - consumer, err := kafkaconsumergroup.New( - kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithStorage(storage), - kafkaconsumergroup.WithKafkaGroupName("github-group"), - kafkaconsumergroup.WithTopic("github"), - kafkaconsumergroup.WithBackoff(100*time.Millisecond), - kafkaconsumergroup.WithMaxRetries(1), - ) - - assert.Nil(t, consumer) - assert.Error(t, err) -} - -type mockConsumerGroup struct { - mock.Mock -} - -func (m *mockConsumerGroup) Consume(ctx context.Context, topics []string, handler sarama.ConsumerGroupHandler) error { - args := m.Called(ctx, topics, handler) - return args.Error(0) -} - -func (m *mockConsumerGroup) Errors() <-chan error { - args := m.Called() - return args.Get(0).(<-chan error) -} - -func (m *mockConsumerGroup) Close() error { - args := m.Called() - return args.Error(0) -} - -func (m *mockConsumerGroup) Pause(partitions map[string][]int32) { - m.Called(partitions) -} - -func (m *mockConsumerGroup) Resume(partitions map[string][]int32) { - m.Called(partitions) -} - -func (m *mockConsumerGroup) PauseAll() { - m.Called() -} - -func (m *mockConsumerGroup) ResumeAll() { - m.Called() -} - -type mockConsumerGroupFactory struct { - mock.Mock -} - -func (m *mockConsumerGroupFactory) CreateConsumerGroup( - brokers []string, - groupName string, - config *sarama.Config, -) (sarama.ConsumerGroup, error) { - args := m.Called(brokers, groupName, config) - return args.Get(0).(sarama.ConsumerGroup), args.Error(1) -} - 
-func TestNew_NilSaramaConsumerGroupFactoryFunc_Success(t *testing.T) { - logger := slog.New(new(mockLogger)) - storage := new(mockStorage) - - consumerGroup := &mockConsumerGroup{} - consumerGroupFactory := &mockConsumerGroupFactory{} - consumerGroupFactory.On( - "CreateConsumerGroup", - mock.Anything, - mock.Anything, - mock.Anything, - ).Return(consumerGroup, nil).Once() - - consumer, err := kafkaconsumergroup.New( - kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithStorage(storage), - kafkaconsumergroup.WithKafkaGroupName("github-group"), - kafkaconsumergroup.WithTopic("github"), - kafkaconsumergroup.WithBackoff(100*time.Millisecond), - kafkaconsumergroup.WithMaxRetries(1), - kafkaconsumergroup.WithSaramaConsumerGroupFactoryFunc(consumerGroupFactory.CreateConsumerGroup), - ) - - assert.NotNil(t, consumer) - assert.NoError(t, err) - consumerGroupFactory.AssertNumberOfCalls(t, "CreateConsumerGroup", 1) - consumerGroupFactory.AssertExpectations(t) -} - -// func TestNew(t *testing.T) { -// mockConsumerGroup.AssertNumberOfCalls(t, "NewConsumerGroup", 1) -// mockConsumerGroup.AssertExpectations(t) - -// // kversion, err := sarama.ParseKafkaVersion("V2_0_0_0") -// // fmt.Println("err", err) -// // fmt.Println("kversion", kversion) -// fmt.Println(sarama.V3_9_0_0) -// cg, err := kafkaconsumergroup.New() -// assert.Nil(t, err) -// assert.NotNil(t, cg) -// } - -// func TestNew_Version(t *testing.T) { -// kversion, err := sarama.ParseKafkaVersion("3.9.0") -// fmt.Println("kversion", kversion) -// fmt.Printf("%T\n", kversion) -// fmt.Println("err", err) +// +// func TestNew_NilSaramaConsumerGroupFactoryFunc_Success(t *testing.T) { +// logger := slog.New(new(mockLogger)) +// storage := new(mockStorage) +// +// consumerGroup := &mockConsumerGroup{} +// consumerGroupFactory := &mockConsumerGroupFactory{} +// consumerGroupFactory.On( +// "CreateConsumerGroup", +// mock.Anything, +// mock.Anything, +// mock.Anything, +// ).Return(consumerGroup, nil).Once() +// +// 
consumer, err := kafkaconsumergroup.New( +// kafkaconsumergroup.WithLogger(logger), +// kafkaconsumergroup.WithStorage(storage), +// kafkaconsumergroup.WithKafkaGroupName("github-group"), +// kafkaconsumergroup.WithTopic("github"), +// kafkaconsumergroup.WithBackoff(100*time.Millisecond), +// kafkaconsumergroup.WithMaxRetries(1), +// kafkaconsumergroup.WithSaramaConsumerGroupFactoryFunc(consumerGroupFactory.CreateConsumerGroup), +// ) +// +// assert.NotNil(t, consumer) +// assert.NoError(t, err) +// consumerGroupFactory.AssertNumberOfCalls(t, "CreateConsumerGroup", 1) +// consumerGroupFactory.AssertExpectations(t) // } +// +// // func TestNew(t *testing.T) { +// // mockConsumerGroup.AssertNumberOfCalls(t, "NewConsumerGroup", 1) +// // mockConsumerGroup.AssertExpectations(t) +// +// // // kversion, err := sarama.ParseKafkaVersion("V2_0_0_0") +// // // fmt.Println("err", err) +// // // fmt.Println("kversion", kversion) +// // fmt.Println(sarama.V3_9_0_0) +// // cg, err := kafkaconsumergroup.New() +// // assert.Nil(t, err) +// // assert.NotNil(t, cg) +// // } +// +// // func TestNew_Version(t *testing.T) { +// // kversion, err := sarama.ParseKafkaVersion("3.9.0") +// // fmt.Println("kversion", kversion) +// // fmt.Printf("%T\n", kversion) +// // fmt.Println("err", err) +// // } From a248c3512f405dc73a85dc3eb0eec02ae73e4bd5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?U=C4=9Fur=20=C3=96zy=C4=B1lmazel?= Date: Sat, 25 Jan 2025 23:46:26 +0300 Subject: [PATCH 04/13] wip - process message done, working on tests --- cmd/githubconsumergroup/main.go | 13 +- .../kafkaconsumergroup/kafkaconsumergroup.go | 139 ++- .../kafkaconsumergroup_test.go | 874 +++++++++--------- 3 files changed, 562 insertions(+), 464 deletions(-) diff --git a/cmd/githubconsumergroup/main.go b/cmd/githubconsumergroup/main.go index f2449ba..e4ac863 100644 --- a/cmd/githubconsumergroup/main.go +++ b/cmd/githubconsumergroup/main.go @@ -5,6 +5,7 @@ import ( "fmt" "log" + "github.com/IBM/sarama" 
"github.com/devchain-network/cauldron/internal/kafkacp" "github.com/devchain-network/cauldron/internal/kafkacp/kafkaconsumergroup" "github.com/devchain-network/cauldron/internal/slogger" @@ -13,6 +14,16 @@ import ( "github.com/vigo/getenv" ) +func storeMessage(strg storage.PingStorer) kafkaconsumergroup.ProcessMessageFunc { + return func(ctx context.Context, msg *sarama.ConsumerMessage) error { + if err := strg.MessageStore(ctx, msg); err != nil { + return fmt.Errorf("message store error: [%w]", err) + } + + return nil + } +} + // Run runs kafa github consumer group. func Run() error { logLevel := getenv.String("LOG_LEVEL", slogger.DefaultLogLevel) @@ -59,7 +70,7 @@ func Run() error { kafkaGitHubConsumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithStorage(db), + kafkaconsumergroup.WithProcessMessageFunc(storeMessage(db)), kafkaconsumergroup.WithKafkaBrokers(*brokersList), kafkaconsumergroup.WithDialTimeout(*kafkaDialTimeout), kafkaconsumergroup.WithReadTimeout(*kafkaReadTimeout), diff --git a/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup.go b/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup.go index 50f9ef3..84578d8 100644 --- a/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup.go +++ b/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup.go @@ -14,7 +14,6 @@ import ( "github.com/IBM/sarama" "github.com/devchain-network/cauldron/internal/cerrors" "github.com/devchain-network/cauldron/internal/kafkacp" - "github.com/devchain-network/cauldron/internal/storage" ) // defaults. 
@@ -32,8 +31,8 @@ var _ sarama.ConsumerGroupHandler = (*Consumer)(nil) // compile time proof type Consumer struct { KafkaGroupName string Logger *slog.Logger - Storage storage.PingStorer SaramaConsumerGroupFactoryFunc SaramaConsumerGroupFactoryFunc + ProcessMessageFunc ProcessMessageFunc MessageQueue chan *sarama.ConsumerMessage SaramaConsumerGroup sarama.ConsumerGroup Topic kafkacp.KafkaTopicIdentifier @@ -51,21 +50,36 @@ type Consumer struct { // SaramaConsumerGroupFactoryFunc is a factory function. type SaramaConsumerGroupFactoryFunc func([]string, string, *sarama.Config) (sarama.ConsumerGroup, error) +// ProcessMessageFunc is a factory function for callers. +type ProcessMessageFunc func(ctx context.Context, msg *sarama.ConsumerMessage) error + func (c *Consumer) checkRequired() error { if c.Logger == nil { - return fmt.Errorf("kafka consumer group check required, Logger error: [%w]", cerrors.ErrValueRequired) + return fmt.Errorf( + "[kafkaconsumergroup.checkRequired] Logger error: [%w, 'nil' received]", + cerrors.ErrValueRequired, + ) } - if c.Storage == nil { - return fmt.Errorf("kafka consumer group check required, Storage error: [%w]", cerrors.ErrValueRequired) + if c.ProcessMessageFunc == nil { + return fmt.Errorf( + "[kafkaconsumergroup.checkRequired] ProcessMessageFunc error: [%w, 'nil' received]", + cerrors.ErrValueRequired, + ) } if c.KafkaGroupName == "" { - return fmt.Errorf("kafka consumer group check required, KafkaGroupName error: [%w]", cerrors.ErrValueRequired) + return fmt.Errorf( + "[kafkaconsumergroup.checkRequired] KafkaGroupName error: [%w, empty string received]", + cerrors.ErrValueRequired, + ) } if !c.Topic.Valid() { - return fmt.Errorf("kafka consumer group check required, Topic error: [%w]", cerrors.ErrInvalid) + return fmt.Errorf( + "[kafkaconsumergroup.checkRequired] Topic error: [%w, false received]", + cerrors.ErrInvalid, + ) } return nil @@ -135,14 +149,19 @@ func (c Consumer) StartConsume() error { return } + if err := 
c.ProcessMessageFunc(ctx, msg); err != nil { + c.Logger.Error("kafka consumer group process message", "error", err, "worker", i) + + continue + } + c.Logger.Info( - "store message here", + "message is stored to database", "worker", i, "topic", msg.Topic, "partition", msg.Partition, "offset", msg.Offset, "key", string(msg.Key), - // "value", string(msg.Value), ) case <-ctx.Done(): @@ -182,7 +201,10 @@ func (c Consumer) StartConsume() error { wg.Wait() if err := c.SaramaConsumerGroup.Close(); err != nil { - return fmt.Errorf("failed to close consumer group, error: [%w]", err) + return fmt.Errorf( + "[kafkaconsumergroup.StartConsume][SaramaConsumerGroup.Close] error: [%w]", + err, + ) } c.Logger.Info("all workers are stopped") @@ -197,7 +219,10 @@ type Option func(*Consumer) error func WithLogger(l *slog.Logger) Option { return func(c *Consumer) error { if l == nil { - return fmt.Errorf("kafka consumer group WithLogger error: [%w]", cerrors.ErrValueRequired) + return fmt.Errorf( + "[kafkaconsumergroup.WithLogger] error: [%w, 'nil' received]", + cerrors.ErrValueRequired, + ) } c.Logger = l @@ -205,24 +230,15 @@ func WithLogger(l *slog.Logger) Option { } } -// WithStorage sets storage value. -func WithStorage(st storage.PingStorer) Option { - return func(c *Consumer) error { - if st == nil { - return fmt.Errorf("kafka consumer group WithStorage error: [%w]", cerrors.ErrValueRequired) - } - c.Storage = st - - return nil - } -} - // WithTopic sets topic name to consume. 
func WithTopic(s string) Option { return func(c *Consumer) error { kt := kafkacp.KafkaTopicIdentifier(s) if !kt.Valid() { - return fmt.Errorf("kafka consumer group WithTopic error: [%w]", cerrors.ErrInvalid) + return fmt.Errorf( + "[kafkaconsumergroup.WithTopic] error: [%w, '%s' received]", + cerrors.ErrInvalid, s, + ) } c.Topic = kt @@ -235,7 +251,10 @@ func WithKafkaVersion(s string) Option { return func(c *Consumer) error { version, err := sarama.ParseKafkaVersion(s) if err != nil { - return fmt.Errorf("kafka consumer group WithKafkaVersion error: [%w][%w]", err, cerrors.ErrInvalid) + return fmt.Errorf( + "[kafkaconsumergroup.WithKafkaVersion] error: [(%w) %w, '%s' received]", + err, cerrors.ErrInvalid, s, + ) } c.KafkaVersion = version @@ -250,7 +269,10 @@ func WithKafkaBrokers(brokers string) Option { var kafkaBrokers kafkacp.KafkaBrokers kafkaBrokers.AddFromString(brokers) if !kafkaBrokers.Valid() { - return fmt.Errorf("kafka consumer group WithKafkaBrokers error: [%w]", cerrors.ErrInvalid) + return fmt.Errorf( + "[kafkaconsumergroup.WithKafkaBrokers] error: [%w, '%s' received]", + cerrors.ErrInvalid, brokers, + ) } c.KafkaBrokers = kafkaBrokers @@ -263,7 +285,10 @@ func WithKafkaBrokers(brokers string) Option { func WithDialTimeout(d time.Duration) Option { return func(c *Consumer) error { if d < 0 { - return fmt.Errorf("kafka consumer WithDialTimeout error: [%w]", cerrors.ErrInvalid) + return fmt.Errorf( + "[kafkaconsumergroup.WithDialTimeout] error: [%w, '%s' received, must > 0]", + cerrors.ErrInvalid, d, + ) } c.DialTimeout = d @@ -275,7 +300,10 @@ func WithDialTimeout(d time.Duration) Option { func WithReadTimeout(d time.Duration) Option { return func(c *Consumer) error { if d < 0 { - return fmt.Errorf("kafka consumer group WithReadTimeout error: [%w]", cerrors.ErrInvalid) + return fmt.Errorf( + "[kafkaconsumergroup.WithReadTimeout] error: [%w, '%s' received, must > 0]", + cerrors.ErrInvalid, d, + ) } c.ReadTimeout = d @@ -287,7 +315,10 @@ func 
WithReadTimeout(d time.Duration) Option { func WithWriteTimeout(d time.Duration) Option { return func(c *Consumer) error { if d < 0 { - return fmt.Errorf("kafka consumer group WithWriteTimeout error: [%w]", cerrors.ErrInvalid) + return fmt.Errorf( + "[kafkaconsumergroup.WithWriteTimeout] error: [%w, '%s' received, must > 0]", + cerrors.ErrInvalid, d, + ) } c.WriteTimeout = d @@ -299,11 +330,17 @@ func WithWriteTimeout(d time.Duration) Option { func WithBackoff(d time.Duration) Option { return func(c *Consumer) error { if d == 0 { - return fmt.Errorf("kafka consumer group WithBackoff error: [%w]", cerrors.ErrValueRequired) + return fmt.Errorf( + "[kafkaconsumergroup.WithBackoff] error: [%w, '%s' received, 0 is not allowed]", + cerrors.ErrValueRequired, d, + ) } if d < 0 || d > time.Minute { - return fmt.Errorf("kafka consumer group WithBackoff error: [%w]", cerrors.ErrInvalid) + return fmt.Errorf( + "[kafkaconsumergroup.WithBackoff] error: [%w, '%s' received, must > 0 or < minute]", + cerrors.ErrInvalid, d, + ) } c.Backoff = d @@ -316,7 +353,10 @@ func WithBackoff(d time.Duration) Option { func WithMaxRetries(i int) Option { return func(c *Consumer) error { if i > math.MaxUint8 || i < 0 { - return fmt.Errorf("kafka consumer group WithMaxRetries error: [%w]", cerrors.ErrInvalid) + return fmt.Errorf( + "[kafkaconsumergroup.WithMaxRetries] error: [%w, '%[2]d' received, must < %[2]d or > 0]", + cerrors.ErrInvalid, i, + ) } c.MaxRetries = uint8(i) @@ -328,7 +368,10 @@ func WithMaxRetries(i int) Option { func WithKafkaGroupName(s string) Option { return func(c *Consumer) error { if s == "" { - return fmt.Errorf("kafka consumer group WithKafkaGroupName error: [%w]", cerrors.ErrValueRequired) + return fmt.Errorf( + "[kafkaconsumergroup.WithKafkaGroupName] error: [%w, empty string received]", + cerrors.ErrValueRequired, + ) } c.KafkaGroupName = s @@ -337,16 +380,31 @@ func WithKafkaGroupName(s string) Option { } // WithSaramaConsumerGroupFactoryFunc sets a custom factory 
function for Sarama consumer group. -func WithSaramaConsumerGroupFactoryFunc(factory SaramaConsumerGroupFactoryFunc) Option { +func WithSaramaConsumerGroupFactoryFunc(fn SaramaConsumerGroupFactoryFunc) Option { return func(c *Consumer) error { - if factory == nil { + if fn == nil { return fmt.Errorf( - "kafka consumer group WithSaramaConsumerGroupFactoryFunc error: [%w]", + "[kafkaconsumergroup.WithSaramaConsumerGroupFactoryFunc] error: [%w, 'nil' received]", cerrors.ErrValueRequired, ) } - c.SaramaConsumerGroupFactoryFunc = factory + c.SaramaConsumerGroupFactoryFunc = fn + + return nil + } +} + +// WithProcessMessageFunc sets the message processor. +func WithProcessMessageFunc(fn ProcessMessageFunc) Option { + return func(c *Consumer) error { + if fn == nil { + return fmt.Errorf( + "[kafkaconsumergroup.WithProcessMessageFunc] error: [%w, 'nil' received]", + cerrors.ErrValueRequired, + ) + } + c.ProcessMessageFunc = fn return nil } @@ -373,7 +431,7 @@ func New(options ...Option) (*Consumer, error) { for _, option := range options { if err := option(consumer); err != nil { - return nil, fmt.Errorf("kafka consumer group option error: [%w]", err) + return nil, err } } @@ -414,7 +472,10 @@ func New(options ...Option) (*Consumer, error) { backoff *= 2 } if saramaConsumerGroupErr != nil { - return nil, fmt.Errorf("kafka consumer group, group error: [%w]", saramaConsumerGroupErr) + return nil, fmt.Errorf( + "[kafkaconsumergroup.New][SaramaConsumerGroupFactoryFunc] error: [%w]", + saramaConsumerGroupErr, + ) } consumer.Logger.Info("successfully connected to", "broker", consumer.KafkaBrokers) diff --git a/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go b/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go index 26135e7..c1831a8 100644 --- a/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go +++ b/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go @@ -1,426 +1,452 @@ package kafkaconsumergroup_test -// import ( -// "context" 
-// "log/slog" -// "testing" -// "time" -// -// "github.com/IBM/sarama" -// "github.com/devchain-network/cauldron/internal/cerrors" -// "github.com/devchain-network/cauldron/internal/kafkacp/kafkaconsumergroup" -// "github.com/stretchr/testify/assert" -// "github.com/stretchr/testify/mock" -// ) -// -// type mockLogger struct{} -// -// func (h *mockLogger) Enabled(_ context.Context, _ slog.Level) bool { -// return true -// } -// -// func (h *mockLogger) Handle(_ context.Context, record slog.Record) error { -// return nil -// } -// -// func (h *mockLogger) WithAttrs(attrs []slog.Attr) slog.Handler { -// return h -// } -// -// func (h *mockLogger) WithGroup(name string) slog.Handler { -// return h -// } -// -// type mockStorage struct { -// mock.Mock -// } -// -// func (m *mockStorage) MessageStore(ctx context.Context, msg *sarama.ConsumerMessage) error { -// args := m.Called(ctx, msg) -// return args.Error(0) -// } -// -// func (m *mockStorage) Ping(ctx context.Context, maxRetries uint8, backoff time.Duration) error { -// args := m.Called(ctx, maxRetries, backoff) -// return args.Error(0) -// } -// -// func TestNew_MissingRequiredFields(t *testing.T) { -// consumer, err := kafkaconsumergroup.New() -// -// assert.ErrorIs(t, err, cerrors.ErrValueRequired) -// assert.Nil(t, consumer) -// } -// -// func TestNew_NilLogger(t *testing.T) { -// consumer, err := kafkaconsumergroup.New( -// kafkaconsumergroup.WithLogger(nil), -// ) -// -// assert.ErrorIs(t, err, cerrors.ErrValueRequired) -// assert.Nil(t, consumer) -// } -// -// func TestNew_NoStorage(t *testing.T) { -// logger := slog.New(new(mockLogger)) -// -// consumer, err := kafkaconsumergroup.New( -// kafkaconsumergroup.WithLogger(logger), -// ) -// -// assert.ErrorIs(t, err, cerrors.ErrValueRequired) -// assert.Nil(t, consumer) -// } -// -// func TestNew_NilStorage(t *testing.T) { -// logger := slog.New(new(mockLogger)) -// -// consumer, err := kafkaconsumergroup.New( -// kafkaconsumergroup.WithLogger(logger), -// 
kafkaconsumergroup.WithStorage(nil), -// ) -// -// assert.ErrorIs(t, err, cerrors.ErrValueRequired) -// assert.Nil(t, consumer) -// } -// -// func TestNew_NoGroupName(t *testing.T) { -// logger := slog.New(new(mockLogger)) -// storage := new(mockStorage) -// -// consumer, err := kafkaconsumergroup.New( -// kafkaconsumergroup.WithLogger(logger), -// kafkaconsumergroup.WithStorage(storage), -// ) -// -// assert.ErrorIs(t, err, cerrors.ErrValueRequired) -// assert.Nil(t, consumer) -// } -// -// func TestNew_EmptyGroupName(t *testing.T) { -// logger := slog.New(new(mockLogger)) -// storage := new(mockStorage) -// -// consumer, err := kafkaconsumergroup.New( -// kafkaconsumergroup.WithLogger(logger), -// kafkaconsumergroup.WithStorage(storage), -// kafkaconsumergroup.WithKafkaGroupName(""), -// ) -// -// assert.ErrorIs(t, err, cerrors.ErrValueRequired) -// assert.Nil(t, consumer) -// } -// -// func TestNew_EmptyTopic(t *testing.T) { -// logger := slog.New(new(mockLogger)) -// storage := new(mockStorage) -// -// consumer, err := kafkaconsumergroup.New( -// kafkaconsumergroup.WithLogger(logger), -// kafkaconsumergroup.WithStorage(storage), -// kafkaconsumergroup.WithKafkaGroupName("github-group"), -// ) -// -// assert.ErrorIs(t, err, cerrors.ErrInvalid) -// assert.Nil(t, consumer) -// } -// -// func TestNew_InvalidTopic(t *testing.T) { -// logger := slog.New(new(mockLogger)) -// storage := new(mockStorage) -// -// consumer, err := kafkaconsumergroup.New( -// kafkaconsumergroup.WithLogger(logger), -// kafkaconsumergroup.WithStorage(storage), -// kafkaconsumergroup.WithKafkaGroupName("github-group"), -// kafkaconsumergroup.WithTopic("invalid"), -// ) -// -// assert.ErrorIs(t, err, cerrors.ErrInvalid) -// assert.Nil(t, consumer) -// } -// -// func TestNew_InvalidBrokers(t *testing.T) { -// logger := slog.New(new(mockLogger)) -// storage := new(mockStorage) -// -// consumer, err := kafkaconsumergroup.New( -// kafkaconsumergroup.WithLogger(logger), -// 
kafkaconsumergroup.WithStorage(storage), -// kafkaconsumergroup.WithKafkaGroupName("github-group"), -// kafkaconsumergroup.WithTopic("github"), -// kafkaconsumergroup.WithKafkaBrokers("invalid"), -// ) -// -// assert.ErrorIs(t, err, cerrors.ErrInvalid) -// assert.Nil(t, consumer) -// } -// -// func TestNew_InvalidDialTimeout(t *testing.T) { -// logger := slog.New(new(mockLogger)) -// storage := new(mockStorage) -// -// consumer, err := kafkaconsumergroup.New( -// kafkaconsumergroup.WithLogger(logger), -// kafkaconsumergroup.WithStorage(storage), -// kafkaconsumergroup.WithKafkaGroupName("github-group"), -// kafkaconsumergroup.WithTopic("github"), -// kafkaconsumergroup.WithKafkaBrokers("127.0.0.1:9094"), -// kafkaconsumergroup.WithDialTimeout(-1*time.Second), -// ) -// -// assert.ErrorIs(t, err, cerrors.ErrInvalid) -// assert.Nil(t, consumer) -// } -// -// func TestNew_InvalidReadTimeout(t *testing.T) { -// logger := slog.New(new(mockLogger)) -// storage := new(mockStorage) -// -// consumer, err := kafkaconsumergroup.New( -// kafkaconsumergroup.WithLogger(logger), -// kafkaconsumergroup.WithStorage(storage), -// kafkaconsumergroup.WithKafkaGroupName("github-group"), -// kafkaconsumergroup.WithTopic("github"), -// kafkaconsumergroup.WithKafkaBrokers("127.0.0.1:9094"), -// kafkaconsumergroup.WithReadTimeout(-1*time.Second), -// ) -// -// assert.ErrorIs(t, err, cerrors.ErrInvalid) -// assert.Nil(t, consumer) -// } -// -// func TestNew_InvalidWriteTimeout(t *testing.T) { -// logger := slog.New(new(mockLogger)) -// storage := new(mockStorage) -// -// consumer, err := kafkaconsumergroup.New( -// kafkaconsumergroup.WithLogger(logger), -// kafkaconsumergroup.WithStorage(storage), -// kafkaconsumergroup.WithKafkaGroupName("github-group"), -// kafkaconsumergroup.WithTopic("github"), -// kafkaconsumergroup.WithKafkaBrokers("127.0.0.1:9094"), -// kafkaconsumergroup.WithWriteTimeout(-1*time.Second), -// ) -// -// assert.ErrorIs(t, err, cerrors.ErrInvalid) -// assert.Nil(t, 
consumer) -// } -// -// func TestNew_ZeroBackoff(t *testing.T) { -// logger := slog.New(new(mockLogger)) -// storage := new(mockStorage) -// -// consumer, err := kafkaconsumergroup.New( -// kafkaconsumergroup.WithLogger(logger), -// kafkaconsumergroup.WithStorage(storage), -// kafkaconsumergroup.WithKafkaGroupName("github-group"), -// kafkaconsumergroup.WithTopic("github"), -// kafkaconsumergroup.WithKafkaBrokers("127.0.0.1:9094"), -// kafkaconsumergroup.WithBackoff(0), -// ) -// -// assert.ErrorIs(t, err, cerrors.ErrValueRequired) -// assert.Nil(t, consumer) -// } -// -// func TestNew_InvalidBackoff(t *testing.T) { -// logger := slog.New(new(mockLogger)) -// storage := new(mockStorage) -// -// consumer, err := kafkaconsumergroup.New( -// kafkaconsumergroup.WithLogger(logger), -// kafkaconsumergroup.WithStorage(storage), -// kafkaconsumergroup.WithKafkaGroupName("github-group"), -// kafkaconsumergroup.WithTopic("github"), -// kafkaconsumergroup.WithKafkaBrokers("127.0.0.1:9094"), -// kafkaconsumergroup.WithBackoff(2*time.Minute), -// ) -// -// assert.ErrorIs(t, err, cerrors.ErrInvalid) -// assert.Nil(t, consumer) -// } -// -// func TestNew_InvalidMaxRetries(t *testing.T) { -// logger := slog.New(new(mockLogger)) -// storage := new(mockStorage) -// -// consumer, err := kafkaconsumergroup.New( -// kafkaconsumergroup.WithLogger(logger), -// kafkaconsumergroup.WithStorage(storage), -// kafkaconsumergroup.WithKafkaGroupName("github-group"), -// kafkaconsumergroup.WithTopic("github"), -// kafkaconsumergroup.WithKafkaBrokers("127.0.0.1:9094"), -// kafkaconsumergroup.WithMaxRetries(256), -// ) -// -// assert.ErrorIs(t, err, cerrors.ErrInvalid) -// assert.Nil(t, consumer) -// } -// -// func TestNew_InvalidKafkaVersion(t *testing.T) { -// logger := slog.New(new(mockLogger)) -// storage := new(mockStorage) -// -// consumer, err := kafkaconsumergroup.New( -// kafkaconsumergroup.WithLogger(logger), -// kafkaconsumergroup.WithStorage(storage), -// 
kafkaconsumergroup.WithKafkaGroupName("github-group"), -// kafkaconsumergroup.WithTopic("github"), -// kafkaconsumergroup.WithKafkaBrokers("127.0.0.1:9094"), -// kafkaconsumergroup.WithKafkaVersion("1111"), -// ) -// -// assert.ErrorIs(t, err, cerrors.ErrInvalid) -// assert.Nil(t, consumer) -// } -// -// func TestNew_NilSaramaConsumerGroupFactoryFunc(t *testing.T) { -// logger := slog.New(new(mockLogger)) -// storage := new(mockStorage) -// -// consumer, err := kafkaconsumergroup.New( -// kafkaconsumergroup.WithLogger(logger), -// kafkaconsumergroup.WithStorage(storage), -// kafkaconsumergroup.WithKafkaGroupName("github-group"), -// kafkaconsumergroup.WithTopic("github"), -// kafkaconsumergroup.WithKafkaBrokers("127.0.0.1:9094"), -// kafkaconsumergroup.WithKafkaVersion("3.9.0"), -// kafkaconsumergroup.WithDialTimeout(5*time.Second), -// kafkaconsumergroup.WithReadTimeout(5*time.Second), -// kafkaconsumergroup.WithWriteTimeout(5*time.Second), -// kafkaconsumergroup.WithBackoff(1*time.Second), -// kafkaconsumergroup.WithMaxRetries(2), -// kafkaconsumergroup.WithSaramaConsumerGroupFactoryFunc(nil), -// ) -// -// assert.ErrorIs(t, err, cerrors.ErrValueRequired) -// assert.Nil(t, consumer) -// } -// -// // type mockConsumerGroup struct { -// // mock.Mock -// // } -// // -// // func (m *mockConsumerGroup) NewConsumerGroup( -// // brokers []string, -// // groupName string, -// // config *sarama.Config, -// // ) (sarama.ConsumerGroup, error) { -// // args := m.Called(brokers, groupName, config) -// // return args.Get(0).(sarama.ConsumerGroup), args.Error(1) -// // } -// -// func TestNew_NilSaramaConsumerGroupFactoryFunc_Error(t *testing.T) { -// logger := slog.New(new(mockLogger)) -// storage := new(mockStorage) -// -// consumer, err := kafkaconsumergroup.New( -// kafkaconsumergroup.WithLogger(logger), -// kafkaconsumergroup.WithStorage(storage), -// kafkaconsumergroup.WithKafkaGroupName("github-group"), -// kafkaconsumergroup.WithTopic("github"), -// 
kafkaconsumergroup.WithBackoff(100*time.Millisecond), -// kafkaconsumergroup.WithMaxRetries(1), -// ) -// -// assert.Nil(t, consumer) -// assert.Error(t, err) -// } -// -// type mockConsumerGroup struct { -// mock.Mock -// } -// -// func (m *mockConsumerGroup) Consume(ctx context.Context, topics []string, handler sarama.ConsumerGroupHandler) error { -// args := m.Called(ctx, topics, handler) -// return args.Error(0) -// } -// -// func (m *mockConsumerGroup) Errors() <-chan error { -// args := m.Called() -// return args.Get(0).(<-chan error) -// } -// -// func (m *mockConsumerGroup) Close() error { -// args := m.Called() -// return args.Error(0) -// } -// -// func (m *mockConsumerGroup) Pause(partitions map[string][]int32) { -// m.Called(partitions) -// } -// -// func (m *mockConsumerGroup) Resume(partitions map[string][]int32) { -// m.Called(partitions) -// } -// -// func (m *mockConsumerGroup) PauseAll() { -// m.Called() -// } -// -// func (m *mockConsumerGroup) ResumeAll() { -// m.Called() -// } -// -// type mockConsumerGroupFactory struct { -// mock.Mock -// } -// -// func (m *mockConsumerGroupFactory) CreateConsumerGroup( -// brokers []string, -// groupName string, -// config *sarama.Config, -// ) (sarama.ConsumerGroup, error) { -// args := m.Called(brokers, groupName, config) -// return args.Get(0).(sarama.ConsumerGroup), args.Error(1) -// } -// -// func TestNew_NilSaramaConsumerGroupFactoryFunc_Success(t *testing.T) { -// logger := slog.New(new(mockLogger)) -// storage := new(mockStorage) -// -// consumerGroup := &mockConsumerGroup{} -// consumerGroupFactory := &mockConsumerGroupFactory{} -// consumerGroupFactory.On( -// "CreateConsumerGroup", -// mock.Anything, -// mock.Anything, -// mock.Anything, -// ).Return(consumerGroup, nil).Once() -// -// consumer, err := kafkaconsumergroup.New( -// kafkaconsumergroup.WithLogger(logger), -// kafkaconsumergroup.WithStorage(storage), -// kafkaconsumergroup.WithKafkaGroupName("github-group"), -// 
kafkaconsumergroup.WithTopic("github"), -// kafkaconsumergroup.WithBackoff(100*time.Millisecond), -// kafkaconsumergroup.WithMaxRetries(1), -// kafkaconsumergroup.WithSaramaConsumerGroupFactoryFunc(consumerGroupFactory.CreateConsumerGroup), -// ) -// -// assert.NotNil(t, consumer) -// assert.NoError(t, err) -// consumerGroupFactory.AssertNumberOfCalls(t, "CreateConsumerGroup", 1) -// consumerGroupFactory.AssertExpectations(t) -// } -// -// // func TestNew(t *testing.T) { -// // mockConsumerGroup.AssertNumberOfCalls(t, "NewConsumerGroup", 1) -// // mockConsumerGroup.AssertExpectations(t) -// -// // // kversion, err := sarama.ParseKafkaVersion("V2_0_0_0") -// // // fmt.Println("err", err) -// // // fmt.Println("kversion", kversion) -// // fmt.Println(sarama.V3_9_0_0) -// // cg, err := kafkaconsumergroup.New() -// // assert.Nil(t, err) -// // assert.NotNil(t, cg) -// // } -// -// // func TestNew_Version(t *testing.T) { -// // kversion, err := sarama.ParseKafkaVersion("3.9.0") -// // fmt.Println("kversion", kversion) -// // fmt.Printf("%T\n", kversion) -// // fmt.Println("err", err) -// // } +import ( + "context" + "errors" + "log/slog" + "testing" + "time" + + "github.com/IBM/sarama" + "github.com/devchain-network/cauldron/internal/cerrors" + "github.com/devchain-network/cauldron/internal/kafkacp" + "github.com/devchain-network/cauldron/internal/kafkacp/kafkaconsumergroup" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +type mockLogger struct{} + +func (h *mockLogger) Enabled(_ context.Context, _ slog.Level) bool { + return true +} + +func (h *mockLogger) Handle(_ context.Context, record slog.Record) error { + return nil +} + +func (h *mockLogger) WithAttrs(attrs []slog.Attr) slog.Handler { + return h +} + +func (h *mockLogger) WithGroup(name string) slog.Handler { + return h +} + +type mockStorage struct { + mock.Mock +} + +func (m *mockStorage) MessageStore(ctx context.Context, msg *sarama.ConsumerMessage) error { + args := 
m.Called(ctx, msg) + return args.Error(0) +} + +func (m *mockStorage) Ping(ctx context.Context, maxRetries uint8, backoff time.Duration) error { + args := m.Called(ctx, maxRetries, backoff) + return args.Error(0) +} + +type mockConsumerGroup struct { + mock.Mock +} + +func (m *mockConsumerGroup) Consume(ctx context.Context, topics []string, handler sarama.ConsumerGroupHandler) error { + args := m.Called(ctx, topics, handler) + return args.Error(0) +} + +func (m *mockConsumerGroup) Errors() <-chan error { + args := m.Called() + return args.Get(0).(<-chan error) +} + +func (m *mockConsumerGroup) Close() error { + args := m.Called() + return args.Error(0) +} + +func (m *mockConsumerGroup) Pause(partitions map[string][]int32) { + m.Called(partitions) +} + +func (m *mockConsumerGroup) Resume(partitions map[string][]int32) { + m.Called(partitions) +} + +func (m *mockConsumerGroup) PauseAll() { + m.Called() +} + +func (m *mockConsumerGroup) ResumeAll() { + m.Called() +} + +type mockConsumerGroupFactory struct { + mock.Mock +} + +func (m *mockConsumerGroupFactory) CreateConsumerGroup( + brokers []string, + groupName string, + config *sarama.Config, +) (sarama.ConsumerGroup, error) { + args := m.Called(brokers, groupName, config) + return args.Get(0).(sarama.ConsumerGroup), args.Error(1) +} + +func TestNew_MissingRequiredFields(t *testing.T) { + consumer, err := kafkaconsumergroup.New() + + assert.ErrorIs(t, err, cerrors.ErrValueRequired) + assert.Nil(t, consumer) +} + +func TestNew_NilLogger(t *testing.T) { + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(nil), + ) + + assert.ErrorIs(t, err, cerrors.ErrValueRequired) + assert.Nil(t, consumer) +} + +func TestNew_NoProcessMessageFunc(t *testing.T) { + logger := slog.New(new(mockLogger)) + + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + ) + + assert.ErrorIs(t, err, cerrors.ErrValueRequired) + assert.Nil(t, consumer) +} + +func TestNew_NilProcessMessageFunc(t 
*testing.T) { + logger := slog.New(new(mockLogger)) + + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + kafkaconsumergroup.WithProcessMessageFunc(nil), + ) + + assert.ErrorIs(t, err, cerrors.ErrValueRequired) + assert.Nil(t, consumer) +} + +func TestNew_NoGroupName(t *testing.T) { + logger := slog.New(new(mockLogger)) + + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + kafkaconsumergroup.WithProcessMessageFunc( + func(ctx context.Context, msg *sarama.ConsumerMessage) error { + return nil + }, + ), + ) + + assert.ErrorIs(t, err, cerrors.ErrValueRequired) + assert.Nil(t, consumer) +} + +func TestNew_EmptyGroupName(t *testing.T) { + logger := slog.New(new(mockLogger)) + + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + kafkaconsumergroup.WithProcessMessageFunc( + func(ctx context.Context, msg *sarama.ConsumerMessage) error { + return nil + }, + ), + kafkaconsumergroup.WithKafkaGroupName(""), + ) + + assert.ErrorIs(t, err, cerrors.ErrValueRequired) + assert.Nil(t, consumer) +} + +func TestNew_NoTopic(t *testing.T) { + logger := slog.New(new(mockLogger)) + + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + kafkaconsumergroup.WithProcessMessageFunc( + func(ctx context.Context, msg *sarama.ConsumerMessage) error { + return nil + }, + ), + kafkaconsumergroup.WithKafkaGroupName("github-group"), + ) + + assert.ErrorIs(t, err, cerrors.ErrInvalid) + assert.Nil(t, consumer) +} + +func TestNew_InvalidTopic(t *testing.T) { + logger := slog.New(new(mockLogger)) + + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + kafkaconsumergroup.WithProcessMessageFunc( + func(ctx context.Context, msg *sarama.ConsumerMessage) error { + return nil + }, + ), + kafkaconsumergroup.WithKafkaGroupName("github-group"), + kafkaconsumergroup.WithTopic("invalid"), + ) + + assert.ErrorIs(t, err, cerrors.ErrInvalid) + assert.Nil(t, 
consumer) +} + +func TestNew_InvalidBrokers(t *testing.T) { + logger := slog.New(new(mockLogger)) + + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + kafkaconsumergroup.WithProcessMessageFunc( + func(ctx context.Context, msg *sarama.ConsumerMessage) error { + return nil + }, + ), + kafkaconsumergroup.WithKafkaGroupName("github-group"), + kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), + kafkaconsumergroup.WithKafkaBrokers("invalid"), + ) + + assert.ErrorIs(t, err, cerrors.ErrInvalid) + assert.Nil(t, consumer) +} + +func TestNew_InvalidDialTimeout(t *testing.T) { + logger := slog.New(new(mockLogger)) + + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + kafkaconsumergroup.WithProcessMessageFunc( + func(ctx context.Context, msg *sarama.ConsumerMessage) error { + return nil + }, + ), + kafkaconsumergroup.WithKafkaGroupName("github-group"), + kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), + kafkaconsumergroup.WithKafkaBrokers(kafkacp.DefaultKafkaBrokers), + kafkaconsumergroup.WithDialTimeout(-1*time.Second), + ) + + assert.ErrorIs(t, err, cerrors.ErrInvalid) + assert.Nil(t, consumer) +} + +func TestNew_InvalidReadTimeout(t *testing.T) { + logger := slog.New(new(mockLogger)) + + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + kafkaconsumergroup.WithProcessMessageFunc( + func(ctx context.Context, msg *sarama.ConsumerMessage) error { + return nil + }, + ), + kafkaconsumergroup.WithKafkaGroupName("github-group"), + kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), + kafkaconsumergroup.WithKafkaBrokers(kafkacp.DefaultKafkaBrokers), + kafkaconsumergroup.WithReadTimeout(-1*time.Second), + ) + + assert.ErrorIs(t, err, cerrors.ErrInvalid) + assert.Nil(t, consumer) +} + +func TestNew_InvalidWriteTimeout(t *testing.T) { + logger := slog.New(new(mockLogger)) + + consumer, err := 
kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + kafkaconsumergroup.WithProcessMessageFunc( + func(ctx context.Context, msg *sarama.ConsumerMessage) error { + return nil + }, + ), + kafkaconsumergroup.WithKafkaGroupName("github-group"), + kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), + kafkaconsumergroup.WithKafkaBrokers(kafkacp.DefaultKafkaBrokers), + kafkaconsumergroup.WithWriteTimeout(-1*time.Second), + ) + + assert.ErrorIs(t, err, cerrors.ErrInvalid) + assert.Nil(t, consumer) +} + +func TestNew_ZeroBackoff(t *testing.T) { + logger := slog.New(new(mockLogger)) + + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + kafkaconsumergroup.WithProcessMessageFunc( + func(ctx context.Context, msg *sarama.ConsumerMessage) error { + return nil + }, + ), + kafkaconsumergroup.WithKafkaGroupName("github-group"), + kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), + kafkaconsumergroup.WithKafkaBrokers(kafkacp.DefaultKafkaBrokers), + kafkaconsumergroup.WithBackoff(0), + ) + + assert.ErrorIs(t, err, cerrors.ErrValueRequired) + assert.Nil(t, consumer) +} + +func TestNew_InvalidBackoff(t *testing.T) { + logger := slog.New(new(mockLogger)) + + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + kafkaconsumergroup.WithProcessMessageFunc( + func(ctx context.Context, msg *sarama.ConsumerMessage) error { + return nil + }, + ), + kafkaconsumergroup.WithKafkaGroupName("github-group"), + kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), + kafkaconsumergroup.WithKafkaBrokers(kafkacp.DefaultKafkaBrokers), + kafkaconsumergroup.WithBackoff(2*time.Minute), + ) + + assert.ErrorIs(t, err, cerrors.ErrInvalid) + assert.Nil(t, consumer) +} + +func TestNew_InvalidMaxRetries(t *testing.T) { + logger := slog.New(new(mockLogger)) + + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + 
kafkaconsumergroup.WithProcessMessageFunc( + func(ctx context.Context, msg *sarama.ConsumerMessage) error { + return nil + }, + ), + kafkaconsumergroup.WithKafkaGroupName("github-group"), + kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), + kafkaconsumergroup.WithKafkaBrokers(kafkacp.DefaultKafkaBrokers), + kafkaconsumergroup.WithMaxRetries(256), + ) + + assert.ErrorIs(t, err, cerrors.ErrInvalid) + assert.Nil(t, consumer) +} + +func TestNew_InvalidKafkaVersion(t *testing.T) { + logger := slog.New(new(mockLogger)) + + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + kafkaconsumergroup.WithProcessMessageFunc( + func(ctx context.Context, msg *sarama.ConsumerMessage) error { + return nil + }, + ), + kafkaconsumergroup.WithKafkaGroupName("github-group"), + kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), + kafkaconsumergroup.WithKafkaBrokers(kafkacp.DefaultKafkaBrokers), + kafkaconsumergroup.WithKafkaVersion("1111"), + ) + + assert.ErrorIs(t, err, cerrors.ErrInvalid) + assert.Nil(t, consumer) +} + +func TestNew_NilSaramaConsumerGroupFactoryFunc(t *testing.T) { + logger := slog.New(new(mockLogger)) + + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + kafkaconsumergroup.WithProcessMessageFunc( + func(ctx context.Context, msg *sarama.ConsumerMessage) error { + return nil + }, + ), + kafkaconsumergroup.WithKafkaGroupName("github-group"), + kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), + kafkaconsumergroup.WithKafkaBrokers(kafkacp.DefaultKafkaBrokers), + kafkaconsumergroup.WithKafkaVersion("3.9.0"), + kafkaconsumergroup.WithDialTimeout(5*time.Second), + kafkaconsumergroup.WithReadTimeout(5*time.Second), + kafkaconsumergroup.WithWriteTimeout(5*time.Second), + kafkaconsumergroup.WithBackoff(1*time.Second), + kafkaconsumergroup.WithMaxRetries(2), + kafkaconsumergroup.WithSaramaConsumerGroupFactoryFunc(nil), + ) + + 
assert.ErrorIs(t, err, cerrors.ErrValueRequired) + assert.Nil(t, consumer) +} + +func TestNew_NilSaramaConsumerGroupFactoryFunc_Error(t *testing.T) { + logger := slog.New(new(mockLogger)) + + consumerGroup := &mockConsumerGroup{} + consumerGroupFactory := &mockConsumerGroupFactory{} + consumerGroupFactory.On( + "CreateConsumerGroup", + mock.Anything, + mock.Anything, + mock.Anything, + ).Return(consumerGroup, errors.New("error")).Once() + + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + kafkaconsumergroup.WithProcessMessageFunc( + func(ctx context.Context, msg *sarama.ConsumerMessage) error { + return nil + }, + ), + kafkaconsumergroup.WithKafkaGroupName("github-group"), + kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), + kafkaconsumergroup.WithBackoff(100*time.Millisecond), + kafkaconsumergroup.WithMaxRetries(1), + kafkaconsumergroup.WithSaramaConsumerGroupFactoryFunc(consumerGroupFactory.CreateConsumerGroup), + ) + + assert.Nil(t, consumer) + assert.Error(t, err) + consumerGroupFactory.AssertNumberOfCalls(t, "CreateConsumerGroup", 1) + consumerGroupFactory.AssertExpectations(t) +} + +func TestNew_NilSaramaConsumerGroupFactoryFunc_Success(t *testing.T) { + logger := slog.New(new(mockLogger)) + + consumerGroup := &mockConsumerGroup{} + consumerGroupFactory := &mockConsumerGroupFactory{} + consumerGroupFactory.On( + "CreateConsumerGroup", + mock.Anything, + mock.Anything, + mock.Anything, + ).Return(consumerGroup, nil).Once() + + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + kafkaconsumergroup.WithProcessMessageFunc( + func(ctx context.Context, msg *sarama.ConsumerMessage) error { + return nil + }, + ), + kafkaconsumergroup.WithKafkaGroupName("github-group"), + kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), + kafkaconsumergroup.WithBackoff(100*time.Millisecond), + kafkaconsumergroup.WithMaxRetries(1), + 
kafkaconsumergroup.WithSaramaConsumerGroupFactoryFunc(consumerGroupFactory.CreateConsumerGroup), + ) + + assert.NotNil(t, consumer) + assert.NoError(t, err) + consumerGroupFactory.AssertNumberOfCalls(t, "CreateConsumerGroup", 1) + consumerGroupFactory.AssertExpectations(t) +} From 6e34310a06e4d6d416ddaf7fa709efae2d70cc80 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?U=C4=9Fur=20=C3=96zy=C4=B1lmazel?= Date: Sun, 26 Jan 2025 22:20:57 +0300 Subject: [PATCH 05/13] complete - test --- .../kafkaconsumergroup/kafkaconsumergroup.go | 22 ++++- .../kafkaconsumergroup_test.go | 98 ++++++++++++++++++- 2 files changed, 115 insertions(+), 5 deletions(-) diff --git a/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup.go b/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup.go index 84578d8..2aa5806 100644 --- a/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup.go +++ b/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup.go @@ -35,6 +35,7 @@ type Consumer struct { ProcessMessageFunc ProcessMessageFunc MessageQueue chan *sarama.ConsumerMessage SaramaConsumerGroup sarama.ConsumerGroup + SaramaConsumerGroupHandler sarama.ConsumerGroupHandler Topic kafkacp.KafkaTopicIdentifier KafkaBrokers kafkacp.KafkaBrokers KafkaVersion sarama.KafkaVersion @@ -185,7 +186,7 @@ func (c Consumer) StartConsume() error { return } - if err := c.SaramaConsumerGroup.Consume(ctx, topics, c); err != nil { + if err := c.SaramaConsumerGroup.Consume(ctx, topics, c.SaramaConsumerGroupHandler); err != nil { if ctx.Err() != nil { c.Logger.Info("consume stopped due to context cancellation") @@ -410,6 +411,21 @@ func WithProcessMessageFunc(fn ProcessMessageFunc) Option { } } +// WithSaramaConsumerGroupHandler sets sarama consumer group handler. 
+func WithSaramaConsumerGroupHandler(handler sarama.ConsumerGroupHandler) Option { + return func(c *Consumer) error { + if handler == nil { + return fmt.Errorf( + "[kafkaconsumergroup.WithSaramaConsumerGroupHandler] error: [%w, 'nil' received]", + cerrors.ErrValueRequired, + ) + } + c.SaramaConsumerGroupHandler = handler + + return nil + } +} + // New instantiates new kafka github consumer group instance. func New(options ...Option) (*Consumer, error) { consumer := new(Consumer) @@ -439,6 +455,10 @@ func New(options ...Option) (*Consumer, error) { return nil, err } + if consumer.SaramaConsumerGroupHandler == nil { + consumer.SaramaConsumerGroupHandler = consumer + } + config := sarama.NewConfig() config.Net.DialTimeout = consumer.DialTimeout config.Net.ReadTimeout = consumer.ReadTimeout diff --git a/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go b/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go index c1831a8..d193481 100644 --- a/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go +++ b/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go @@ -4,6 +4,9 @@ import ( "context" "errors" "log/slog" + "os" + "sync" + "syscall" "testing" "time" @@ -15,6 +18,7 @@ import ( "github.com/stretchr/testify/mock" ) +// mockLogger ----------------------------------------------------------------- type mockLogger struct{} func (h *mockLogger) Enabled(_ context.Context, _ slog.Level) bool { @@ -33,6 +37,7 @@ func (h *mockLogger) WithGroup(name string) slog.Handler { return h } +// mockStorage ---------------------------------------------------------------- type mockStorage struct { mock.Mock } @@ -47,6 +52,7 @@ func (m *mockStorage) Ping(ctx context.Context, maxRetries uint8, backoff time.D return args.Error(0) } +// mockConsumerGroup ---------------------------------------------------------- type mockConsumerGroup struct { mock.Mock } @@ -82,6 +88,7 @@ func (m *mockConsumerGroup) ResumeAll() { m.Called() } +// 
mockConsumerGroupFactory --------------------------------------------------- type mockConsumerGroupFactory struct { mock.Mock } @@ -361,6 +368,26 @@ func TestNew_InvalidKafkaVersion(t *testing.T) { assert.Nil(t, consumer) } +func TestNew_NilSaramaConsumerGroupHandler(t *testing.T) { + logger := slog.New(new(mockLogger)) + + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + kafkaconsumergroup.WithProcessMessageFunc( + func(ctx context.Context, msg *sarama.ConsumerMessage) error { + return nil + }, + ), + kafkaconsumergroup.WithKafkaGroupName("github-group"), + kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), + kafkaconsumergroup.WithKafkaBrokers(kafkacp.DefaultKafkaBrokers), + kafkaconsumergroup.WithSaramaConsumerGroupHandler(nil), + ) + + assert.ErrorIs(t, err, cerrors.ErrValueRequired) + assert.Nil(t, consumer) +} + func TestNew_NilSaramaConsumerGroupFactoryFunc(t *testing.T) { logger := slog.New(new(mockLogger)) @@ -387,7 +414,7 @@ func TestNew_NilSaramaConsumerGroupFactoryFunc(t *testing.T) { assert.Nil(t, consumer) } -func TestNew_NilSaramaConsumerGroupFactoryFunc_Error(t *testing.T) { +func TestNew_SaramaConsumerGroupFactoryFunc_Error(t *testing.T) { logger := slog.New(new(mockLogger)) consumerGroup := &mockConsumerGroup{} @@ -419,13 +446,12 @@ func TestNew_NilSaramaConsumerGroupFactoryFunc_Error(t *testing.T) { consumerGroupFactory.AssertExpectations(t) } -func TestNew_NilSaramaConsumerGroupFactoryFunc_Success(t *testing.T) { +func TestNew_SaramaConsumerGroupFactoryFunc_Success(t *testing.T) { logger := slog.New(new(mockLogger)) consumerGroup := &mockConsumerGroup{} consumerGroupFactory := &mockConsumerGroupFactory{} - consumerGroupFactory.On( - "CreateConsumerGroup", + consumerGroupFactory.On("CreateConsumerGroup", mock.Anything, mock.Anything, mock.Anything, @@ -450,3 +476,67 @@ func TestNew_NilSaramaConsumerGroupFactoryFunc_Success(t *testing.T) { consumerGroupFactory.AssertNumberOfCalls(t, 
"CreateConsumerGroup", 1) consumerGroupFactory.AssertExpectations(t) } + +func TestNew_Consume_Success(t *testing.T) { + logger := slog.New(new(mockLogger)) + + consumerGroup := &mockConsumerGroup{} + consumerGroup.On("Errors").Return((<-chan error)(make(chan error))) + + consumerGroupFactory := &mockConsumerGroupFactory{} + consumerGroupFactory.On("CreateConsumerGroup", + mock.Anything, + mock.Anything, + mock.Anything, + ).Return(consumerGroup, nil) + + consumerGroup.On("Consume", mock.Anything, mock.Anything, mock.Anything).Return(nil) + consumerGroup.On("Close").Return(nil).Once() + + consumer, err := kafkaconsumergroup.New( + kafkaconsumergroup.WithLogger(logger), + kafkaconsumergroup.WithProcessMessageFunc( + func(ctx context.Context, msg *sarama.ConsumerMessage) error { + return nil + }, + ), + kafkaconsumergroup.WithKafkaGroupName("github-group"), + kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), + kafkaconsumergroup.WithBackoff(100*time.Millisecond), + kafkaconsumergroup.WithMaxRetries(1), + kafkaconsumergroup.WithSaramaConsumerGroupFactoryFunc(consumerGroupFactory.CreateConsumerGroup), + ) + + assert.NotNil(t, consumer) + assert.NoError(t, err) + + consumerGroupFactory.AssertNumberOfCalls(t, "CreateConsumerGroup", 1) + consumerGroupFactory.AssertExpectations(t) + + var wg sync.WaitGroup + + wg.Add(1) + go func() { + defer wg.Done() + err := consumer.StartConsume() + assert.NoError(t, err) + }() + + wg.Add(1) + go func() { + defer wg.Done() + + consumer.MessageQueue <- &sarama.ConsumerMessage{ + Topic: kafkacp.KafkaTopicIdentifierGitHub.String(), + Partition: 0, + Offset: 1, + Key: []byte("key"), + Value: []byte("value"), + } + }() + + time.Sleep(100 * time.Millisecond) + process, _ := os.FindProcess(syscall.Getpid()) + _ = process.Signal(os.Interrupt) + wg.Wait() +} From 812267dc397d4092ac9cb4882dfe6d26530f9264 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?U=C4=9Fur=20=C3=96zy=C4=B1lmazel?= Date: Sun, 26 Jan 2025 23:14:47 +0300 
Subject: [PATCH 06/13] wip - continue fix fmt.Errorf --- cmd/githubconsumer/main.go | 13 +- .../kafkacp/kafkaconsumer/kafkaconsumer.go | 124 ++++++++++++------ .../kafkaconsumer/kafkaconsumer_test.go | 123 ++++++++--------- .../kafkaconsumergroup/kafkaconsumergroup.go | 4 +- .../kafkaconsumergroup_test.go | 15 --- 5 files changed, 158 insertions(+), 121 deletions(-) diff --git a/cmd/githubconsumer/main.go b/cmd/githubconsumer/main.go index db07e29..b36ee63 100644 --- a/cmd/githubconsumer/main.go +++ b/cmd/githubconsumer/main.go @@ -5,6 +5,7 @@ import ( "fmt" "log" + "github.com/IBM/sarama" "github.com/devchain-network/cauldron/internal/kafkacp" "github.com/devchain-network/cauldron/internal/kafkacp/kafkaconsumer" "github.com/devchain-network/cauldron/internal/slogger" @@ -13,6 +14,16 @@ import ( "github.com/vigo/getenv" ) +func storeMessage(strg storage.PingStorer) kafkaconsumer.ProcessMessageFunc { + return func(ctx context.Context, msg *sarama.ConsumerMessage) error { + if err := strg.MessageStore(ctx, msg); err != nil { + return fmt.Errorf("message store error: [%w]", err) + } + + return nil + } +} + // Run runs kafa github consumer. 
func Run() error { logLevel := getenv.String("LOG_LEVEL", slogger.DefaultLogLevel) @@ -60,7 +71,7 @@ func Run() error { kafkaGitHubConsumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), - kafkaconsumer.WithStorage(db), + kafkaconsumer.WithProcessMessageFunc(storeMessage(db)), kafkaconsumer.WithKafkaBrokers(*brokersList), kafkaconsumer.WithDialTimeout(*kafkaDialTimeout), kafkaconsumer.WithReadTimeout(*kafkaReadTimeout), diff --git a/internal/kafkacp/kafkaconsumer/kafkaconsumer.go b/internal/kafkacp/kafkaconsumer/kafkaconsumer.go index 30e3eda..440f65b 100644 --- a/internal/kafkacp/kafkaconsumer/kafkaconsumer.go +++ b/internal/kafkacp/kafkaconsumer/kafkaconsumer.go @@ -14,7 +14,6 @@ import ( "github.com/IBM/sarama" "github.com/devchain-network/cauldron/internal/cerrors" "github.com/devchain-network/cauldron/internal/kafkacp" - "github.com/devchain-network/cauldron/internal/storage" ) // defaults. @@ -37,13 +36,16 @@ type KafkaConsumer interface { // SaramaConsumerFactoryFunc is a factory function. type SaramaConsumerFactoryFunc func([]string, *sarama.Config) (sarama.Consumer, error) +// ProcessMessageFunc is a factory function for callers. +type ProcessMessageFunc func(ctx context.Context, msg *sarama.ConsumerMessage) error + // Consumer represents kafa consumer setup. 
type Consumer struct { Topic kafkacp.KafkaTopicIdentifier Logger *slog.Logger - Storage storage.PingStorer SaramaConsumer sarama.Consumer SaramaConsumerFactoryFunc SaramaConsumerFactoryFunc + ProcessMessageFunc ProcessMessageFunc KafkaBrokers kafkacp.KafkaBrokers DialTimeout time.Duration ReadTimeout time.Duration @@ -57,25 +59,34 @@ type Consumer struct { func (c *Consumer) checkRequired() error { if c.Logger == nil { - return fmt.Errorf("kafka consumer check required, Logger error: [%w]", cerrors.ErrValueRequired) + return fmt.Errorf( + "[kafkaconsumer.checkRequired] Logger error: [%w, 'nil' received]", + cerrors.ErrValueRequired, + ) } - if c.Storage == nil { - return fmt.Errorf("kafka consumer check required, Storage error: [%w]", cerrors.ErrValueRequired) + if c.ProcessMessageFunc == nil { + return fmt.Errorf( + "[kafkaconsumer.checkRequired] ProcessMessageFunc error: [%w, 'nil' received]", + cerrors.ErrValueRequired, + ) } if !c.Topic.Valid() { - return fmt.Errorf("kafka consumer check required, Topic error: [%w]", cerrors.ErrInvalid) + return fmt.Errorf( + "[kafkaconsumer.checkRequired] Topic error: [%w, false received]", + cerrors.ErrInvalid, + ) } return nil } -// Consume consumes message and stores it to database. +// Consume consumes kafka message with using partition consumer. 
func (c Consumer) Consume() error { partitionConsumer, err := c.SaramaConsumer.ConsumePartition(c.Topic.String(), c.Partition, sarama.OffsetNewest) if err != nil { - return fmt.Errorf("kafka consumer partition consumer instantiation error: [%w]", err) + return fmt.Errorf("[kafkaconsumer.Consume][SaramaConsumer.ConsumePartition] error: [%w]", err) } defer func() { _ = partitionConsumer.Close() }() @@ -112,7 +123,7 @@ func (c Consumer) Consume() error { }() for msg := range messagesQueue { - if err = c.Storage.MessageStore(ctx, msg); err != nil { + if err = c.ProcessMessageFunc(ctx, msg); err != nil { c.Logger.Error("kafka consumer message store", "error", err, "worker", i) continue @@ -162,7 +173,10 @@ type Option func(*Consumer) error func WithLogger(l *slog.Logger) Option { return func(c *Consumer) error { if l == nil { - return fmt.Errorf("kafka consumer WithLogger error: [%w]", cerrors.ErrValueRequired) + return fmt.Errorf( + "[kafkaconsumer.WithLogger] error: [%w, 'nil' received]", + cerrors.ErrValueRequired, + ) } c.Logger = l @@ -170,24 +184,15 @@ func WithLogger(l *slog.Logger) Option { } } -// WithStorage sets storage value. -func WithStorage(st storage.PingStorer) Option { - return func(c *Consumer) error { - if st == nil { - return fmt.Errorf("kafka consumer WithStorage error: [%w]", cerrors.ErrValueRequired) - } - c.Storage = st - - return nil - } -} - // WithTopic sets topic name to consume. 
func WithTopic(s string) Option { return func(c *Consumer) error { kt := kafkacp.KafkaTopicIdentifier(s) if !kt.Valid() { - return fmt.Errorf("kafka consumer WithTopic error: [%w]", cerrors.ErrInvalid) + return fmt.Errorf( + "[kafkaconsumer.WithTopic] error: [%w, '%s' received]", + cerrors.ErrInvalid, s, + ) } c.Topic = kt @@ -199,7 +204,10 @@ func WithTopic(s string) Option { func WithPartition(i int) Option { return func(c *Consumer) error { if i < 0 || i > math.MaxInt32 { - return fmt.Errorf("kafka consumer WithPartition error: [%w]", cerrors.ErrInvalid) + return fmt.Errorf( + "[kafkaconsumer.WithPartition] error: [%w, '%d' received, must > 0 or must < %d ]", + cerrors.ErrInvalid, i, math.MaxInt32, + ) } c.Partition = int32(i) @@ -213,7 +221,10 @@ func WithKafkaBrokers(brokers string) Option { var kafkaBrokers kafkacp.KafkaBrokers kafkaBrokers.AddFromString(brokers) if !kafkaBrokers.Valid() { - return fmt.Errorf("kafka consumer WithKafkaBrokers error: [%w]", cerrors.ErrInvalid) + return fmt.Errorf( + "[kafkaconsumer.WithKafkaBrokers] error: [%w, '%s' received]", + cerrors.ErrInvalid, brokers, + ) } c.KafkaBrokers = kafkaBrokers @@ -226,7 +237,10 @@ func WithKafkaBrokers(brokers string) Option { func WithDialTimeout(d time.Duration) Option { return func(c *Consumer) error { if d < 0 { - return fmt.Errorf("kafka consumer WithDialTimeout error: [%w]", cerrors.ErrInvalid) + return fmt.Errorf( + "[kafkaconsumer.WithDialTimeout] error: [%w, '%s' received, must > 0]", + cerrors.ErrInvalid, d, + ) } c.DialTimeout = d @@ -238,7 +252,10 @@ func WithDialTimeout(d time.Duration) Option { func WithReadTimeout(d time.Duration) Option { return func(c *Consumer) error { if d < 0 { - return fmt.Errorf("kafka consumer WithReadTimeout error: [%w]", cerrors.ErrInvalid) + return fmt.Errorf( + "[kafkaconsumer.WithReadTimeout] error: [%w, '%s' received, must > 0]", + cerrors.ErrInvalid, d, + ) } c.ReadTimeout = d @@ -250,7 +267,10 @@ func WithReadTimeout(d time.Duration) Option { func 
WithWriteTimeout(d time.Duration) Option { return func(c *Consumer) error { if d < 0 { - return fmt.Errorf("kafka consumer WithWriteTimeout error: [%w]", cerrors.ErrInvalid) + return fmt.Errorf( + "[kafkaconsumer.WithWriteTimeout] error: [%w, '%s' received, must > 0]", + cerrors.ErrInvalid, d, + ) } c.WriteTimeout = d @@ -262,11 +282,17 @@ func WithWriteTimeout(d time.Duration) Option { func WithBackoff(d time.Duration) Option { return func(c *Consumer) error { if d == 0 { - return fmt.Errorf("kafka consumer WithBackoff error: [%w]", cerrors.ErrValueRequired) + return fmt.Errorf( + "[kafkaconsumer.WithBackoff] error: [%w, '%s' received, 0 is not allowed]", + cerrors.ErrValueRequired, d, + ) } if d < 0 || d > time.Minute { - return fmt.Errorf("kafka consumer WithBackoff error: [%w]", cerrors.ErrInvalid) + return fmt.Errorf( + "[kafkaconsumer.WithBackoff] error: [%w, '%s' received, must > 0 or < minute]", + cerrors.ErrInvalid, d, + ) } c.Backoff = d @@ -279,7 +305,10 @@ func WithBackoff(d time.Duration) Option { func WithMaxRetries(i int) Option { return func(c *Consumer) error { if i > math.MaxUint8 || i < 0 { - return fmt.Errorf("kafka consumer WithMaxRetries error: [%w]", cerrors.ErrInvalid) + return fmt.Errorf( + "[kafkaconsumer.WithMaxRetries] error: [%w, '%d' received, must < %d or > 0]", + cerrors.ErrInvalid, i, math.MaxUint8, + ) } c.MaxRetries = uint8(i) @@ -288,12 +317,30 @@ func WithMaxRetries(i int) Option { } // WithSaramaConsumerFactoryFunc sets a custom factory function for creating Sarama consumers. 
-func WithSaramaConsumerFactoryFunc(factory SaramaConsumerFactoryFunc) Option { +func WithSaramaConsumerFactoryFunc(fn SaramaConsumerFactoryFunc) Option { return func(c *Consumer) error { - if factory == nil { - return fmt.Errorf("kafka consumer WithSaramaConsumerFactoryFunc error: [%w]", cerrors.ErrValueRequired) + if fn == nil { + return fmt.Errorf( + "[kafkaconsumer.WithSaramaConsumerFactoryFunc] error: [%w, 'nil' received]", + cerrors.ErrValueRequired, + ) } - c.SaramaConsumerFactoryFunc = factory + c.SaramaConsumerFactoryFunc = fn + + return nil + } +} + +// WithProcessMessageFunc sets the message processor. +func WithProcessMessageFunc(fn ProcessMessageFunc) Option { + return func(c *Consumer) error { + if fn == nil { + return fmt.Errorf( + "[kafkaconsumer.WithProcessMessageFunc] error: [%w, 'nil' received]", + cerrors.ErrValueRequired, + ) + } + c.ProcessMessageFunc = fn return nil } @@ -318,7 +365,7 @@ func New(options ...Option) (*Consumer, error) { for _, option := range options { if err := option(consumer); err != nil { - return nil, fmt.Errorf("kafka consumer option error: [%w]", err) + return nil, err } } @@ -357,7 +404,10 @@ func New(options ...Option) (*Consumer, error) { } if sconsumerErr != nil { - return nil, fmt.Errorf("kafka consumer NewConsumer error: [%w]", sconsumerErr) + return nil, fmt.Errorf( + "[kafkaconsumer.New][SaramaConsumerFactoryFunc] error: [%w]", + sconsumerErr, + ) } consumer.Logger.Info("successfully connected to", "broker", consumer.KafkaBrokers) diff --git a/internal/kafkacp/kafkaconsumer/kafkaconsumer_test.go b/internal/kafkacp/kafkaconsumer/kafkaconsumer_test.go index d4ec10f..f867367 100644 --- a/internal/kafkacp/kafkaconsumer/kafkaconsumer_test.go +++ b/internal/kafkacp/kafkaconsumer/kafkaconsumer_test.go @@ -4,6 +4,7 @@ import ( "context" "log/slog" "os" + "sync" "syscall" "testing" "time" @@ -11,6 +12,7 @@ import ( "github.com/IBM/sarama" "github.com/IBM/sarama/mocks" 
"github.com/devchain-network/cauldron/internal/cerrors" + "github.com/devchain-network/cauldron/internal/kafkacp" "github.com/devchain-network/cauldron/internal/kafkacp/kafkaconsumer" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" @@ -43,18 +45,8 @@ func (h *mockLogger) WithGroup(name string) slog.Handler { return h } -type mockStorage struct { - mock.Mock -} - -func (m *mockStorage) MessageStore(ctx context.Context, msg *sarama.ConsumerMessage) error { - args := m.Called(ctx, msg) - return args.Error(0) -} - -func (m *mockStorage) Ping(ctx context.Context, maxRetries uint8, backoff time.Duration) error { - args := m.Called(ctx, maxRetries, backoff) - return args.Error(0) +var mockProcessMessage = func(ctx context.Context, msg *sarama.ConsumerMessage) error { + return nil } func TestNew_MissingRequiredFields(t *testing.T) { @@ -73,7 +65,7 @@ func TestNew_NilLogger(t *testing.T) { assert.Nil(t, consumer) } -func TestNew_NoStorage(t *testing.T) { +func TestNew_NoProcessMessageFunc(t *testing.T) { logger := slog.New(new(mockLogger)) consumer, err := kafkaconsumer.New( @@ -84,12 +76,12 @@ func TestNew_NoStorage(t *testing.T) { assert.Nil(t, consumer) } -func TestNew_NilStorage(t *testing.T) { +func TestNew_NilProcessMessageFunc(t *testing.T) { logger := slog.New(new(mockLogger)) consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), - kafkaconsumer.WithStorage(nil), + kafkaconsumer.WithProcessMessageFunc(nil), ) assert.ErrorIs(t, err, cerrors.ErrValueRequired) @@ -98,11 +90,10 @@ func TestNew_NilStorage(t *testing.T) { func TestNew_EmptyTopic(t *testing.T) { logger := slog.New(new(mockLogger)) - storage := new(mockStorage) consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), - kafkaconsumer.WithStorage(storage), + kafkaconsumer.WithProcessMessageFunc(mockProcessMessage), ) assert.ErrorIs(t, err, cerrors.ErrInvalid) @@ -111,11 +102,10 @@ func TestNew_EmptyTopic(t *testing.T) { func TestNew_InvalidTopic(t 
*testing.T) { logger := slog.New(new(mockLogger)) - storage := new(mockStorage) consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), - kafkaconsumer.WithStorage(storage), + kafkaconsumer.WithProcessMessageFunc(mockProcessMessage), kafkaconsumer.WithTopic("invalid"), ) @@ -125,12 +115,11 @@ func TestNew_InvalidTopic(t *testing.T) { func TestNew_InvalidPartition(t *testing.T) { logger := slog.New(new(mockLogger)) - storage := new(mockStorage) consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), - kafkaconsumer.WithStorage(storage), - kafkaconsumer.WithTopic("github"), + kafkaconsumer.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumer.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumer.WithPartition(2147483648), ) @@ -140,12 +129,11 @@ func TestNew_InvalidPartition(t *testing.T) { func TestNew_InvalidBrokers(t *testing.T) { logger := slog.New(new(mockLogger)) - storage := new(mockStorage) consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), - kafkaconsumer.WithStorage(storage), - kafkaconsumer.WithTopic("github"), + kafkaconsumer.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumer.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumer.WithKafkaBrokers("invalid"), ) @@ -155,12 +143,11 @@ func TestNew_InvalidBrokers(t *testing.T) { func TestNew_InvalidDialTimeout(t *testing.T) { logger := slog.New(new(mockLogger)) - storage := new(mockStorage) consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), - kafkaconsumer.WithStorage(storage), - kafkaconsumer.WithTopic("github"), + kafkaconsumer.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumer.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumer.WithKafkaBrokers("127.0.0.1:9094"), kafkaconsumer.WithDialTimeout(-1*time.Second), ) @@ -171,12 +158,11 @@ func TestNew_InvalidDialTimeout(t *testing.T) { func TestNew_InvalidReadTimeout(t *testing.T) { logger := slog.New(new(mockLogger)) - 
storage := new(mockStorage) consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), - kafkaconsumer.WithStorage(storage), - kafkaconsumer.WithTopic("github"), + kafkaconsumer.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumer.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumer.WithKafkaBrokers("127.0.0.1:9094"), kafkaconsumer.WithReadTimeout(-1*time.Second), ) @@ -187,12 +173,11 @@ func TestNew_InvalidReadTimeout(t *testing.T) { func TestNew_InvalidWriteTimeout(t *testing.T) { logger := slog.New(new(mockLogger)) - storage := new(mockStorage) consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), - kafkaconsumer.WithStorage(storage), - kafkaconsumer.WithTopic("github"), + kafkaconsumer.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumer.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumer.WithKafkaBrokers("127.0.0.1:9094"), kafkaconsumer.WithWriteTimeout(-1*time.Second), ) @@ -203,12 +188,11 @@ func TestNew_InvalidWriteTimeout(t *testing.T) { func TestNew_ZeroBackoff(t *testing.T) { logger := slog.New(new(mockLogger)) - storage := new(mockStorage) consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), - kafkaconsumer.WithStorage(storage), - kafkaconsumer.WithTopic("github"), + kafkaconsumer.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumer.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumer.WithKafkaBrokers("127.0.0.1:9094"), kafkaconsumer.WithBackoff(0), ) @@ -219,12 +203,11 @@ func TestNew_ZeroBackoff(t *testing.T) { func TestNew_InvalidBackoff(t *testing.T) { logger := slog.New(new(mockLogger)) - storage := new(mockStorage) consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), - kafkaconsumer.WithStorage(storage), - kafkaconsumer.WithTopic("github"), + kafkaconsumer.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumer.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), 
kafkaconsumer.WithKafkaBrokers("127.0.0.1:9094"), kafkaconsumer.WithBackoff(2*time.Minute), ) @@ -235,12 +218,11 @@ func TestNew_InvalidBackoff(t *testing.T) { func TestNew_InvalidMaxRetries(t *testing.T) { logger := slog.New(new(mockLogger)) - storage := new(mockStorage) consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), - kafkaconsumer.WithStorage(storage), - kafkaconsumer.WithTopic("github"), + kafkaconsumer.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumer.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumer.WithKafkaBrokers("127.0.0.1:9094"), kafkaconsumer.WithMaxRetries(256), ) @@ -249,14 +231,13 @@ func TestNew_InvalidMaxRetries(t *testing.T) { assert.Nil(t, consumer) } -func TestNew_NilSaramaConsumerFactor(t *testing.T) { +func TestNew_NilSaramaConsumerFactoryFunc(t *testing.T) { logger := slog.New(new(mockLogger)) - storage := new(mockStorage) consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), - kafkaconsumer.WithStorage(storage), - kafkaconsumer.WithTopic("github"), + kafkaconsumer.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumer.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumer.WithKafkaBrokers("127.0.0.1:9094"), kafkaconsumer.WithSaramaConsumerFactoryFunc(nil), ) @@ -267,7 +248,6 @@ func TestNew_NilSaramaConsumerFactor(t *testing.T) { func TestNew_WithSaramaConsumerFactoryFunc_Error(t *testing.T) { logger := slog.New(new(mockLogger)) - storage := new(mockStorage) mockConfig := mocks.NewTestConfig() mockSarama := mocks.NewConsumer(t, mockConfig) @@ -277,8 +257,8 @@ func TestNew_WithSaramaConsumerFactoryFunc_Error(t *testing.T) { consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), - kafkaconsumer.WithStorage(storage), - kafkaconsumer.WithTopic("github"), + kafkaconsumer.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumer.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumer.WithBackoff(100*time.Millisecond), 
kafkaconsumer.WithSaramaConsumerFactoryFunc(mockFactory.NewConsumer), kafkaconsumer.WithMaxRetries(1), @@ -293,7 +273,6 @@ func TestNew_WithSaramaConsumerFactoryFunc_Error(t *testing.T) { func TestNew_WithSaramaConsumerFactoryFunc_Success(t *testing.T) { logger := slog.New(new(mockLogger)) - storage := new(mockStorage) mockConfig := mocks.NewTestConfig() mockSarama := mocks.NewConsumer(t, mockConfig) @@ -304,8 +283,8 @@ func TestNew_WithSaramaConsumerFactoryFunc_Success(t *testing.T) { consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), - kafkaconsumer.WithStorage(storage), - kafkaconsumer.WithTopic("github"), + kafkaconsumer.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumer.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumer.WithPartition(0), kafkaconsumer.WithDialTimeout(10*time.Second), kafkaconsumer.WithReadTimeout(10*time.Second), @@ -324,15 +303,13 @@ func TestNew_WithSaramaConsumerFactoryFunc_Success(t *testing.T) { func TestConsumer_Consume_Success(t *testing.T) { logger := slog.New(new(mockLogger)) - storage := new(mockStorage) - storage.On("MessageStore", mock.Anything, mock.Anything).Return(nil) mockConfig := mocks.NewTestConfig() mockSarama := mocks.NewConsumer(t, mockConfig) - mockSarama.ExpectConsumePartition("github", 0, sarama.OffsetNewest).YieldMessage( + mockSarama.ExpectConsumePartition(kafkacp.KafkaTopicIdentifierGitHub.String(), 0, sarama.OffsetNewest).YieldMessage( &sarama.ConsumerMessage{ Value: []byte(`{"test": "message"}`), - Topic: "github", + Topic: kafkacp.KafkaTopicIdentifierGitHub.String(), Partition: 0, }, ) @@ -342,8 +319,8 @@ func TestConsumer_Consume_Success(t *testing.T) { consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), - kafkaconsumer.WithStorage(storage), - kafkaconsumer.WithTopic("github"), + kafkaconsumer.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumer.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), 
kafkaconsumer.WithSaramaConsumerFactoryFunc(mockFactory.NewConsumer), kafkaconsumer.WithMaxRetries(1), ) @@ -351,48 +328,62 @@ func TestConsumer_Consume_Success(t *testing.T) { assert.NoError(t, err) assert.NotNil(t, consumer) + var wg sync.WaitGroup + + wg.Add(1) go func() { + defer wg.Done() + err := consumer.Consume() assert.NoError(t, err) }() - time.Sleep(1 * time.Second) + time.Sleep(100 * time.Millisecond) + + process, _ := os.FindProcess(syscall.Getpid()) + _ = process.Signal(os.Interrupt) mockFactory.AssertNumberOfCalls(t, "NewConsumer", 1) mockFactory.AssertExpectations(t) - storage.AssertNumberOfCalls(t, "MessageStore", 1) - storage.AssertExpectations(t) + wg.Wait() } func TestConsumer_Consume_PartitionConsumeError(t *testing.T) { logger := slog.New(new(mockLogger)) - storage := new(mockStorage) mockConfig := mocks.NewTestConfig() mockSarama := mocks.NewConsumer(t, mockConfig) - mockSarama.ExpectConsumePartition("github", 0, sarama.OffsetNewest).YieldError(sarama.ErrOutOfBrokers) + mockSarama.ExpectConsumePartition(kafkacp.KafkaTopicIdentifierGitHub.String(), 0, sarama.OffsetNewest). 
+ YieldError(sarama.ErrOutOfBrokers) mockFactory := &mockConsumerFactory{} mockFactory.On("NewConsumer", mock.Anything, mock.Anything).Return(mockSarama, nil).Once() consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), - kafkaconsumer.WithStorage(storage), - kafkaconsumer.WithTopic("github"), + kafkaconsumer.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumer.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumer.WithSaramaConsumerFactoryFunc(mockFactory.NewConsumer), kafkaconsumer.WithMaxRetries(1), ) assert.NoError(t, err) assert.NotNil(t, consumer) + var wg sync.WaitGroup + + wg.Add(1) go func() { + defer wg.Done() + err = consumer.Consume() assert.NoError(t, err) }() time.Sleep(100 * time.Millisecond) + process, _ := os.FindProcess(syscall.Getpid()) _ = process.Signal(os.Interrupt) mockFactory.AssertNumberOfCalls(t, "NewConsumer", 1) mockFactory.AssertExpectations(t) + wg.Wait() } diff --git a/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup.go b/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup.go index 2aa5806..54afa5d 100644 --- a/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup.go +++ b/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup.go @@ -355,8 +355,8 @@ func WithMaxRetries(i int) Option { return func(c *Consumer) error { if i > math.MaxUint8 || i < 0 { return fmt.Errorf( - "[kafkaconsumergroup.WithMaxRetries] error: [%w, '%[2]d' received, must < %[2]d or > 0]", - cerrors.ErrInvalid, i, + "[kafkaconsumergroup.WithMaxRetries] error: [%w, '%d' received, must < %d or > 0]", + cerrors.ErrInvalid, i, math.MaxUint8, ) } c.MaxRetries = uint8(i) diff --git a/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go b/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go index d193481..989c949 100644 --- a/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go +++ b/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go @@ -37,21 +37,6 @@ func (h *mockLogger) 
WithGroup(name string) slog.Handler { return h } -// mockStorage ---------------------------------------------------------------- -type mockStorage struct { - mock.Mock -} - -func (m *mockStorage) MessageStore(ctx context.Context, msg *sarama.ConsumerMessage) error { - args := m.Called(ctx, msg) - return args.Error(0) -} - -func (m *mockStorage) Ping(ctx context.Context, maxRetries uint8, backoff time.Duration) error { - args := m.Called(ctx, maxRetries, backoff) - return args.Error(0) -} - // mockConsumerGroup ---------------------------------------------------------- type mockConsumerGroup struct { mock.Mock From 747c6121604b464c89b4952d1c424e2482f2546d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?U=C4=9Fur=20=C3=96zy=C4=B1lmazel?= Date: Sun, 26 Jan 2025 23:17:27 +0300 Subject: [PATCH 07/13] wip - continue fix fmt.Errorf --- .../kafkaconsumergroup_test.go | 106 ++++-------------- 1 file changed, 21 insertions(+), 85 deletions(-) diff --git a/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go b/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go index 989c949..f4426df 100644 --- a/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go +++ b/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go @@ -18,6 +18,10 @@ import ( "github.com/stretchr/testify/mock" ) +var mockProcessMessage = func(ctx context.Context, msg *sarama.ConsumerMessage) error { + return nil +} + // mockLogger ----------------------------------------------------------------- type mockLogger struct{} @@ -131,11 +135,7 @@ func TestNew_NoGroupName(t *testing.T) { consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithProcessMessageFunc( - func(ctx context.Context, msg *sarama.ConsumerMessage) error { - return nil - }, - ), + kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessage), ) assert.ErrorIs(t, err, cerrors.ErrValueRequired) @@ -147,11 +147,7 @@ func TestNew_EmptyGroupName(t *testing.T) { consumer, err := 
kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithProcessMessageFunc( - func(ctx context.Context, msg *sarama.ConsumerMessage) error { - return nil - }, - ), + kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessage), kafkaconsumergroup.WithKafkaGroupName(""), ) @@ -164,11 +160,7 @@ func TestNew_NoTopic(t *testing.T) { consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithProcessMessageFunc( - func(ctx context.Context, msg *sarama.ConsumerMessage) error { - return nil - }, - ), + kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessage), kafkaconsumergroup.WithKafkaGroupName("github-group"), ) @@ -181,11 +173,7 @@ func TestNew_InvalidTopic(t *testing.T) { consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithProcessMessageFunc( - func(ctx context.Context, msg *sarama.ConsumerMessage) error { - return nil - }, - ), + kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessage), kafkaconsumergroup.WithKafkaGroupName("github-group"), kafkaconsumergroup.WithTopic("invalid"), ) @@ -199,11 +187,7 @@ func TestNew_InvalidBrokers(t *testing.T) { consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithProcessMessageFunc( - func(ctx context.Context, msg *sarama.ConsumerMessage) error { - return nil - }, - ), + kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessage), kafkaconsumergroup.WithKafkaGroupName("github-group"), kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumergroup.WithKafkaBrokers("invalid"), @@ -218,11 +202,7 @@ func TestNew_InvalidDialTimeout(t *testing.T) { consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithProcessMessageFunc( - func(ctx context.Context, msg *sarama.ConsumerMessage) error { - return nil - }, - ), + 
kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessage), kafkaconsumergroup.WithKafkaGroupName("github-group"), kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumergroup.WithKafkaBrokers(kafkacp.DefaultKafkaBrokers), @@ -238,11 +218,7 @@ func TestNew_InvalidReadTimeout(t *testing.T) { consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithProcessMessageFunc( - func(ctx context.Context, msg *sarama.ConsumerMessage) error { - return nil - }, - ), + kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessage), kafkaconsumergroup.WithKafkaGroupName("github-group"), kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumergroup.WithKafkaBrokers(kafkacp.DefaultKafkaBrokers), @@ -258,11 +234,7 @@ func TestNew_InvalidWriteTimeout(t *testing.T) { consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithProcessMessageFunc( - func(ctx context.Context, msg *sarama.ConsumerMessage) error { - return nil - }, - ), + kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessage), kafkaconsumergroup.WithKafkaGroupName("github-group"), kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumergroup.WithKafkaBrokers(kafkacp.DefaultKafkaBrokers), @@ -278,11 +250,7 @@ func TestNew_ZeroBackoff(t *testing.T) { consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithProcessMessageFunc( - func(ctx context.Context, msg *sarama.ConsumerMessage) error { - return nil - }, - ), + kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessage), kafkaconsumergroup.WithKafkaGroupName("github-group"), kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumergroup.WithKafkaBrokers(kafkacp.DefaultKafkaBrokers), @@ -298,11 +266,7 @@ func TestNew_InvalidBackoff(t *testing.T) { consumer, err := kafkaconsumergroup.New( 
kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithProcessMessageFunc( - func(ctx context.Context, msg *sarama.ConsumerMessage) error { - return nil - }, - ), + kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessage), kafkaconsumergroup.WithKafkaGroupName("github-group"), kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumergroup.WithKafkaBrokers(kafkacp.DefaultKafkaBrokers), @@ -318,11 +282,7 @@ func TestNew_InvalidMaxRetries(t *testing.T) { consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithProcessMessageFunc( - func(ctx context.Context, msg *sarama.ConsumerMessage) error { - return nil - }, - ), + kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessage), kafkaconsumergroup.WithKafkaGroupName("github-group"), kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumergroup.WithKafkaBrokers(kafkacp.DefaultKafkaBrokers), @@ -338,11 +298,7 @@ func TestNew_InvalidKafkaVersion(t *testing.T) { consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithProcessMessageFunc( - func(ctx context.Context, msg *sarama.ConsumerMessage) error { - return nil - }, - ), + kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessage), kafkaconsumergroup.WithKafkaGroupName("github-group"), kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumergroup.WithKafkaBrokers(kafkacp.DefaultKafkaBrokers), @@ -358,11 +314,7 @@ func TestNew_NilSaramaConsumerGroupHandler(t *testing.T) { consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithProcessMessageFunc( - func(ctx context.Context, msg *sarama.ConsumerMessage) error { - return nil - }, - ), + kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessage), kafkaconsumergroup.WithKafkaGroupName("github-group"), 
kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumergroup.WithKafkaBrokers(kafkacp.DefaultKafkaBrokers), @@ -378,11 +330,7 @@ func TestNew_NilSaramaConsumerGroupFactoryFunc(t *testing.T) { consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithProcessMessageFunc( - func(ctx context.Context, msg *sarama.ConsumerMessage) error { - return nil - }, - ), + kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessage), kafkaconsumergroup.WithKafkaGroupName("github-group"), kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumergroup.WithKafkaBrokers(kafkacp.DefaultKafkaBrokers), @@ -413,11 +361,7 @@ func TestNew_SaramaConsumerGroupFactoryFunc_Error(t *testing.T) { consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithProcessMessageFunc( - func(ctx context.Context, msg *sarama.ConsumerMessage) error { - return nil - }, - ), + kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessage), kafkaconsumergroup.WithKafkaGroupName("github-group"), kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumergroup.WithBackoff(100*time.Millisecond), @@ -444,11 +388,7 @@ func TestNew_SaramaConsumerGroupFactoryFunc_Success(t *testing.T) { consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithProcessMessageFunc( - func(ctx context.Context, msg *sarama.ConsumerMessage) error { - return nil - }, - ), + kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessage), kafkaconsumergroup.WithKafkaGroupName("github-group"), kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumergroup.WithBackoff(100*time.Millisecond), @@ -480,11 +420,7 @@ func TestNew_Consume_Success(t *testing.T) { consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithProcessMessageFunc( - 
func(ctx context.Context, msg *sarama.ConsumerMessage) error { - return nil - }, - ), + kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessage), kafkaconsumergroup.WithKafkaGroupName("github-group"), kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumergroup.WithBackoff(100*time.Millisecond), From 8f68c8e8a62bb3026495e78a2cd8e3f101d7c581 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?U=C4=9Fur=20=C3=96zy=C4=B1lmazel?= Date: Mon, 27 Jan 2025 10:29:01 +0300 Subject: [PATCH 08/13] add mock slogger + improve apiserver + tests --- internal/apiserver/apiserver.go | 36 +++++-- internal/apiserver/apiserver_test.go | 106 ++++++++++++++------ internal/slogger/mockslogger/mockslogger.go | 25 +++++ 3 files changed, 125 insertions(+), 42 deletions(-) create mode 100644 internal/slogger/mockslogger/mockslogger.go diff --git a/internal/apiserver/apiserver.go b/internal/apiserver/apiserver.go index b02235b..5055883 100644 --- a/internal/apiserver/apiserver.go +++ b/internal/apiserver/apiserver.go @@ -5,6 +5,7 @@ import ( _ "embed" "fmt" "log/slog" + "slices" "time" "github.com/devchain-network/cauldron/internal/cerrors" @@ -53,11 +54,23 @@ type Server struct { IdleTimeout time.Duration } +var validHTTPMethods = []string{ + fasthttp.MethodGet, + fasthttp.MethodHead, + fasthttp.MethodPost, + fasthttp.MethodPut, + fasthttp.MethodPatch, + fasthttp.MethodDelete, + fasthttp.MethodConnect, + fasthttp.MethodOptions, + fasthttp.MethodTrace, +} + // Start starts the fast http server. 
func (s *Server) Start() error { s.Logger.Info("start listening at", "addr", s.ListenAddr, "version", ServerVersion) if err := s.FastHTTP.ListenAndServe(s.ListenAddr); err != nil { - return fmt.Errorf("fast http listen and serve error: [%w]", err) + return fmt.Errorf("[apiserver.Start][ListenAndServe] error: [%w]", err) } return nil @@ -69,7 +82,7 @@ func (s *Server) Stop() error { if err := s.FastHTTP.ShutdownWithContext(context.Background()); err != nil { s.Logger.Error("fast http shutdown with context error", "error", err) - return fmt.Errorf("fast http shutdown with context error: [%w]", err) + return fmt.Errorf("[apiserver.Stop][ShutdownWithContext] error: [%w]", err) } return nil @@ -77,15 +90,15 @@ func (s Server) checkRequired() error { if s.Logger == nil { - return fmt.Errorf("api server check required, Logger error: [%w]", cerrors.ErrValueRequired) + return fmt.Errorf("[apiserver.checkRequired] Logger error: [%w, 'nil' received]", cerrors.ErrValueRequired) } if s.Handlers == nil { - return fmt.Errorf("api server check required, Handlers error: [%w]", cerrors.ErrValueRequired) + return fmt.Errorf("[apiserver.checkRequired] Handlers error: [%w, 'nil' received]", cerrors.ErrValueRequired) } if !s.KafkaGitHubTopic.Valid() { - return fmt.Errorf("api server check required, KafkaGitHubTopic error: [%w]", cerrors.ErrInvalid) + return fmt.Errorf("[apiserver.checkRequired] KafkaGitHubTopic error: [%w, false received]", cerrors.ErrInvalid) } return nil @@ -95,7 +108,7 @@ func (s Server) checkRequired() error { // WithLogger sets logger. func WithLogger(l *slog.Logger) Option { return func(server *Server) error { if l == nil { - return fmt.Errorf("api server WithLogger error: [%w]", cerrors.ErrValueRequired) + return fmt.Errorf("[apiserver.WithLogger] error: [%w, 'nil' received]", cerrors.ErrValueRequired) } server.Logger = l @@ -107,13 +120,18 @@ func WithLogger(l *slog.Logger) Option { 
Option { return func(server *Server) error { if method == "" { - return fmt.Errorf("api server WithHTTPHandler method error: [%w]", cerrors.ErrValueRequired) + return fmt.Errorf("[apiserver.WithHTTPHandler] method error: [%w, empty string]", cerrors.ErrValueRequired) } + + if !slices.Contains(validHTTPMethods, method) { + return fmt.Errorf("[apiserver.WithHTTPHandler] method error: ['%s' is %w]", method, cerrors.ErrInvalid) + } + if path == "" { - return fmt.Errorf("api server WithHTTPHandler path error: [%w]", cerrors.ErrValueRequired) + return fmt.Errorf("[apiserver.WithHTTPHandler] path error: [%w, empty string]", cerrors.ErrValueRequired) } if handler == nil { - return fmt.Errorf("api server WithHTTPHandler http handler error: [%w]", cerrors.ErrValueRequired) + return fmt.Errorf("[apiserver.WithHTTPHandler] handler error: [%w, empty string]", cerrors.ErrValueRequired) } if server.Handlers == nil { diff --git a/internal/apiserver/apiserver_test.go b/internal/apiserver/apiserver_test.go index e44860d..763dd16 100644 --- a/internal/apiserver/apiserver_test.go +++ b/internal/apiserver/apiserver_test.go @@ -1,35 +1,20 @@ package apiserver_test import ( - "context" "log/slog" + "sync" "testing" "time" "github.com/devchain-network/cauldron/internal/apiserver" "github.com/devchain-network/cauldron/internal/cerrors" "github.com/devchain-network/cauldron/internal/kafkacp" + "github.com/devchain-network/cauldron/internal/slogger/mockslogger" "github.com/stretchr/testify/assert" "github.com/valyala/fasthttp" ) -type mockLogger struct{} - -func (h *mockLogger) Enabled(_ context.Context, _ slog.Level) bool { - return true -} - -func (h *mockLogger) Handle(_ context.Context, record slog.Record) error { - return nil -} - -func (h *mockLogger) WithAttrs(attrs []slog.Attr) slog.Handler { - return h -} - -func (h *mockLogger) WithGroup(name string) slog.Handler { - return h -} +var mockLog = slog.New(new(mockslogger.MockLogger)) func TestNew_NoParams(t *testing.T) { server, err 
:= apiserver.New() @@ -48,7 +33,7 @@ func TestNew_NilLogger(t *testing.T) { } func TestNew_EmptyListenAddr(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog server, err := apiserver.New( apiserver.WithLogger(logger), @@ -60,7 +45,7 @@ func TestNew_EmptyListenAddr(t *testing.T) { } func TestNew_InvalidListenAddr(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog server, err := apiserver.New( apiserver.WithLogger(logger), @@ -72,7 +57,7 @@ func TestNew_InvalidListenAddr(t *testing.T) { } func TestNew_InvalidKafkaTopic(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog server, err := apiserver.New( apiserver.WithLogger(logger), @@ -84,7 +69,7 @@ func TestNew_InvalidKafkaTopic(t *testing.T) { } func TestNew_InvalidBrokers(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog var kafkaBrokers kafkacp.KafkaBrokers kafkaBrokers.AddFromString("foo") @@ -99,7 +84,7 @@ func TestNew_InvalidBrokers(t *testing.T) { } func TestNew_InvalidReadTimeout(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog server, err := apiserver.New( apiserver.WithLogger(logger), @@ -111,7 +96,7 @@ func TestNew_InvalidReadTimeout(t *testing.T) { } func TestNew_InvalidWriteTimeout(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog server, err := apiserver.New( apiserver.WithLogger(logger), @@ -123,7 +108,7 @@ func TestNew_InvalidWriteTimeout(t *testing.T) { } func TestNew_InvalidIdleTimeout(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog server, err := apiserver.New( apiserver.WithLogger(logger), @@ -135,7 +120,7 @@ func TestNew_InvalidIdleTimeout(t *testing.T) { } func TestNew_NilHTTPHandler(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog var kafkaBrokers kafkacp.KafkaBrokers kafkaBrokers.AddFromString("localhost:9194") @@ -155,7 +140,7 @@ func TestNew_NilHTTPHandler(t *testing.T) { } func 
TestNew_InvalidKafkaTopic_check(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog var kafkaBrokers kafkacp.KafkaBrokers kafkaBrokers.AddFromString("localhost:9194") @@ -179,7 +164,7 @@ func TestNew_InvalidKafkaTopic_check(t *testing.T) { } func TestNew_MissingArgsHTTPHandler_method(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog server, err := apiserver.New( apiserver.WithLogger(logger), @@ -199,8 +184,29 @@ func TestNew_MissingArgsHTTPHandler_method(t *testing.T) { assert.Nil(t, server) } +func TestNew_InvalidArgsHTTPHandler_method(t *testing.T) { + logger := mockLog + + server, err := apiserver.New( + apiserver.WithLogger(logger), + apiserver.WithListenAddr(":9000"), + apiserver.WithReadTimeout(5*time.Second), + apiserver.WithWriteTimeout(5*time.Second), + apiserver.WithIdleTimeout(5*time.Second), + apiserver.WithKafkaGitHubTopic(kafkacp.KafkaTopicIdentifierGitHub), + apiserver.WithHTTPHandler( + "FOO", + "/test", + func(ctx *fasthttp.RequestCtx) { ctx.SetStatusCode(fasthttp.StatusOK) }, + ), + ) + + assert.ErrorIs(t, err, cerrors.ErrInvalid) + assert.Nil(t, server) +} + func TestNew_MissingArgsHTTPHandler_path(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog server, err := apiserver.New( apiserver.WithLogger(logger), @@ -221,7 +227,7 @@ func TestNew_MissingArgsHTTPHandler_path(t *testing.T) { } func TestNew_MissingArgsHTTPHandler_handler(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog server, err := apiserver.New( apiserver.WithLogger(logger), @@ -242,7 +248,7 @@ func TestNew_MissingArgsHTTPHandler_handler(t *testing.T) { } func TestHttpRouter_NotFound(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog server, err := apiserver.New( apiserver.WithLogger(logger), @@ -265,7 +271,7 @@ func TestHttpRouter_NotFound(t *testing.T) { } func TestHttpRouter_MethodNotAllowed(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog 
server, err := apiserver.New( apiserver.WithLogger(logger), apiserver.WithKafkaGitHubTopic(kafkacp.KafkaTopicIdentifierGitHub), @@ -287,7 +293,7 @@ func TestHttpRouter_MethodNotAllowed(t *testing.T) { } func TestHttpRouter_ValidRouteAndMethod(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog server, err := apiserver.New( apiserver.WithLogger(logger), apiserver.WithKafkaGitHubTopic(kafkacp.KafkaTopicIdentifierGitHub), @@ -311,3 +317,37 @@ func TestHttpRouter_ValidRouteAndMethod(t *testing.T) { assert.Equal(t, fasthttp.StatusOK, ctx.Response.StatusCode()) assert.Equal(t, "success", string(ctx.Response.Body())) } + +func TestServer_Start(t *testing.T) { + logger := mockLog + server, err := apiserver.New( + apiserver.WithLogger(logger), + apiserver.WithKafkaGitHubTopic(kafkacp.KafkaTopicIdentifierGitHub), + apiserver.WithHTTPHandler( + fasthttp.MethodGet, + "/existing-path", + func(ctx *fasthttp.RequestCtx) { + ctx.SetStatusCode(fasthttp.StatusOK) + ctx.SetBody([]byte("success")) + }, + ), + ) + assert.NoError(t, err) + + var wg sync.WaitGroup + + wg.Add(1) + go func() { + defer wg.Done() + + err = server.Start() + assert.NoError(t, err) + }() + + time.Sleep(100 * time.Millisecond) + + err = server.Stop() + assert.NoError(t, err) + + wg.Wait() +} diff --git a/internal/slogger/mockslogger/mockslogger.go b/internal/slogger/mockslogger/mockslogger.go new file mode 100644 index 0000000..54e64d1 --- /dev/null +++ b/internal/slogger/mockslogger/mockslogger.go @@ -0,0 +1,25 @@ +//nolint:all +package mockslogger + +import ( + "context" + "log/slog" +) + +type MockLogger struct{} + +func (h *MockLogger) Enabled(_ context.Context, _ slog.Level) bool { + return true +} + +func (h *MockLogger) Handle(_ context.Context, _ slog.Record) error { + return nil +} + +func (h *MockLogger) WithAttrs(_ []slog.Attr) slog.Handler { + return h +} + +func (h *MockLogger) WithGroup(_ string) slog.Handler { + return h +} From 5422288001b0b7f85185fb249f8823c281bc9851 Mon 
Sep 17 00:00:00 2001 From: =?UTF-8?q?U=C4=9Fur=20=C3=96zy=C4=B1lmazel?= Date: Mon, 27 Jan 2025 11:56:19 +0300 Subject: [PATCH 09/13] refactor - wip --- cmd/server/main.go | 11 +-- internal/apiserver/apiserver.go | 84 ++++++++++++---- internal/apiserver/apiserver_test.go | 35 +++---- .../kafkaconsumer/kafkaconsumer_test.go | 62 +++++------- .../kafkaconsumergroup_test.go | 59 ++++-------- .../kafkacp/kafkaproducer/kafkaproducer.go | 75 +++++++++++---- .../kafkaproducer/kafkaproducer_test.go | 96 +++++++------------ 7 files changed, 217 insertions(+), 205 deletions(-) diff --git a/cmd/server/main.go b/cmd/server/main.go index 8f1bd27..6319f7c 100644 --- a/cmd/server/main.go +++ b/cmd/server/main.go @@ -55,12 +55,9 @@ func Run() error { return fmt.Errorf("logger instantiate error: [%w]", err) } - var kafkaBrokers kafkacp.KafkaBrokers - kafkaBrokers.AddFromString(*brokersList) - kafkaProducer, err := kafkaproducer.New( kafkaproducer.WithLogger(logger), - kafkaproducer.WithKafkaBrokers(kafkaBrokers), + kafkaproducer.WithKafkaBrokers(*brokersList), kafkaproducer.WithMaxRetries(*kafkaProducerMaxRetries), kafkaproducer.WithBackoff(*kafkaProducerBackoff), kafkaproducer.WithDialTimeout(*kafkaProducerDialTimeout), @@ -73,7 +70,7 @@ func Run() error { defer kafkaProducer.AsyncClose() - logger.Info("connected to kafka brokers", "addrs", kafkaBrokers) + logger.Info("connected to kafka brokers", "addrs", *brokersList) githubWebhookMessageQueue := make(chan *sarama.ProducerMessage, *kafkaProducerGithubWebhookMessageQueueSize) @@ -107,8 +104,8 @@ func Run() error { apiserver.WithReadTimeout(*serverReadTimeout), apiserver.WithWriteTimeout(*serverWriteTimeout), apiserver.WithIdleTimeout(*serverIdleTimeout), - apiserver.WithKafkaGitHubTopic(kafkacp.KafkaTopicIdentifierGitHub), - apiserver.WithKafkaBrokers(kafkaBrokers), + apiserver.WithKafkaGitHubTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), + apiserver.WithKafkaBrokers(*brokersList), 
apiserver.WithHTTPHandler(fasthttp.MethodGet, "/healthz", healthCheckHandler.Handle), apiserver.WithHTTPHandler(fasthttp.MethodPost, "/v1/webhook/github", githubWebhookHandler.Handle), ) diff --git a/internal/apiserver/apiserver.go b/internal/apiserver/apiserver.go index 5055883..5017b92 100644 --- a/internal/apiserver/apiserver.go +++ b/internal/apiserver/apiserver.go @@ -98,7 +98,10 @@ func (s Server) checkRequired() error { } if !s.KafkaGitHubTopic.Valid() { - return fmt.Errorf("[apiserver.checkRequired] KafkaGitHubTopic error: [%w, false received]", cerrors.ErrInvalid) + return fmt.Errorf( + "[apiserver.checkRequired] KafkaGitHubTopic error: [%w, '%s' received]", + cerrors.ErrInvalid, s.KafkaGitHubTopic, + ) } return nil @@ -108,7 +111,10 @@ func (s Server) checkRequired() error { func WithLogger(l *slog.Logger) Option { return func(server *Server) error { if l == nil { - return fmt.Errorf("[apiserver.WithLogger] error: [%w, 'nil' received]", cerrors.ErrValueRequired) + return fmt.Errorf( + "[apiserver.WithLogger] error: [%w, 'nil' received]", + cerrors.ErrValueRequired, + ) } server.Logger = l @@ -120,18 +126,30 @@ func WithLogger(l *slog.Logger) Option { func WithHTTPHandler(method, path string, handler fasthttp.RequestHandler) Option { return func(server *Server) error { if method == "" { - return fmt.Errorf("[apiserver.WithHTTPHandler] method error: [%w, empty string]", cerrors.ErrValueRequired) + return fmt.Errorf( + "[apiserver.WithHTTPHandler] method error: [%w, empty string]", + cerrors.ErrValueRequired, + ) } if !slices.Contains(validHTTPMethods, method) { - return fmt.Errorf("[apiserver.WithHTTPHandler] method error: ['%s' is %w]", method, cerrors.ErrInvalid) + return fmt.Errorf( + "[apiserver.WithHTTPHandler] method error: ['%s' is %w]", + method, cerrors.ErrInvalid, + ) } if path == "" { - return fmt.Errorf("[apiserver.WithHTTPHandler] path error: [%w, empty string]", cerrors.ErrValueRequired) + return fmt.Errorf( + "[apiserver.WithHTTPHandler] path 
error: [%w, empty string]", + cerrors.ErrValueRequired, + ) } if handler == nil { - return fmt.Errorf("[apiserver.WithHTTPHandler] handler error: [%w, empty string]", cerrors.ErrValueRequired) + return fmt.Errorf( + "[apiserver.WithHTTPHandler] handler error: [%w, empty string]", + cerrors.ErrValueRequired, + ) } if server.Handlers == nil { @@ -147,11 +165,17 @@ func WithHTTPHandler(method, path string, handler fasthttp.RequestHandler) Optio func WithListenAddr(addr string) Option { return func(server *Server) error { if addr == "" { - return fmt.Errorf("api server WithListenAddr addr error: [%w]", cerrors.ErrValueRequired) + return fmt.Errorf( + "[apiserver.WithListenAddr] error: [%w, empty string]", + cerrors.ErrValueRequired, + ) } if _, err := getenv.ValidateTCPNetworkAddress(addr); err != nil { - return fmt.Errorf("api server WithListenAddr tcp addr error: [%w] [%w]", err, cerrors.ErrInvalid) + return fmt.Errorf( + "[apiserver.WithListenAddr] error: [%w] ['%s' %w]", + err, addr, cerrors.ErrInvalid, + ) } server.ListenAddr = addr @@ -164,7 +188,10 @@ func WithListenAddr(addr string) Option { func WithReadTimeout(d time.Duration) Option { return func(server *Server) error { if d < 0 { - return fmt.Errorf("api server WithReadTimeout error: [%w]", cerrors.ErrInvalid) + return fmt.Errorf( + "[apiserver.WithReadTimeout] error: [%w, '%s' received, must > 0]", + cerrors.ErrInvalid, d, + ) } server.ReadTimeout = d @@ -177,7 +204,10 @@ func WithReadTimeout(d time.Duration) Option { func WithWriteTimeout(d time.Duration) Option { return func(server *Server) error { if d < 0 { - return fmt.Errorf("api server WithWriteTimeout error: [%w]", cerrors.ErrInvalid) + return fmt.Errorf( + "[apiserver.WithWriteTimeout] error: [%w, '%s' received, must > 0]", + cerrors.ErrInvalid, d, + ) } server.WriteTimeout = d @@ -189,7 +219,10 @@ func WithWriteTimeout(d time.Duration) Option { func WithIdleTimeout(d time.Duration) Option { return func(server *Server) error { if d < 0 { - return 
fmt.Errorf("api server WithIdleTimeout error: [%w]", cerrors.ErrInvalid) + return fmt.Errorf( + "[apiserver.WithIdleTimeout] error: [%w, '%s' received, must > 0]", + cerrors.ErrInvalid, d, + ) } server.IdleTimeout = d @@ -198,25 +231,36 @@ func WithIdleTimeout(d time.Duration) Option { } // WithKafkaBrokers sets kafka brokers list. -func WithKafkaBrokers(brokers kafkacp.KafkaBrokers) Option { +func WithKafkaBrokers(brokers string) Option { return func(server *Server) error { - if !brokers.Valid() { - return fmt.Errorf("api server WithKafkaBrokers error: [%w]", cerrors.ErrInvalid) + var kafkaBrokers kafkacp.KafkaBrokers + kafkaBrokers.AddFromString(brokers) + + if !kafkaBrokers.Valid() { + return fmt.Errorf( + "[apiserver.WithKafkaBrokers] error: [%w, '%s' received]", + cerrors.ErrInvalid, brokers, + ) } - server.KafkaBrokers = brokers + server.KafkaBrokers = kafkaBrokers return nil } } // WithKafkaGitHubTopic sets kafka topic name for github webhooks. -func WithKafkaGitHubTopic(s kafkacp.KafkaTopicIdentifier) Option { +func WithKafkaGitHubTopic(s string) Option { return func(server *Server) error { - if !s.Valid() { - return fmt.Errorf("api server WithKafkaGitHubTopic error: [%w]", cerrors.ErrInvalid) + topic := kafkacp.KafkaTopicIdentifier(s) + + if !topic.Valid() { + return fmt.Errorf( + "[apiserver.WithKafkaGitHubTopic] error: [%w, '%s' received]", + cerrors.ErrInvalid, s, + ) } - server.KafkaGitHubTopic = s + server.KafkaGitHubTopic = topic return nil } @@ -236,7 +280,7 @@ func New(options ...Option) (*Server, error) { for _, option := range options { if err := option(server); err != nil { - return nil, fmt.Errorf("api server option error: [%w]", err) + return nil, err } } diff --git a/internal/apiserver/apiserver_test.go b/internal/apiserver/apiserver_test.go index 763dd16..5937007 100644 --- a/internal/apiserver/apiserver_test.go +++ b/internal/apiserver/apiserver_test.go @@ -61,7 +61,7 @@ func TestNew_InvalidKafkaTopic(t *testing.T) { server, err := 
apiserver.New( apiserver.WithLogger(logger), - apiserver.WithKafkaGitHubTopic(kafkacp.KafkaTopicIdentifier("foo")), + apiserver.WithKafkaGitHubTopic("foo"), ) assert.ErrorIs(t, err, cerrors.ErrInvalid) @@ -71,12 +71,9 @@ func TestNew_InvalidKafkaTopic(t *testing.T) { func TestNew_InvalidBrokers(t *testing.T) { logger := mockLog - var kafkaBrokers kafkacp.KafkaBrokers - kafkaBrokers.AddFromString("foo") - server, err := apiserver.New( apiserver.WithLogger(logger), - apiserver.WithKafkaBrokers(kafkaBrokers), + apiserver.WithKafkaBrokers("foo"), ) assert.ErrorIs(t, err, cerrors.ErrInvalid) @@ -122,17 +119,14 @@ func TestNew_InvalidIdleTimeout(t *testing.T) { func TestNew_NilHTTPHandler(t *testing.T) { logger := mockLog - var kafkaBrokers kafkacp.KafkaBrokers - kafkaBrokers.AddFromString("localhost:9194") - server, err := apiserver.New( apiserver.WithLogger(logger), apiserver.WithListenAddr(":9000"), apiserver.WithReadTimeout(5*time.Second), apiserver.WithWriteTimeout(5*time.Second), apiserver.WithIdleTimeout(5*time.Second), - apiserver.WithKafkaBrokers(kafkaBrokers), - apiserver.WithKafkaGitHubTopic(kafkacp.KafkaTopicIdentifierGitHub), + apiserver.WithKafkaBrokers("localhost:9194"), + apiserver.WithKafkaGitHubTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), ) assert.ErrorIs(t, err, cerrors.ErrValueRequired) @@ -142,16 +136,13 @@ func TestNew_NilHTTPHandler(t *testing.T) { func TestNew_InvalidKafkaTopic_check(t *testing.T) { logger := mockLog - var kafkaBrokers kafkacp.KafkaBrokers - kafkaBrokers.AddFromString("localhost:9194") - server, err := apiserver.New( apiserver.WithLogger(logger), apiserver.WithListenAddr(":9000"), apiserver.WithReadTimeout(5*time.Second), apiserver.WithWriteTimeout(5*time.Second), apiserver.WithIdleTimeout(5*time.Second), - apiserver.WithKafkaBrokers(kafkaBrokers), + apiserver.WithKafkaBrokers("localhost:9194"), apiserver.WithHTTPHandler( fasthttp.MethodGet, "/test", @@ -172,7 +163,7 @@ func TestNew_MissingArgsHTTPHandler_method(t 
*testing.T) { apiserver.WithReadTimeout(5*time.Second), apiserver.WithWriteTimeout(5*time.Second), apiserver.WithIdleTimeout(5*time.Second), - apiserver.WithKafkaGitHubTopic(kafkacp.KafkaTopicIdentifierGitHub), + apiserver.WithKafkaGitHubTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), apiserver.WithHTTPHandler( "", "/test", @@ -193,7 +184,7 @@ func TestNew_InvalidArgsHTTPHandler_method(t *testing.T) { apiserver.WithReadTimeout(5*time.Second), apiserver.WithWriteTimeout(5*time.Second), apiserver.WithIdleTimeout(5*time.Second), - apiserver.WithKafkaGitHubTopic(kafkacp.KafkaTopicIdentifierGitHub), + apiserver.WithKafkaGitHubTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), apiserver.WithHTTPHandler( "FOO", "/test", @@ -214,7 +205,7 @@ func TestNew_MissingArgsHTTPHandler_path(t *testing.T) { apiserver.WithReadTimeout(5*time.Second), apiserver.WithWriteTimeout(5*time.Second), apiserver.WithIdleTimeout(5*time.Second), - apiserver.WithKafkaGitHubTopic(kafkacp.KafkaTopicIdentifierGitHub), + apiserver.WithKafkaGitHubTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), apiserver.WithHTTPHandler( fasthttp.MethodGet, "", @@ -235,7 +226,7 @@ func TestNew_MissingArgsHTTPHandler_handler(t *testing.T) { apiserver.WithReadTimeout(5*time.Second), apiserver.WithWriteTimeout(5*time.Second), apiserver.WithIdleTimeout(5*time.Second), - apiserver.WithKafkaGitHubTopic(kafkacp.KafkaTopicIdentifierGitHub), + apiserver.WithKafkaGitHubTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), apiserver.WithHTTPHandler( fasthttp.MethodGet, "/test", @@ -252,7 +243,7 @@ func TestHttpRouter_NotFound(t *testing.T) { server, err := apiserver.New( apiserver.WithLogger(logger), - apiserver.WithKafkaGitHubTopic(kafkacp.KafkaTopicIdentifierGitHub), + apiserver.WithKafkaGitHubTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), apiserver.WithHTTPHandler( fasthttp.MethodGet, "/existing-path", @@ -274,7 +265,7 @@ func TestHttpRouter_MethodNotAllowed(t *testing.T) { logger := mockLog server, err := 
apiserver.New( apiserver.WithLogger(logger), - apiserver.WithKafkaGitHubTopic(kafkacp.KafkaTopicIdentifierGitHub), + apiserver.WithKafkaGitHubTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), apiserver.WithHTTPHandler( fasthttp.MethodGet, "/existing-path", @@ -296,7 +287,7 @@ func TestHttpRouter_ValidRouteAndMethod(t *testing.T) { logger := mockLog server, err := apiserver.New( apiserver.WithLogger(logger), - apiserver.WithKafkaGitHubTopic(kafkacp.KafkaTopicIdentifierGitHub), + apiserver.WithKafkaGitHubTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), apiserver.WithHTTPHandler( fasthttp.MethodGet, "/existing-path", @@ -322,7 +313,7 @@ func TestServer_Start(t *testing.T) { logger := mockLog server, err := apiserver.New( apiserver.WithLogger(logger), - apiserver.WithKafkaGitHubTopic(kafkacp.KafkaTopicIdentifierGitHub), + apiserver.WithKafkaGitHubTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), apiserver.WithHTTPHandler( fasthttp.MethodGet, "/existing-path", diff --git a/internal/kafkacp/kafkaconsumer/kafkaconsumer_test.go b/internal/kafkacp/kafkaconsumer/kafkaconsumer_test.go index f867367..c7568a9 100644 --- a/internal/kafkacp/kafkaconsumer/kafkaconsumer_test.go +++ b/internal/kafkacp/kafkaconsumer/kafkaconsumer_test.go @@ -14,6 +14,7 @@ import ( "github.com/devchain-network/cauldron/internal/cerrors" "github.com/devchain-network/cauldron/internal/kafkacp" "github.com/devchain-network/cauldron/internal/kafkacp/kafkaconsumer" + "github.com/devchain-network/cauldron/internal/slogger/mockslogger" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" ) @@ -27,27 +28,12 @@ func (m *mockConsumerFactory) NewConsumer(brokers []string, config *sarama.Confi return args.Get(0).(sarama.Consumer), args.Error(1) } -type mockLogger struct{} - -func (h *mockLogger) Enabled(_ context.Context, _ slog.Level) bool { - return true -} - -func (h *mockLogger) Handle(_ context.Context, record slog.Record) error { - return nil -} - -func (h *mockLogger) 
WithAttrs(attrs []slog.Attr) slog.Handler { - return h -} - -func (h *mockLogger) WithGroup(name string) slog.Handler { - return h -} - -var mockProcessMessage = func(ctx context.Context, msg *sarama.ConsumerMessage) error { - return nil -} +var ( + mockLog = slog.New(new(mockslogger.MockLogger)) + mockProcessMessage = func(ctx context.Context, msg *sarama.ConsumerMessage) error { + return nil + } +) func TestNew_MissingRequiredFields(t *testing.T) { consumer, err := kafkaconsumer.New() @@ -66,7 +52,7 @@ func TestNew_NilLogger(t *testing.T) { } func TestNew_NoProcessMessageFunc(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), @@ -77,7 +63,7 @@ func TestNew_NoProcessMessageFunc(t *testing.T) { } func TestNew_NilProcessMessageFunc(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), @@ -89,7 +75,7 @@ func TestNew_NilProcessMessageFunc(t *testing.T) { } func TestNew_EmptyTopic(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), @@ -101,7 +87,7 @@ func TestNew_EmptyTopic(t *testing.T) { } func TestNew_InvalidTopic(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), @@ -114,7 +100,7 @@ func TestNew_InvalidTopic(t *testing.T) { } func TestNew_InvalidPartition(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), @@ -128,7 +114,7 @@ func TestNew_InvalidPartition(t *testing.T) { } func TestNew_InvalidBrokers(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), @@ -142,7 +128,7 @@ func TestNew_InvalidBrokers(t *testing.T) { } func 
TestNew_InvalidDialTimeout(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), @@ -157,7 +143,7 @@ func TestNew_InvalidDialTimeout(t *testing.T) { } func TestNew_InvalidReadTimeout(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), @@ -172,7 +158,7 @@ func TestNew_InvalidReadTimeout(t *testing.T) { } func TestNew_InvalidWriteTimeout(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), @@ -187,7 +173,7 @@ func TestNew_InvalidWriteTimeout(t *testing.T) { } func TestNew_ZeroBackoff(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), @@ -202,7 +188,7 @@ func TestNew_ZeroBackoff(t *testing.T) { } func TestNew_InvalidBackoff(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), @@ -217,7 +203,7 @@ func TestNew_InvalidBackoff(t *testing.T) { } func TestNew_InvalidMaxRetries(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), @@ -232,7 +218,7 @@ func TestNew_InvalidMaxRetries(t *testing.T) { } func TestNew_NilSaramaConsumerFactoryFunc(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), @@ -247,7 +233,7 @@ func TestNew_NilSaramaConsumerFactoryFunc(t *testing.T) { } func TestNew_WithSaramaConsumerFactoryFunc_Error(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog mockConfig := mocks.NewTestConfig() mockSarama := mocks.NewConsumer(t, mockConfig) @@ -272,7 +258,7 @@ func TestNew_WithSaramaConsumerFactoryFunc_Error(t 
*testing.T) { } func TestNew_WithSaramaConsumerFactoryFunc_Success(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog mockConfig := mocks.NewTestConfig() mockSarama := mocks.NewConsumer(t, mockConfig) @@ -302,7 +288,7 @@ func TestNew_WithSaramaConsumerFactoryFunc_Success(t *testing.T) { } func TestConsumer_Consume_Success(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog mockConfig := mocks.NewTestConfig() mockSarama := mocks.NewConsumer(t, mockConfig) @@ -348,7 +334,7 @@ func TestConsumer_Consume_Success(t *testing.T) { } func TestConsumer_Consume_PartitionConsumeError(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog mockConfig := mocks.NewTestConfig() mockSarama := mocks.NewConsumer(t, mockConfig) diff --git a/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go b/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go index f4426df..6af610a 100644 --- a/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go +++ b/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go @@ -14,6 +14,7 @@ import ( "github.com/devchain-network/cauldron/internal/cerrors" "github.com/devchain-network/cauldron/internal/kafkacp" "github.com/devchain-network/cauldron/internal/kafkacp/kafkaconsumergroup" + "github.com/devchain-network/cauldron/internal/slogger/mockslogger" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" ) @@ -21,25 +22,7 @@ import ( var mockProcessMessage = func(ctx context.Context, msg *sarama.ConsumerMessage) error { return nil } - -// mockLogger ----------------------------------------------------------------- -type mockLogger struct{} - -func (h *mockLogger) Enabled(_ context.Context, _ slog.Level) bool { - return true -} - -func (h *mockLogger) Handle(_ context.Context, record slog.Record) error { - return nil -} - -func (h *mockLogger) WithAttrs(attrs []slog.Attr) slog.Handler { - return h -} - -func (h *mockLogger) WithGroup(name string) 
slog.Handler { - return h -} +var mockLog = slog.New(new(mockslogger.MockLogger)) // mockConsumerGroup ---------------------------------------------------------- type mockConsumerGroup struct { @@ -108,7 +91,7 @@ func TestNew_NilLogger(t *testing.T) { } func TestNew_NoProcessMessageFunc(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), @@ -119,7 +102,7 @@ func TestNew_NoProcessMessageFunc(t *testing.T) { } func TestNew_NilProcessMessageFunc(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), @@ -131,7 +114,7 @@ func TestNew_NilProcessMessageFunc(t *testing.T) { } func TestNew_NoGroupName(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), @@ -143,7 +126,7 @@ func TestNew_NoGroupName(t *testing.T) { } func TestNew_EmptyGroupName(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), @@ -156,7 +139,7 @@ func TestNew_EmptyGroupName(t *testing.T) { } func TestNew_NoTopic(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), @@ -169,7 +152,7 @@ func TestNew_NoTopic(t *testing.T) { } func TestNew_InvalidTopic(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), @@ -183,7 +166,7 @@ func TestNew_InvalidTopic(t *testing.T) { } func TestNew_InvalidBrokers(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), @@ -198,7 +181,7 @@ func TestNew_InvalidBrokers(t *testing.T) { } func 
TestNew_InvalidDialTimeout(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), @@ -214,7 +197,7 @@ func TestNew_InvalidDialTimeout(t *testing.T) { } func TestNew_InvalidReadTimeout(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), @@ -230,7 +213,7 @@ func TestNew_InvalidReadTimeout(t *testing.T) { } func TestNew_InvalidWriteTimeout(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), @@ -246,7 +229,7 @@ func TestNew_InvalidWriteTimeout(t *testing.T) { } func TestNew_ZeroBackoff(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), @@ -262,7 +245,7 @@ func TestNew_ZeroBackoff(t *testing.T) { } func TestNew_InvalidBackoff(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), @@ -278,7 +261,7 @@ func TestNew_InvalidBackoff(t *testing.T) { } func TestNew_InvalidMaxRetries(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), @@ -294,7 +277,7 @@ func TestNew_InvalidMaxRetries(t *testing.T) { } func TestNew_InvalidKafkaVersion(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), @@ -310,7 +293,7 @@ func TestNew_InvalidKafkaVersion(t *testing.T) { } func TestNew_NilSaramaConsumerGroupHandler(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), @@ -326,7 +309,7 @@ func 
TestNew_NilSaramaConsumerGroupHandler(t *testing.T) { } func TestNew_NilSaramaConsumerGroupFactoryFunc(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), @@ -348,7 +331,7 @@ func TestNew_NilSaramaConsumerGroupFactoryFunc(t *testing.T) { } func TestNew_SaramaConsumerGroupFactoryFunc_Error(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog consumerGroup := &mockConsumerGroup{} consumerGroupFactory := &mockConsumerGroupFactory{} @@ -376,7 +359,7 @@ func TestNew_SaramaConsumerGroupFactoryFunc_Error(t *testing.T) { } func TestNew_SaramaConsumerGroupFactoryFunc_Success(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog consumerGroup := &mockConsumerGroup{} consumerGroupFactory := &mockConsumerGroupFactory{} @@ -403,7 +386,7 @@ func TestNew_SaramaConsumerGroupFactoryFunc_Success(t *testing.T) { } func TestNew_Consume_Success(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog consumerGroup := &mockConsumerGroup{} consumerGroup.On("Errors").Return((<-chan error)(make(chan error))) diff --git a/internal/kafkacp/kafkaproducer/kafkaproducer.go b/internal/kafkacp/kafkaproducer/kafkaproducer.go index 754fae6..d4a5c2f 100644 --- a/internal/kafkacp/kafkaproducer/kafkaproducer.go +++ b/internal/kafkacp/kafkaproducer/kafkaproducer.go @@ -37,7 +37,10 @@ type SaramaProducerFactoryFunc func([]string, *sarama.Config) (sarama.AsyncProdu func (p Producer) checkRequired() error { if p.Logger == nil { - return fmt.Errorf("kafka producer check required, Logger error: [%w]", cerrors.ErrValueRequired) + return fmt.Errorf( + "[kafkaproducer.checkRequired] Logger error: [%w, 'nil' received]", + cerrors.ErrValueRequired, + ) } return nil @@ -50,7 +53,10 @@ type Option func(*Producer) error func WithLogger(l *slog.Logger) Option { return func(p *Producer) error { if l == nil { - return fmt.Errorf("kafka producer WithLogger 
error: [%w]", cerrors.ErrValueRequired) + return fmt.Errorf( + "[kafkaproducer.WithLogger] error: [%w, 'nil' received]", + cerrors.ErrValueRequired, + ) } p.Logger = l @@ -59,13 +65,19 @@ func WithLogger(l *slog.Logger) Option { } // WithKafkaBrokers sets kafka brokers list. -func WithKafkaBrokers(brokers kafkacp.KafkaBrokers) Option { +func WithKafkaBrokers(brokers string) Option { return func(p *Producer) error { - if !brokers.Valid() { - return fmt.Errorf("kafka producer WithKafkaBrokers error: [%w]", cerrors.ErrInvalid) + var kafkaBrokers kafkacp.KafkaBrokers + kafkaBrokers.AddFromString(brokers) + + if !kafkaBrokers.Valid() { + return fmt.Errorf( + "[kafkaproducer.WithKafkaBrokers] error: [%w, '%s' received]", + cerrors.ErrInvalid, brokers, + ) } - p.KafkaBrokers = brokers + p.KafkaBrokers = kafkaBrokers return nil } @@ -75,7 +87,10 @@ func WithKafkaBrokers(brokers kafkacp.KafkaBrokers) Option { func WithMaxRetries(i int) Option { return func(p *Producer) error { if i > math.MaxUint8 || i < 0 { - return fmt.Errorf("kafka producer WithMaxRetries error: [%w]", cerrors.ErrInvalid) + return fmt.Errorf( + "[kafkaproducer.WithMaxRetries] error: [%w, '%d' received, must < %d or > 0]", + cerrors.ErrInvalid, i, math.MaxUint8, + ) } p.MaxRetries = uint8(i) @@ -87,8 +102,19 @@ func WithMaxRetries(i int) Option { func WithBackoff(d time.Duration) Option { return func(p *Producer) error { if d == 0 { - return fmt.Errorf("kafka producer WithBackoff error: [%w]", cerrors.ErrValueRequired) + return fmt.Errorf( + "[kafkaproducer.WithBackoff] error: [%w, '%s' received, 0 is not allowed]", + cerrors.ErrValueRequired, d, + ) + } + + if d < 0 || d > time.Minute { + return fmt.Errorf( + "[kafkaproducer.WithBackoff] error: [%w, '%s' received, must > 0 or < minute]", + cerrors.ErrInvalid, d, + ) } + p.Backoff = d return nil @@ -99,7 +125,10 @@ func WithBackoff(d time.Duration) Option { func WithDialTimeout(d time.Duration) Option { return func(p *Producer) error { if d < 0 { - return 
fmt.Errorf("kafka producer WithDialTimeout error: [%w]", cerrors.ErrInvalid) + return fmt.Errorf( + "[kafkaproducer.WithDialTimeout] error: [%w, '%s' received, must > 0]", + cerrors.ErrInvalid, d, + ) } p.DialTimeout = d @@ -111,7 +140,10 @@ func WithDialTimeout(d time.Duration) Option { func WithReadTimeout(d time.Duration) Option { return func(p *Producer) error { if d < 0 { - return fmt.Errorf("kafka producer WithReadTimeout error: [%w]", cerrors.ErrInvalid) + return fmt.Errorf( + "[kafkaproducer.WithReadTimeout] error: [%w, '%s' received, must > 0]", + cerrors.ErrInvalid, d, + ) } p.ReadTimeout = d @@ -123,7 +155,10 @@ func WithReadTimeout(d time.Duration) Option { func WithWriteTimeout(d time.Duration) Option { return func(p *Producer) error { if d < 0 { - return fmt.Errorf("kafka producer WithWriteTimeout error: [%w]", cerrors.ErrInvalid) + return fmt.Errorf( + "[kafkaproducer.WithWriteTimeout] error: [%w, '%s' received, must > 0]", + cerrors.ErrInvalid, d, + ) } p.WriteTimeout = d @@ -132,12 +167,15 @@ func WithWriteTimeout(d time.Duration) Option { } // WithSaramaProducerFactoryFunc sets a custom factory function for creating Sarama producers. 
-func WithSaramaProducerFactoryFunc(factory SaramaProducerFactoryFunc) Option { +func WithSaramaProducerFactoryFunc(fn SaramaProducerFactoryFunc) Option { return func(p *Producer) error { - if factory == nil { - return fmt.Errorf("kafka producer WithSaramaProducerFactoryFunc error: [%w]", cerrors.ErrValueRequired) + if fn == nil { + return fmt.Errorf( + "[kafkaproducer.WithSaramaProducerFactoryFunc] error: [%w, 'nil' received]", + cerrors.ErrValueRequired, + ) } - p.SaramaProducerFactoryFunc = factory + p.SaramaProducerFactoryFunc = fn return nil } @@ -160,7 +198,7 @@ func New(options ...Option) (sarama.AsyncProducer, error) { for _, option := range options { if err := option(producer); err != nil { - return nil, fmt.Errorf("kafka producer option error: [%w]", err) + return nil, err } } @@ -199,7 +237,10 @@ func New(options ...Option) (sarama.AsyncProducer, error) { backoff *= 2 } if kafkaProducerErr != nil { - return nil, fmt.Errorf("kafka producer sarama.NewAsyncProducer error: [%w]", kafkaProducerErr) + return nil, fmt.Errorf( + "[kafkaproducer.New][SaramaProducerFactoryFunc] error: [%w]", + kafkaProducerErr, + ) } return kafkaProducer, nil diff --git a/internal/kafkacp/kafkaproducer/kafkaproducer_test.go b/internal/kafkacp/kafkaproducer/kafkaproducer_test.go index dcb1918..730c32c 100644 --- a/internal/kafkacp/kafkaproducer/kafkaproducer_test.go +++ b/internal/kafkacp/kafkaproducer/kafkaproducer_test.go @@ -1,7 +1,6 @@ package kafkaproducer_test import ( - "context" "log/slog" "testing" "time" @@ -11,27 +10,12 @@ import ( "github.com/devchain-network/cauldron/internal/cerrors" "github.com/devchain-network/cauldron/internal/kafkacp" "github.com/devchain-network/cauldron/internal/kafkacp/kafkaproducer" + "github.com/devchain-network/cauldron/internal/slogger/mockslogger" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" ) -type mockLogger struct{} - -func (h *mockLogger) Enabled(_ context.Context, _ slog.Level) bool { - return true -} - 
-func (h *mockLogger) Handle(_ context.Context, record slog.Record) error { - return nil -} - -func (h *mockLogger) WithAttrs(attrs []slog.Attr) slog.Handler { - return h -} - -func (h *mockLogger) WithGroup(name string) slog.Handler { - return h -} +var mockLog = slog.New(new(mockslogger.MockLogger)) type mockProducerFactory struct { mock.Mock @@ -48,21 +32,31 @@ func TestNew_MissingRequiredFields(t *testing.T) { assert.Nil(t, producer) } -func TestNew_InvalidKafkaBrokers(t *testing.T) { +func TestNew_NilLogger(t *testing.T) { var kafkaBrokers kafkacp.KafkaBrokers - kafkaBrokers.AddFromString("invalid") + kafkaBrokers.AddFromString("127.0.0.1:9094") + + producer, err := kafkaproducer.New( + kafkaproducer.WithLogger(nil), + ) + assert.ErrorIs(t, err, cerrors.ErrValueRequired) + assert.Nil(t, producer) +} + +func TestNew_InvalidKafkaBrokers(t *testing.T) { + logger := mockLog - logger := slog.New(new(mockLogger)) producer, err := kafkaproducer.New( kafkaproducer.WithLogger(logger), - kafkaproducer.WithKafkaBrokers(kafkaBrokers), + kafkaproducer.WithKafkaBrokers("invalid"), ) assert.ErrorIs(t, err, cerrors.ErrInvalid) assert.Nil(t, producer) } func TestNew_InvalidMaxRetries(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog + producer, err := kafkaproducer.New( kafkaproducer.WithLogger(logger), kafkaproducer.WithMaxRetries(300), @@ -72,7 +66,8 @@ func TestNew_InvalidMaxRetries(t *testing.T) { } func TestNew_InvalidBackoff(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog + producer, err := kafkaproducer.New( kafkaproducer.WithLogger(logger), kafkaproducer.WithBackoff(0), @@ -82,7 +77,8 @@ func TestNew_InvalidBackoff(t *testing.T) { } func TestNew_InvalidDialTimeout(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog + producer, err := kafkaproducer.New( kafkaproducer.WithLogger(logger), kafkaproducer.WithDialTimeout(-1*time.Second), @@ -92,7 +88,8 @@ func TestNew_InvalidDialTimeout(t *testing.T) { } 
func TestNew_InvalidReadTimeout(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog + producer, err := kafkaproducer.New( kafkaproducer.WithLogger(logger), kafkaproducer.WithReadTimeout(-1*time.Second), @@ -102,7 +99,8 @@ func TestNew_InvalidReadTimeout(t *testing.T) { } func TestNew_InvalidWriteTimeout(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog + producer, err := kafkaproducer.New( kafkaproducer.WithLogger(logger), kafkaproducer.WithWriteTimeout(-1*time.Second), @@ -112,7 +110,8 @@ func TestNew_InvalidWriteTimeout(t *testing.T) { } func TestNew_WithNilProducerFactoryFunc(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog + producer, err := kafkaproducer.New( kafkaproducer.WithLogger(logger), kafkaproducer.WithSaramaProducerFactoryFunc(nil), @@ -122,7 +121,7 @@ func TestNew_WithNilProducerFactoryFunc(t *testing.T) { } func TestNew_WithSaramaProducerFactoryFunc_Error(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog mockConfig := mocks.NewTestConfig() mockProducer := mocks.NewAsyncProducer(t, mockConfig) @@ -145,41 +144,8 @@ func TestNew_WithSaramaProducerFactoryFunc_Error(t *testing.T) { mockFactory.AssertExpectations(t) } -func TestNew_NoLogger(t *testing.T) { - var kafkaBrokers kafkacp.KafkaBrokers - kafkaBrokers.AddFromString("127.0.0.1:9094") - - producer, err := kafkaproducer.New( - kafkaproducer.WithKafkaBrokers(kafkaBrokers), - kafkaproducer.WithMaxRetries(2), - kafkaproducer.WithBackoff(time.Second), - kafkaproducer.WithDialTimeout(5*time.Second), - kafkaproducer.WithReadTimeout(5*time.Second), - kafkaproducer.WithWriteTimeout(5*time.Second), - ) - assert.ErrorIs(t, err, cerrors.ErrValueRequired) - assert.Nil(t, producer) -} - -func TestNew_NilLogger(t *testing.T) { - var kafkaBrokers kafkacp.KafkaBrokers - kafkaBrokers.AddFromString("127.0.0.1:9094") - - producer, err := kafkaproducer.New( - kafkaproducer.WithLogger(nil), - 
kafkaproducer.WithKafkaBrokers(kafkaBrokers), - kafkaproducer.WithMaxRetries(2), - kafkaproducer.WithBackoff(time.Second), - kafkaproducer.WithDialTimeout(5*time.Second), - kafkaproducer.WithReadTimeout(5*time.Second), - kafkaproducer.WithWriteTimeout(5*time.Second), - ) - assert.ErrorIs(t, err, cerrors.ErrValueRequired) - assert.Nil(t, producer) -} - func TestNew_Success(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockLog mockConfig := mocks.NewTestConfig() mockProducer := mocks.NewAsyncProducer(t, mockConfig) @@ -192,9 +158,13 @@ func TestNew_Success(t *testing.T) { producer, err := kafkaproducer.New( kafkaproducer.WithLogger(logger), + kafkaproducer.WithKafkaBrokers("127.0.0.1:9094"), kafkaproducer.WithSaramaProducerFactoryFunc(mockFactory.NewAsyncProducer), kafkaproducer.WithMaxRetries(3), kafkaproducer.WithBackoff(100*time.Millisecond), + kafkaproducer.WithDialTimeout(5*time.Second), + kafkaproducer.WithReadTimeout(5*time.Second), + kafkaproducer.WithWriteTimeout(5*time.Second), ) assert.NoError(t, err) From ce129df965f42accdd5e878b43f51e86ace10f99 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?U=C4=9Fur=20=C3=96zy=C4=B1lmazel?= Date: Mon, 27 Jan 2025 13:06:08 +0300 Subject: [PATCH 10/13] refactor - wip --- internal/apiserver/apiserver_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/apiserver/apiserver_test.go b/internal/apiserver/apiserver_test.go index 5937007..3b14750 100644 --- a/internal/apiserver/apiserver_test.go +++ b/internal/apiserver/apiserver_test.go @@ -335,7 +335,7 @@ func TestServer_Start(t *testing.T) { assert.NoError(t, err) }() - time.Sleep(100 * time.Millisecond) + time.Sleep(500 * time.Millisecond) err = server.Stop() assert.NoError(t, err) From 2a675050550f5ae13a2c5dca22b37f3adbb734ca Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?U=C4=9Fur=20=C3=96zy=C4=B1lmazel?= Date: Mon, 27 Jan 2025 15:30:23 +0300 Subject: [PATCH 11/13] refactor - wip --- internal/apiserver/apiserver.go | 24 
+++++++++--------- internal/kafkacp/kafkacp.go | 12 +++++---- internal/slogger/slogger.go | 44 ++++++++++++++++++++++----------- internal/storage/storage.go | 12 +++++---- 4 files changed, 56 insertions(+), 36 deletions(-) diff --git a/internal/apiserver/apiserver.go b/internal/apiserver/apiserver.go index 5017b92..6f7cfce 100644 --- a/internal/apiserver/apiserver.go +++ b/internal/apiserver/apiserver.go @@ -54,16 +54,18 @@ type Server struct { IdleTimeout time.Duration } -var validHTTPMethods = []string{ - fasthttp.MethodGet, - fasthttp.MethodHead, - fasthttp.MethodPost, - fasthttp.MethodPut, - fasthttp.MethodPatch, - fasthttp.MethodDelete, - fasthttp.MethodConnect, - fasthttp.MethodOptions, - fasthttp.MethodTrace, +func validHTTPMethods() []string { + return []string{ + fasthttp.MethodGet, + fasthttp.MethodHead, + fasthttp.MethodPost, + fasthttp.MethodPut, + fasthttp.MethodPatch, + fasthttp.MethodDelete, + fasthttp.MethodConnect, + fasthttp.MethodOptions, + fasthttp.MethodTrace, + } } // Start starts the fast http server. 
@@ -132,7 +134,7 @@ func WithHTTPHandler(method, path string, handler fasthttp.RequestHandler) Optio ) } - if !slices.Contains(validHTTPMethods, method) { + if !slices.Contains(validHTTPMethods(), method) { return fmt.Errorf( "[apiserver.WithHTTPHandler] method error: ['%s' is %w]", method, cerrors.ErrInvalid, diff --git a/internal/kafkacp/kafkacp.go b/internal/kafkacp/kafkacp.go index 5c4ec3f..bf4b8af 100644 --- a/internal/kafkacp/kafkacp.go +++ b/internal/kafkacp/kafkacp.go @@ -16,10 +16,12 @@ const ( KafkaTopicIdentifierBitBucket KafkaTopicIdentifier = "bitbucket" ) -var validKafkaTopicIdentifiers = []KafkaTopicIdentifier{ - KafkaTopicIdentifierGitHub, - KafkaTopicIdentifierGitLab, - KafkaTopicIdentifierBitBucket, +func validKafkaTopicIdentifiers() []KafkaTopicIdentifier { + return []KafkaTopicIdentifier{ + KafkaTopicIdentifierGitHub, + KafkaTopicIdentifierGitLab, + KafkaTopicIdentifierBitBucket, + } } // KafkaTopicIdentifier represents custom type for kafka topic names. @@ -35,7 +37,7 @@ func (s KafkaTopicIdentifier) Valid() bool { return false } - return slices.Contains(validKafkaTopicIdentifiers, s) + return slices.Contains(validKafkaTopicIdentifiers(), s) } // TCPAddr represents tcp address as string. 
diff --git a/internal/slogger/slogger.go b/internal/slogger/slogger.go index 8055215..7701ea6 100644 --- a/internal/slogger/slogger.go +++ b/internal/slogger/slogger.go @@ -32,7 +32,10 @@ type Option func(*JSONLogger) error func WithLogLevel(l slog.Leveler) Option { return func(jl *JSONLogger) error { if l == nil { - return fmt.Errorf("slogger WithLogLevel error: [%w]", cerrors.ErrValueRequired) + return fmt.Errorf( + "[slogger.WithLogLevel] error: [%w, 'nil' received]", + cerrors.ErrValueRequired, + ) } jl.Level = l @@ -40,27 +43,35 @@ func WithLogLevel(l slog.Leveler) Option { } } +func validLogLevels() map[string]slog.Level { + return map[string]slog.Level{ + "DEBUG": LevelDebug, + "INFO": LevelInfo, + "WARN": LevelWarn, + "ERROR": LevelError, + } +} + // WithLogLevelName sets log level from level name, such as INFO. -func WithLogLevelName(n string) Option { +func WithLogLevelName(s string) Option { return func(jl *JSONLogger) error { - if n == "" { - return fmt.Errorf("slogger WithLogLevelName error: [%w]", cerrors.ErrValueRequired) - } - - logLevelMap := map[string]slog.Level{ - "DEBUG": LevelDebug, - "INFO": LevelInfo, - "WARN": LevelWarn, - "ERROR": LevelError, + if s == "" { + return fmt.Errorf( + "[slogger.WithLogLevelName] error: [%w, empty string received]", + cerrors.ErrValueRequired, + ) } - if level, exists := logLevelMap[n]; exists { + if level, exists := validLogLevels()[s]; exists { jl.Level = level return nil } - return fmt.Errorf("slogger WithLogLevelName error: '%s' [%w]", n, cerrors.ErrInvalid) + return fmt.Errorf( + "[slogger.WithLogLevelName] error: [%w, '%s' received]", + cerrors.ErrInvalid, s, + ) } } @@ -68,7 +79,10 @@ func WithLogLevelName(n string) Option { func WithWriter(w io.Writer) Option { return func(jl *JSONLogger) error { if w == nil { - return fmt.Errorf("slogger WithWriter error: [%w]", cerrors.ErrValueRequired) + return fmt.Errorf( + "[slogger.WithWriter] error: [%w, 'nil' received]", + cerrors.ErrValueRequired, + ) } 
jl.Writer = w @@ -82,7 +96,7 @@ func New(options ...Option) (*slog.Logger, error) { for _, option := range options { if err := option(jlogger); err != nil { - return nil, fmt.Errorf("slogger option error: [%w]", err) + return nil, err } } diff --git a/internal/storage/storage.go b/internal/storage/storage.go index b596938..97520f9 100644 --- a/internal/storage/storage.go +++ b/internal/storage/storage.go @@ -19,10 +19,12 @@ const ( GitProviderBitbucket GitProvider = "bitbucket" ) -var validGitProviders = []GitProvider{ - GitProviderGitHub, - GitProviderGitLab, - GitProviderBitbucket, +func validGitProviders() []GitProvider { + return []GitProvider{ + GitProviderGitHub, + GitProviderGitLab, + GitProviderBitbucket, + } } // PGPooler defines pgxpool behaviours. @@ -57,7 +59,7 @@ func (g GitProvider) String() string { // Valid checks if the GitProvider is valid. func (g GitProvider) Valid() bool { - for _, provider := range validGitProviders { + for _, provider := range validGitProviders() { if g == provider { return true } From 538815a27467c8ddb93215f7473b1c103275fb4e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?U=C4=9Fur=20=C3=96zy=C4=B1lmazel?= Date: Mon, 27 Jan 2025 20:43:02 +0300 Subject: [PATCH 12/13] fix tests --- cmd/server/main.go | 2 +- internal/apiserver/apiserver_test.go | 38 ++++---- .../kafkaconsumer/kafkaconsumer_test.go | 72 +++++++------- .../kafkaconsumergroup_test.go | 80 ++++++++-------- .../kafkaproducer/kafkaproducer_test.go | 21 ++-- internal/slogger/mockslogger/mockslogger.go | 6 ++ .../storage/githubstorage/githubstorage.go | 55 ++++++++--- .../githubstorage/githubstorage_test.go | 44 +++------ .../githubwebhookhandler.go | 46 ++++++--- .../githubwebhookhandler_test.go | 95 ++++++++----------- .../healthcheckhandler/healthcheckhandler.go | 12 ++- 11 files changed, 243 insertions(+), 228 deletions(-) diff --git a/cmd/server/main.go b/cmd/server/main.go index 6319f7c..9f9257a 100644 --- a/cmd/server/main.go +++ b/cmd/server/main.go @@ -90,7 +90,7 @@ 
func Run() error { githubWebhookHandler, err := githubwebhookhandler.New( githubwebhookhandler.WithLogger(logger), - githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifierGitHub), + githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), githubwebhookhandler.WithWebhookSecret(*githubHMACSecret), githubwebhookhandler.WithProducerGitHubMessageQueue(githubWebhookMessageQueue), ) diff --git a/internal/apiserver/apiserver_test.go b/internal/apiserver/apiserver_test.go index 3b14750..020b7cd 100644 --- a/internal/apiserver/apiserver_test.go +++ b/internal/apiserver/apiserver_test.go @@ -1,7 +1,6 @@ package apiserver_test import ( - "log/slog" "sync" "testing" "time" @@ -14,8 +13,6 @@ import ( "github.com/valyala/fasthttp" ) -var mockLog = slog.New(new(mockslogger.MockLogger)) - func TestNew_NoParams(t *testing.T) { server, err := apiserver.New() @@ -33,7 +30,7 @@ func TestNew_NilLogger(t *testing.T) { } func TestNew_EmptyListenAddr(t *testing.T) { - logger := mockLog + logger := mockslogger.New() server, err := apiserver.New( apiserver.WithLogger(logger), @@ -45,7 +42,7 @@ func TestNew_EmptyListenAddr(t *testing.T) { } func TestNew_InvalidListenAddr(t *testing.T) { - logger := mockLog + logger := mockslogger.New() server, err := apiserver.New( apiserver.WithLogger(logger), @@ -57,7 +54,7 @@ func TestNew_InvalidListenAddr(t *testing.T) { } func TestNew_InvalidKafkaTopic(t *testing.T) { - logger := mockLog + logger := mockslogger.New() server, err := apiserver.New( apiserver.WithLogger(logger), @@ -69,7 +66,7 @@ func TestNew_InvalidKafkaTopic(t *testing.T) { } func TestNew_InvalidBrokers(t *testing.T) { - logger := mockLog + logger := mockslogger.New() server, err := apiserver.New( apiserver.WithLogger(logger), @@ -81,7 +78,7 @@ func TestNew_InvalidBrokers(t *testing.T) { } func TestNew_InvalidReadTimeout(t *testing.T) { - logger := mockLog + logger := mockslogger.New() server, err := apiserver.New( apiserver.WithLogger(logger), @@ -93,7 +90,7 @@ 
func TestNew_InvalidReadTimeout(t *testing.T) { } func TestNew_InvalidWriteTimeout(t *testing.T) { - logger := mockLog + logger := mockslogger.New() server, err := apiserver.New( apiserver.WithLogger(logger), @@ -105,7 +102,7 @@ func TestNew_InvalidWriteTimeout(t *testing.T) { } func TestNew_InvalidIdleTimeout(t *testing.T) { - logger := mockLog + logger := mockslogger.New() server, err := apiserver.New( apiserver.WithLogger(logger), @@ -117,7 +114,7 @@ func TestNew_InvalidIdleTimeout(t *testing.T) { } func TestNew_NilHTTPHandler(t *testing.T) { - logger := mockLog + logger := mockslogger.New() server, err := apiserver.New( apiserver.WithLogger(logger), @@ -134,7 +131,7 @@ func TestNew_NilHTTPHandler(t *testing.T) { } func TestNew_InvalidKafkaTopic_check(t *testing.T) { - logger := mockLog + logger := mockslogger.New() server, err := apiserver.New( apiserver.WithLogger(logger), @@ -155,7 +152,7 @@ func TestNew_InvalidKafkaTopic_check(t *testing.T) { } func TestNew_MissingArgsHTTPHandler_method(t *testing.T) { - logger := mockLog + logger := mockslogger.New() server, err := apiserver.New( apiserver.WithLogger(logger), @@ -176,7 +173,7 @@ func TestNew_MissingArgsHTTPHandler_method(t *testing.T) { } func TestNew_InvalidArgsHTTPHandler_method(t *testing.T) { - logger := mockLog + logger := mockslogger.New() server, err := apiserver.New( apiserver.WithLogger(logger), @@ -197,7 +194,7 @@ func TestNew_InvalidArgsHTTPHandler_method(t *testing.T) { } func TestNew_MissingArgsHTTPHandler_path(t *testing.T) { - logger := mockLog + logger := mockslogger.New() server, err := apiserver.New( apiserver.WithLogger(logger), @@ -218,7 +215,7 @@ func TestNew_MissingArgsHTTPHandler_path(t *testing.T) { } func TestNew_MissingArgsHTTPHandler_handler(t *testing.T) { - logger := mockLog + logger := mockslogger.New() server, err := apiserver.New( apiserver.WithLogger(logger), @@ -239,7 +236,7 @@ func TestNew_MissingArgsHTTPHandler_handler(t *testing.T) { } func TestHttpRouter_NotFound(t 
*testing.T) { - logger := mockLog + logger := mockslogger.New() server, err := apiserver.New( apiserver.WithLogger(logger), @@ -262,7 +259,7 @@ func TestHttpRouter_NotFound(t *testing.T) { } func TestHttpRouter_MethodNotAllowed(t *testing.T) { - logger := mockLog + logger := mockslogger.New() server, err := apiserver.New( apiserver.WithLogger(logger), apiserver.WithKafkaGitHubTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), @@ -284,7 +281,7 @@ func TestHttpRouter_MethodNotAllowed(t *testing.T) { } func TestHttpRouter_ValidRouteAndMethod(t *testing.T) { - logger := mockLog + logger := mockslogger.New() server, err := apiserver.New( apiserver.WithLogger(logger), apiserver.WithKafkaGitHubTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), @@ -310,9 +307,10 @@ func TestHttpRouter_ValidRouteAndMethod(t *testing.T) { } func TestServer_Start(t *testing.T) { - logger := mockLog + logger := mockslogger.New() server, err := apiserver.New( apiserver.WithLogger(logger), + apiserver.WithListenAddr(":0"), apiserver.WithKafkaGitHubTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), apiserver.WithHTTPHandler( fasthttp.MethodGet, diff --git a/internal/kafkacp/kafkaconsumer/kafkaconsumer_test.go b/internal/kafkacp/kafkaconsumer/kafkaconsumer_test.go index c7568a9..620ae4c 100644 --- a/internal/kafkacp/kafkaconsumer/kafkaconsumer_test.go +++ b/internal/kafkacp/kafkaconsumer/kafkaconsumer_test.go @@ -2,7 +2,6 @@ package kafkaconsumer_test import ( "context" - "log/slog" "os" "sync" "syscall" @@ -28,12 +27,11 @@ func (m *mockConsumerFactory) NewConsumer(brokers []string, config *sarama.Confi return args.Get(0).(sarama.Consumer), args.Error(1) } -var ( - mockLog = slog.New(new(mockslogger.MockLogger)) - mockProcessMessage = func(ctx context.Context, msg *sarama.ConsumerMessage) error { +func mockProcessMessageFunc() kafkaconsumer.ProcessMessageFunc { + return func(ctx context.Context, msg *sarama.ConsumerMessage) error { return nil } -) +} func TestNew_MissingRequiredFields(t 
*testing.T) { consumer, err := kafkaconsumer.New() @@ -52,7 +50,7 @@ func TestNew_NilLogger(t *testing.T) { } func TestNew_NoProcessMessageFunc(t *testing.T) { - logger := mockLog + logger := mockslogger.New() consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), @@ -63,7 +61,7 @@ func TestNew_NoProcessMessageFunc(t *testing.T) { } func TestNew_NilProcessMessageFunc(t *testing.T) { - logger := mockLog + logger := mockslogger.New() consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), @@ -75,11 +73,11 @@ func TestNew_NilProcessMessageFunc(t *testing.T) { } func TestNew_EmptyTopic(t *testing.T) { - logger := mockLog + logger := mockslogger.New() consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), - kafkaconsumer.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumer.WithProcessMessageFunc(mockProcessMessageFunc()), ) assert.ErrorIs(t, err, cerrors.ErrInvalid) @@ -87,11 +85,11 @@ func TestNew_EmptyTopic(t *testing.T) { } func TestNew_InvalidTopic(t *testing.T) { - logger := mockLog + logger := mockslogger.New() consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), - kafkaconsumer.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumer.WithProcessMessageFunc(mockProcessMessageFunc()), kafkaconsumer.WithTopic("invalid"), ) @@ -100,11 +98,11 @@ func TestNew_InvalidTopic(t *testing.T) { } func TestNew_InvalidPartition(t *testing.T) { - logger := mockLog + logger := mockslogger.New() consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), - kafkaconsumer.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumer.WithProcessMessageFunc(mockProcessMessageFunc()), kafkaconsumer.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumer.WithPartition(2147483648), ) @@ -114,11 +112,11 @@ func TestNew_InvalidPartition(t *testing.T) { } func TestNew_InvalidBrokers(t *testing.T) { - logger := mockLog + logger := mockslogger.New() consumer, err := kafkaconsumer.New( 
kafkaconsumer.WithLogger(logger), - kafkaconsumer.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumer.WithProcessMessageFunc(mockProcessMessageFunc()), kafkaconsumer.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumer.WithKafkaBrokers("invalid"), ) @@ -128,11 +126,11 @@ func TestNew_InvalidBrokers(t *testing.T) { } func TestNew_InvalidDialTimeout(t *testing.T) { - logger := mockLog + logger := mockslogger.New() consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), - kafkaconsumer.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumer.WithProcessMessageFunc(mockProcessMessageFunc()), kafkaconsumer.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumer.WithKafkaBrokers("127.0.0.1:9094"), kafkaconsumer.WithDialTimeout(-1*time.Second), @@ -143,11 +141,11 @@ func TestNew_InvalidDialTimeout(t *testing.T) { } func TestNew_InvalidReadTimeout(t *testing.T) { - logger := mockLog + logger := mockslogger.New() consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), - kafkaconsumer.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumer.WithProcessMessageFunc(mockProcessMessageFunc()), kafkaconsumer.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumer.WithKafkaBrokers("127.0.0.1:9094"), kafkaconsumer.WithReadTimeout(-1*time.Second), @@ -158,11 +156,11 @@ func TestNew_InvalidReadTimeout(t *testing.T) { } func TestNew_InvalidWriteTimeout(t *testing.T) { - logger := mockLog + logger := mockslogger.New() consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), - kafkaconsumer.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumer.WithProcessMessageFunc(mockProcessMessageFunc()), kafkaconsumer.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumer.WithKafkaBrokers("127.0.0.1:9094"), kafkaconsumer.WithWriteTimeout(-1*time.Second), @@ -173,11 +171,11 @@ func TestNew_InvalidWriteTimeout(t *testing.T) { } func TestNew_ZeroBackoff(t *testing.T) { - logger := 
mockLog + logger := mockslogger.New() consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), - kafkaconsumer.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumer.WithProcessMessageFunc(mockProcessMessageFunc()), kafkaconsumer.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumer.WithKafkaBrokers("127.0.0.1:9094"), kafkaconsumer.WithBackoff(0), @@ -188,11 +186,11 @@ func TestNew_ZeroBackoff(t *testing.T) { } func TestNew_InvalidBackoff(t *testing.T) { - logger := mockLog + logger := mockslogger.New() consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), - kafkaconsumer.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumer.WithProcessMessageFunc(mockProcessMessageFunc()), kafkaconsumer.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumer.WithKafkaBrokers("127.0.0.1:9094"), kafkaconsumer.WithBackoff(2*time.Minute), @@ -203,11 +201,11 @@ func TestNew_InvalidBackoff(t *testing.T) { } func TestNew_InvalidMaxRetries(t *testing.T) { - logger := mockLog + logger := mockslogger.New() consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), - kafkaconsumer.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumer.WithProcessMessageFunc(mockProcessMessageFunc()), kafkaconsumer.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumer.WithKafkaBrokers("127.0.0.1:9094"), kafkaconsumer.WithMaxRetries(256), @@ -218,11 +216,11 @@ func TestNew_InvalidMaxRetries(t *testing.T) { } func TestNew_NilSaramaConsumerFactoryFunc(t *testing.T) { - logger := mockLog + logger := mockslogger.New() consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), - kafkaconsumer.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumer.WithProcessMessageFunc(mockProcessMessageFunc()), kafkaconsumer.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumer.WithKafkaBrokers("127.0.0.1:9094"), kafkaconsumer.WithSaramaConsumerFactoryFunc(nil), @@ -233,7 +231,7 @@ func 
TestNew_NilSaramaConsumerFactoryFunc(t *testing.T) { } func TestNew_WithSaramaConsumerFactoryFunc_Error(t *testing.T) { - logger := mockLog + logger := mockslogger.New() mockConfig := mocks.NewTestConfig() mockSarama := mocks.NewConsumer(t, mockConfig) @@ -243,7 +241,7 @@ func TestNew_WithSaramaConsumerFactoryFunc_Error(t *testing.T) { consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), - kafkaconsumer.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumer.WithProcessMessageFunc(mockProcessMessageFunc()), kafkaconsumer.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumer.WithBackoff(100*time.Millisecond), kafkaconsumer.WithSaramaConsumerFactoryFunc(mockFactory.NewConsumer), @@ -258,7 +256,7 @@ func TestNew_WithSaramaConsumerFactoryFunc_Error(t *testing.T) { } func TestNew_WithSaramaConsumerFactoryFunc_Success(t *testing.T) { - logger := mockLog + logger := mockslogger.New() mockConfig := mocks.NewTestConfig() mockSarama := mocks.NewConsumer(t, mockConfig) @@ -269,7 +267,7 @@ func TestNew_WithSaramaConsumerFactoryFunc_Success(t *testing.T) { consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), - kafkaconsumer.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumer.WithProcessMessageFunc(mockProcessMessageFunc()), kafkaconsumer.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumer.WithPartition(0), kafkaconsumer.WithDialTimeout(10*time.Second), @@ -288,7 +286,7 @@ func TestNew_WithSaramaConsumerFactoryFunc_Success(t *testing.T) { } func TestConsumer_Consume_Success(t *testing.T) { - logger := mockLog + logger := mockslogger.New() mockConfig := mocks.NewTestConfig() mockSarama := mocks.NewConsumer(t, mockConfig) @@ -305,7 +303,7 @@ func TestConsumer_Consume_Success(t *testing.T) { consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), - kafkaconsumer.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumer.WithProcessMessageFunc(mockProcessMessageFunc()), 
kafkaconsumer.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumer.WithSaramaConsumerFactoryFunc(mockFactory.NewConsumer), kafkaconsumer.WithMaxRetries(1), @@ -334,7 +332,7 @@ func TestConsumer_Consume_Success(t *testing.T) { } func TestConsumer_Consume_PartitionConsumeError(t *testing.T) { - logger := mockLog + logger := mockslogger.New() mockConfig := mocks.NewTestConfig() mockSarama := mocks.NewConsumer(t, mockConfig) @@ -346,7 +344,7 @@ func TestConsumer_Consume_PartitionConsumeError(t *testing.T) { consumer, err := kafkaconsumer.New( kafkaconsumer.WithLogger(logger), - kafkaconsumer.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumer.WithProcessMessageFunc(mockProcessMessageFunc()), kafkaconsumer.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumer.WithSaramaConsumerFactoryFunc(mockFactory.NewConsumer), kafkaconsumer.WithMaxRetries(1), diff --git a/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go b/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go index 6af610a..d7a8662 100644 --- a/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go +++ b/internal/kafkacp/kafkaconsumergroup/kafkaconsumergroup_test.go @@ -3,7 +3,6 @@ package kafkaconsumergroup_test import ( "context" "errors" - "log/slog" "os" "sync" "syscall" @@ -19,10 +18,11 @@ import ( "github.com/stretchr/testify/mock" ) -var mockProcessMessage = func(ctx context.Context, msg *sarama.ConsumerMessage) error { - return nil +func mockProcessMessageFunc() kafkaconsumergroup.ProcessMessageFunc { + return func(ctx context.Context, msg *sarama.ConsumerMessage) error { + return nil + } } -var mockLog = slog.New(new(mockslogger.MockLogger)) // mockConsumerGroup ---------------------------------------------------------- type mockConsumerGroup struct { @@ -91,7 +91,7 @@ func TestNew_NilLogger(t *testing.T) { } func TestNew_NoProcessMessageFunc(t *testing.T) { - logger := mockLog + logger := mockslogger.New() consumer, err := 
kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), @@ -102,7 +102,7 @@ func TestNew_NoProcessMessageFunc(t *testing.T) { } func TestNew_NilProcessMessageFunc(t *testing.T) { - logger := mockLog + logger := mockslogger.New() consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), @@ -114,11 +114,11 @@ func TestNew_NilProcessMessageFunc(t *testing.T) { } func TestNew_NoGroupName(t *testing.T) { - logger := mockLog + logger := mockslogger.New() consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessageFunc()), ) assert.ErrorIs(t, err, cerrors.ErrValueRequired) @@ -126,11 +126,11 @@ func TestNew_NoGroupName(t *testing.T) { } func TestNew_EmptyGroupName(t *testing.T) { - logger := mockLog + logger := mockslogger.New() consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessageFunc()), kafkaconsumergroup.WithKafkaGroupName(""), ) @@ -139,11 +139,11 @@ func TestNew_EmptyGroupName(t *testing.T) { } func TestNew_NoTopic(t *testing.T) { - logger := mockLog + logger := mockslogger.New() consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessageFunc()), kafkaconsumergroup.WithKafkaGroupName("github-group"), ) @@ -152,11 +152,11 @@ func TestNew_NoTopic(t *testing.T) { } func TestNew_InvalidTopic(t *testing.T) { - logger := mockLog + logger := mockslogger.New() consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessageFunc()), 
kafkaconsumergroup.WithKafkaGroupName("github-group"), kafkaconsumergroup.WithTopic("invalid"), ) @@ -166,11 +166,11 @@ func TestNew_InvalidTopic(t *testing.T) { } func TestNew_InvalidBrokers(t *testing.T) { - logger := mockLog + logger := mockslogger.New() consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessageFunc()), kafkaconsumergroup.WithKafkaGroupName("github-group"), kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumergroup.WithKafkaBrokers("invalid"), @@ -181,11 +181,11 @@ func TestNew_InvalidBrokers(t *testing.T) { } func TestNew_InvalidDialTimeout(t *testing.T) { - logger := mockLog + logger := mockslogger.New() consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessageFunc()), kafkaconsumergroup.WithKafkaGroupName("github-group"), kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumergroup.WithKafkaBrokers(kafkacp.DefaultKafkaBrokers), @@ -197,11 +197,11 @@ func TestNew_InvalidDialTimeout(t *testing.T) { } func TestNew_InvalidReadTimeout(t *testing.T) { - logger := mockLog + logger := mockslogger.New() consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessageFunc()), kafkaconsumergroup.WithKafkaGroupName("github-group"), kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumergroup.WithKafkaBrokers(kafkacp.DefaultKafkaBrokers), @@ -213,11 +213,11 @@ func TestNew_InvalidReadTimeout(t *testing.T) { } func TestNew_InvalidWriteTimeout(t *testing.T) { - logger := mockLog + logger := mockslogger.New() consumer, err := 
kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessageFunc()), kafkaconsumergroup.WithKafkaGroupName("github-group"), kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumergroup.WithKafkaBrokers(kafkacp.DefaultKafkaBrokers), @@ -229,11 +229,11 @@ func TestNew_InvalidWriteTimeout(t *testing.T) { } func TestNew_ZeroBackoff(t *testing.T) { - logger := mockLog + logger := mockslogger.New() consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessageFunc()), kafkaconsumergroup.WithKafkaGroupName("github-group"), kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumergroup.WithKafkaBrokers(kafkacp.DefaultKafkaBrokers), @@ -245,11 +245,11 @@ func TestNew_ZeroBackoff(t *testing.T) { } func TestNew_InvalidBackoff(t *testing.T) { - logger := mockLog + logger := mockslogger.New() consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessageFunc()), kafkaconsumergroup.WithKafkaGroupName("github-group"), kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumergroup.WithKafkaBrokers(kafkacp.DefaultKafkaBrokers), @@ -261,11 +261,11 @@ func TestNew_InvalidBackoff(t *testing.T) { } func TestNew_InvalidMaxRetries(t *testing.T) { - logger := mockLog + logger := mockslogger.New() consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessageFunc()), kafkaconsumergroup.WithKafkaGroupName("github-group"), 
kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumergroup.WithKafkaBrokers(kafkacp.DefaultKafkaBrokers), @@ -277,11 +277,11 @@ func TestNew_InvalidMaxRetries(t *testing.T) { } func TestNew_InvalidKafkaVersion(t *testing.T) { - logger := mockLog + logger := mockslogger.New() consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessageFunc()), kafkaconsumergroup.WithKafkaGroupName("github-group"), kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumergroup.WithKafkaBrokers(kafkacp.DefaultKafkaBrokers), @@ -293,11 +293,11 @@ func TestNew_InvalidKafkaVersion(t *testing.T) { } func TestNew_NilSaramaConsumerGroupHandler(t *testing.T) { - logger := mockLog + logger := mockslogger.New() consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessageFunc()), kafkaconsumergroup.WithKafkaGroupName("github-group"), kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumergroup.WithKafkaBrokers(kafkacp.DefaultKafkaBrokers), @@ -309,11 +309,11 @@ func TestNew_NilSaramaConsumerGroupHandler(t *testing.T) { } func TestNew_NilSaramaConsumerGroupFactoryFunc(t *testing.T) { - logger := mockLog + logger := mockslogger.New() consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessageFunc()), kafkaconsumergroup.WithKafkaGroupName("github-group"), kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumergroup.WithKafkaBrokers(kafkacp.DefaultKafkaBrokers), @@ -331,7 +331,7 @@ func TestNew_NilSaramaConsumerGroupFactoryFunc(t 
*testing.T) { } func TestNew_SaramaConsumerGroupFactoryFunc_Error(t *testing.T) { - logger := mockLog + logger := mockslogger.New() consumerGroup := &mockConsumerGroup{} consumerGroupFactory := &mockConsumerGroupFactory{} @@ -344,7 +344,7 @@ func TestNew_SaramaConsumerGroupFactoryFunc_Error(t *testing.T) { consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessageFunc()), kafkaconsumergroup.WithKafkaGroupName("github-group"), kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumergroup.WithBackoff(100*time.Millisecond), @@ -359,7 +359,7 @@ func TestNew_SaramaConsumerGroupFactoryFunc_Error(t *testing.T) { } func TestNew_SaramaConsumerGroupFactoryFunc_Success(t *testing.T) { - logger := mockLog + logger := mockslogger.New() consumerGroup := &mockConsumerGroup{} consumerGroupFactory := &mockConsumerGroupFactory{} @@ -371,7 +371,7 @@ func TestNew_SaramaConsumerGroupFactoryFunc_Success(t *testing.T) { consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessage), + kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessageFunc()), kafkaconsumergroup.WithKafkaGroupName("github-group"), kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumergroup.WithBackoff(100*time.Millisecond), @@ -386,7 +386,7 @@ func TestNew_SaramaConsumerGroupFactoryFunc_Success(t *testing.T) { } func TestNew_Consume_Success(t *testing.T) { - logger := mockLog + logger := mockslogger.New() consumerGroup := &mockConsumerGroup{} consumerGroup.On("Errors").Return((<-chan error)(make(chan error))) @@ -403,7 +403,7 @@ func TestNew_Consume_Success(t *testing.T) { consumer, err := kafkaconsumergroup.New( kafkaconsumergroup.WithLogger(logger), - kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessage), + 
kafkaconsumergroup.WithProcessMessageFunc(mockProcessMessageFunc()), kafkaconsumergroup.WithKafkaGroupName("github-group"), kafkaconsumergroup.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), kafkaconsumergroup.WithBackoff(100*time.Millisecond), diff --git a/internal/kafkacp/kafkaproducer/kafkaproducer_test.go b/internal/kafkacp/kafkaproducer/kafkaproducer_test.go index 730c32c..a1e0b22 100644 --- a/internal/kafkacp/kafkaproducer/kafkaproducer_test.go +++ b/internal/kafkacp/kafkaproducer/kafkaproducer_test.go @@ -1,7 +1,6 @@ package kafkaproducer_test import ( - "log/slog" "testing" "time" @@ -15,8 +14,6 @@ import ( "github.com/stretchr/testify/mock" ) -var mockLog = slog.New(new(mockslogger.MockLogger)) - type mockProducerFactory struct { mock.Mock } @@ -44,7 +41,7 @@ func TestNew_NilLogger(t *testing.T) { } func TestNew_InvalidKafkaBrokers(t *testing.T) { - logger := mockLog + logger := mockslogger.New() producer, err := kafkaproducer.New( kafkaproducer.WithLogger(logger), @@ -55,7 +52,7 @@ func TestNew_InvalidKafkaBrokers(t *testing.T) { } func TestNew_InvalidMaxRetries(t *testing.T) { - logger := mockLog + logger := mockslogger.New() producer, err := kafkaproducer.New( kafkaproducer.WithLogger(logger), @@ -66,7 +63,7 @@ func TestNew_InvalidMaxRetries(t *testing.T) { } func TestNew_InvalidBackoff(t *testing.T) { - logger := mockLog + logger := mockslogger.New() producer, err := kafkaproducer.New( kafkaproducer.WithLogger(logger), @@ -77,7 +74,7 @@ func TestNew_InvalidBackoff(t *testing.T) { } func TestNew_InvalidDialTimeout(t *testing.T) { - logger := mockLog + logger := mockslogger.New() producer, err := kafkaproducer.New( kafkaproducer.WithLogger(logger), @@ -88,7 +85,7 @@ func TestNew_InvalidDialTimeout(t *testing.T) { } func TestNew_InvalidReadTimeout(t *testing.T) { - logger := mockLog + logger := mockslogger.New() producer, err := kafkaproducer.New( kafkaproducer.WithLogger(logger), @@ -99,7 +96,7 @@ func TestNew_InvalidReadTimeout(t *testing.T) { } 
func TestNew_InvalidWriteTimeout(t *testing.T) { - logger := mockLog + logger := mockslogger.New() producer, err := kafkaproducer.New( kafkaproducer.WithLogger(logger), @@ -110,7 +107,7 @@ func TestNew_InvalidWriteTimeout(t *testing.T) { } func TestNew_WithNilProducerFactoryFunc(t *testing.T) { - logger := mockLog + logger := mockslogger.New() producer, err := kafkaproducer.New( kafkaproducer.WithLogger(logger), @@ -121,7 +118,7 @@ func TestNew_WithNilProducerFactoryFunc(t *testing.T) { } func TestNew_WithSaramaProducerFactoryFunc_Error(t *testing.T) { - logger := mockLog + logger := mockslogger.New() mockConfig := mocks.NewTestConfig() mockProducer := mocks.NewAsyncProducer(t, mockConfig) @@ -145,7 +142,7 @@ func TestNew_WithSaramaProducerFactoryFunc_Error(t *testing.T) { } func TestNew_Success(t *testing.T) { - logger := mockLog + logger := mockslogger.New() mockConfig := mocks.NewTestConfig() mockProducer := mocks.NewAsyncProducer(t, mockConfig) diff --git a/internal/slogger/mockslogger/mockslogger.go b/internal/slogger/mockslogger/mockslogger.go index 54e64d1..0a3a951 100644 --- a/internal/slogger/mockslogger/mockslogger.go +++ b/internal/slogger/mockslogger/mockslogger.go @@ -6,6 +6,8 @@ import ( "log/slog" ) +var _ slog.Handler = (*MockLogger)(nil) // compile time proof + type MockLogger struct{} func (h *MockLogger) Enabled(_ context.Context, _ slog.Level) bool { @@ -23,3 +25,7 @@ func (h *MockLogger) WithAttrs(_ []slog.Attr) slog.Handler { func (h *MockLogger) WithGroup(_ string) slog.Handler { return h } + +func New() *slog.Logger { + return slog.New(new(MockLogger)) +} diff --git a/internal/storage/githubstorage/githubstorage.go b/internal/storage/githubstorage/githubstorage.go index e16a741..9e49e23 100644 --- a/internal/storage/githubstorage/githubstorage.go +++ b/internal/storage/githubstorage/githubstorage.go @@ -63,9 +63,13 @@ func (GitHubStorage) prepareGitHubPayload(message *sarama.ConsumerMessage) (*Git githubStorage.KafkaPartition = 
message.Partition githubStorage.KafkaOffset = message.Offset - deliveryID, err := uuid.Parse(string(message.Key)) + messageKey := string(message.Key) + deliveryID, err := uuid.Parse(messageKey) if err != nil { - return nil, fmt.Errorf("githubstorage prepareGitHubPayload deliveryID error: [%w]", err) + return nil, fmt.Errorf( + "[githubstorage.prepareGitHubPayload] deliveryID error: ['%s' received, %w]", + messageKey, err, + ) } githubStorage.DeliveryID = deliveryID @@ -90,13 +94,19 @@ func (GitHubStorage) prepareGitHubPayload(message *sarama.ConsumerMessage) (*Git case "target-id": targetID, targetIDErr = strconv.ParseUint(value, 10, 64) if targetIDErr != nil { - return nil, fmt.Errorf("githubstorage prepareGitHubPayload targetID error: [%w]", targetIDErr) + return nil, fmt.Errorf( + "[githubstorage.prepareGitHubPayload] targetID error: ['%s' received, %w]", + value, targetIDErr, + ) } githubStorage.TargetID = targetID case "hook-id": hookID, hookIDErr = strconv.ParseUint(value, 10, 64) if hookIDErr != nil { - return nil, fmt.Errorf("githubstorage prepareGitHubPayload hookID error: [%w]", hookIDErr) + return nil, fmt.Errorf( + "[githubstorage.prepareGitHubPayload] hookID error: ['%s' received, %w]", + value, hookIDErr, + ) } githubStorage.HookID = hookID case "sender-login": @@ -104,7 +114,10 @@ func (GitHubStorage) prepareGitHubPayload(message *sarama.ConsumerMessage) (*Git case "sender-id": userID, userIDErr = strconv.ParseInt(value, 10, 64) if userIDErr != nil { - return nil, fmt.Errorf("githubstorage prepareGitHubPayload userID error: [%w]", userIDErr) + return nil, fmt.Errorf( + "[githubstorage.prepareGitHubPayload] userID error: ['%s' received, %w]", + value, userIDErr, + ) } githubStorage.UserID = userID } @@ -138,7 +151,7 @@ func (s GitHubStorage) Ping(ctx context.Context, maxRetries uint8, backoff time. 
} if pingErr != nil { - return fmt.Errorf("githubstorage Ping error: [%w]", pingErr) + return fmt.Errorf("[githubstorage.Ping] error: [%w]", pingErr) } return nil @@ -148,7 +161,7 @@ func (s GitHubStorage) Ping(ctx context.Context, maxRetries uint8, backoff time. func (s GitHubStorage) MessageStore(ctx context.Context, message *sarama.ConsumerMessage) error { payload, err := s.prepareGitHubPayload(message) if err != nil { - return fmt.Errorf("githubstorage Store payload error: [%w]", err) + return fmt.Errorf("[githubstorage.MessageStore] payload error: [%w]", err) } _, err = s.Pool.Exec( @@ -166,7 +179,7 @@ func (s GitHubStorage) MessageStore(ctx context.Context, message *sarama.Consume payload.Payload, ) if err != nil { - return fmt.Errorf("githubstorage Store Pool.Exec error: [%w]", err) + return fmt.Errorf("[githubstorage.MessageStore][Pool.Exec] error: [%w]", err) } return nil @@ -174,11 +187,17 @@ func (s GitHubStorage) MessageStore(ctx context.Context, message *sarama.Consume func (s GitHubStorage) checkRequired() error { if s.Logger == nil { - return fmt.Errorf("githubstorage check required, Logger error: [%w]", cerrors.ErrValueRequired) + return fmt.Errorf( + "[githubstorage.checkRequired] Logger error: [%w, 'nil' received]", + cerrors.ErrValueRequired, + ) } if s.DatabaseDSN == "" { - return fmt.Errorf("githubstorage check required, DatabaseDSN error: [%w]", cerrors.ErrValueRequired) + return fmt.Errorf( + "[githubstorage.checkRequired] DatabaseDSN error: [%w, empty string received]", + cerrors.ErrValueRequired, + ) } return nil @@ -191,7 +210,10 @@ type Option func(*GitHubStorage) error func WithLogger(l *slog.Logger) Option { return func(s *GitHubStorage) error { if l == nil { - return fmt.Errorf("githubstorage WithLogger error: [%w]", cerrors.ErrValueRequired) + return fmt.Errorf( + "[githubstorage.WithLogger] error: [%w, 'nil' received]", + cerrors.ErrValueRequired, + ) } s.Logger = l @@ -203,7 +225,10 @@ func WithLogger(l *slog.Logger) Option { func 
WithDatabaseDSN(dsn string) Option { return func(s *GitHubStorage) error { if dsn == "" { - return fmt.Errorf("githubstorage WithDatabaseDSN error: [%w]", cerrors.ErrValueRequired) + return fmt.Errorf( + "[githubstorage.WithDatabaseDSN] error: [%w, empty string received]", + cerrors.ErrValueRequired, + ) } s.DatabaseDSN = dsn @@ -217,7 +242,7 @@ func New(ctx context.Context, options ...Option) (*GitHubStorage, error) { for _, option := range options { if err := option(githubStorage); err != nil { - return nil, fmt.Errorf("githubstorage option error: [%w]", err) + return nil, err } } @@ -227,12 +252,12 @@ func New(ctx context.Context, options ...Option) (*GitHubStorage, error) { config, err := pgxpool.ParseConfig(githubStorage.DatabaseDSN) if err != nil { - return nil, fmt.Errorf("githubstorage pgxpool.ParseConfig error: [%w]", err) + return nil, fmt.Errorf("[githubstorage.New][pgxpool.ParseConfig] error: [%w]", err) } pool, err := pgxpool.NewWithConfig(ctx, config) if err != nil { - return nil, fmt.Errorf("githubstorage pgxpool.NewWithConfig error: [%w]", err) + return nil, fmt.Errorf("[githubstorage.New][pgxpool.NewWithConfig] error: [%w]", err) } githubStorage.Pool = pool diff --git a/internal/storage/githubstorage/githubstorage_test.go b/internal/storage/githubstorage/githubstorage_test.go index 2e3f449..bd4d5af 100644 --- a/internal/storage/githubstorage/githubstorage_test.go +++ b/internal/storage/githubstorage/githubstorage_test.go @@ -3,12 +3,12 @@ package githubstorage_test import ( "context" "errors" - "log/slog" "testing" "time" "github.com/IBM/sarama" "github.com/devchain-network/cauldron/internal/cerrors" + "github.com/devchain-network/cauldron/internal/slogger/mockslogger" "github.com/devchain-network/cauldron/internal/storage" "github.com/devchain-network/cauldron/internal/storage/githubstorage" "github.com/google/uuid" @@ -18,24 +18,6 @@ import ( "github.com/stretchr/testify/mock" ) -type mockLogger struct{} - -func (h *mockLogger) Enabled(_ 
context.Context, _ slog.Level) bool { - return true -} - -func (h *mockLogger) Handle(_ context.Context, record slog.Record) error { - return nil -} - -func (h *mockLogger) WithAttrs(attrs []slog.Attr) slog.Handler { - return h -} - -func (h *mockLogger) WithGroup(name string) slog.Handler { - return h -} - type MockPGPooler struct { mock.Mock } @@ -95,7 +77,7 @@ func TestNew_NilLogger(t *testing.T) { } func TestNew_NoDSN(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockslogger.New() ctx, cancel := context.WithTimeout(context.Background(), storage.DefaultDBPingTimeout) defer cancel() @@ -110,7 +92,7 @@ func TestNew_NoDSN(t *testing.T) { } func TestNew_EmptyDSN(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockslogger.New() ctx, cancel := context.WithTimeout(context.Background(), storage.DefaultDBPingTimeout) defer cancel() @@ -126,7 +108,7 @@ func TestNew_EmptyDSN(t *testing.T) { } func TestNew_InvalidDSN(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockslogger.New() ctx, cancel := context.WithTimeout(context.Background(), storage.DefaultDBPingTimeout) defer cancel() @@ -143,7 +125,7 @@ func TestNew_InvalidDSN(t *testing.T) { } func TestNew_Success(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockslogger.New() ctx, cancel := context.WithTimeout(context.Background(), storage.DefaultDBPingTimeout) defer cancel() @@ -161,7 +143,7 @@ func TestNew_Success(t *testing.T) { } func TestPing_Fail(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockslogger.New() ctx, cancel := context.WithTimeout(context.Background(), storage.DefaultDBPingTimeout) defer cancel() @@ -188,7 +170,7 @@ func TestPing_Fail(t *testing.T) { } func TestPing_Success(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockslogger.New() ctx, cancel := context.WithTimeout(context.Background(), storage.DefaultDBPingTimeout) defer cancel() @@ -214,7 +196,7 @@ func TestPing_Success(t *testing.T) 
{ } func TestStore_Fail_EmptyMessage(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockslogger.New() ctx, cancel := context.WithTimeout(context.Background(), storage.DefaultDBPingTimeout) defer cancel() @@ -240,7 +222,7 @@ func TestStore_Fail_EmptyMessage(t *testing.T) { } func TestStore_Fail_Message_InvalidTargetID(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockslogger.New() ctx, cancel := context.WithTimeout(context.Background(), storage.DefaultDBPingTimeout) defer cancel() @@ -276,7 +258,7 @@ func TestStore_Fail_Message_InvalidTargetID(t *testing.T) { } func TestStore_Fail_Message_InvalidHookID(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockslogger.New() ctx, cancel := context.WithTimeout(context.Background(), storage.DefaultDBPingTimeout) defer cancel() @@ -313,7 +295,7 @@ func TestStore_Fail_Message_InvalidHookID(t *testing.T) { } func TestStore_Fail_Message_InvalidSenderID(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockslogger.New() ctx, cancel := context.WithTimeout(context.Background(), storage.DefaultDBPingTimeout) defer cancel() @@ -353,7 +335,7 @@ func TestStore_Fail_Message_InvalidSenderID(t *testing.T) { } func TestStore_Insert_Error(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockslogger.New() ctx, cancel := context.WithTimeout(context.Background(), storage.DefaultDBPingTimeout) defer cancel() @@ -399,7 +381,7 @@ func TestStore_Insert_Error(t *testing.T) { } func TestStore_Success(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockslogger.New() ctx, cancel := context.WithTimeout(context.Background(), storage.DefaultDBPingTimeout) defer cancel() diff --git a/internal/transport/http/githubwebhookhandler/githubwebhookhandler.go b/internal/transport/http/githubwebhookhandler/githubwebhookhandler.go index 2f065fd..1eae2a6 100644 --- a/internal/transport/http/githubwebhookhandler/githubwebhookhandler.go +++ 
b/internal/transport/http/githubwebhookhandler/githubwebhookhandler.go @@ -151,16 +151,28 @@ func (h Handler) Handle(ctx *fasthttp.RequestCtx) { func (h Handler) checkRequired() error { if h.Logger == nil { - return fmt.Errorf("github webhook handler check required, Logger error: [%w]", cerrors.ErrValueRequired) + return fmt.Errorf( + "[githubwebhookhandler.checkRequired] Logger error: [%w, 'nil' received]", + cerrors.ErrValueRequired, + ) } if !h.Topic.Valid() { - return fmt.Errorf("github webhook handler check required, Topic error: [%w]", cerrors.ErrInvalid) + return fmt.Errorf( + "[githubwebhookhandler.checkRequired] Topic error: [%w, '%s' received]", + cerrors.ErrInvalid, h.Topic, + ) } if h.Secret == "" { - return fmt.Errorf("github webhook handler check required, Secret error: [%w]", cerrors.ErrValueRequired) + return fmt.Errorf( + "[githubwebhookhandler.checkRequired] Secret error: [%w, empty string received]", + cerrors.ErrValueRequired, + ) } if h.MessageQueue == nil { - return fmt.Errorf("github webhook handler check required, MessageQueue error: [%w]", cerrors.ErrValueRequired) + return fmt.Errorf( + "[githubwebhookhandler.checkRequired] MessageQueue error: [%w, 'nil' received]", + cerrors.ErrValueRequired, + ) } return nil @@ -173,7 +185,10 @@ type Option func(*Handler) error func WithLogger(l *slog.Logger) Option { return func(h *Handler) error { if l == nil { - return fmt.Errorf("github webhook handler WithLogger error: [%w]", cerrors.ErrValueRequired) + return fmt.Errorf( + "[githubwebhookhandler.WithLogger] error: [%w, 'nil' received]", + cerrors.ErrValueRequired, + ) } h.Logger = l @@ -182,12 +197,16 @@ func WithLogger(l *slog.Logger) Option { } // WithTopic sets topic name to consume. 
-func WithTopic(s kafkacp.KafkaTopicIdentifier) Option { +func WithTopic(s string) Option { return func(h *Handler) error { - if !s.Valid() { - return fmt.Errorf("github webhook handler WithTopic h.Topic error: [%w]", cerrors.ErrInvalid) + topic := kafkacp.KafkaTopicIdentifier(s) + if !topic.Valid() { + return fmt.Errorf( + "[githubwebhookhandler.WithTopic] error: [%w, '%s' received]", + cerrors.ErrInvalid, s, + ) } - h.Topic = s + h.Topic = topic return nil } @@ -197,7 +216,10 @@ func WithTopic(s kafkacp.KafkaTopicIdentifier) Option { func WithWebhookSecret(s string) Option { return func(h *Handler) error { if s == "" { - return fmt.Errorf("github webhook handler WithWebhookSecret h.Secret error: [%w]", cerrors.ErrValueRequired) + return fmt.Errorf( + "[githubwebhookhandler.WithWebhookSecret] error: [%w, empty string received]", + cerrors.ErrValueRequired, + ) } h.Secret = s @@ -211,7 +233,7 @@ func WithProducerGitHubMessageQueue(mq chan *sarama.ProducerMessage) Option { return func(h *Handler) error { if mq == nil { return fmt.Errorf( - "github webhook handler WithProducerGitHubMessageQueue error: [%w]", + "[githubwebhookhandler.WithProducerGitHubMessageQueue] error: [%w, 'nil' received]", cerrors.ErrValueRequired, ) } @@ -227,7 +249,7 @@ func New(options ...Option) (*Handler, error) { for _, option := range options { if err := option(handler); err != nil { - return nil, fmt.Errorf("github webhook handler option error: [%w]", err) + return nil, err } } diff --git a/internal/transport/http/githubwebhookhandler/githubwebhookhandler_test.go b/internal/transport/http/githubwebhookhandler/githubwebhookhandler_test.go index 36de928..608fa64 100644 --- a/internal/transport/http/githubwebhookhandler/githubwebhookhandler_test.go +++ b/internal/transport/http/githubwebhookhandler/githubwebhookhandler_test.go @@ -1,41 +1,22 @@ package githubwebhookhandler_test import ( - "context" "crypto/hmac" "crypto/sha256" "encoding/hex" - "log/slog" "testing" "time" 
"github.com/IBM/sarama" "github.com/devchain-network/cauldron/internal/cerrors" "github.com/devchain-network/cauldron/internal/kafkacp" + "github.com/devchain-network/cauldron/internal/slogger/mockslogger" "github.com/devchain-network/cauldron/internal/transport/http/githubwebhookhandler" "github.com/google/uuid" "github.com/stretchr/testify/assert" "github.com/valyala/fasthttp" ) -type mockLogger struct{} - -func (h *mockLogger) Enabled(_ context.Context, _ slog.Level) bool { - return true -} - -func (h *mockLogger) Handle(_ context.Context, record slog.Record) error { - return nil -} - -func (h *mockLogger) WithAttrs(attrs []slog.Attr) slog.Handler { - return h -} - -func (h *mockLogger) WithGroup(name string) slog.Handler { - return h -} - func TestNew_NoLogger(t *testing.T) { handler, err := githubwebhookhandler.New() @@ -53,7 +34,7 @@ func TestNew_NilLogger(t *testing.T) { } func TestNew_NoTopic(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockslogger.New() handler, err := githubwebhookhandler.New( githubwebhookhandler.WithLogger(logger), @@ -64,11 +45,11 @@ func TestNew_NoTopic(t *testing.T) { } func TestNew_InvalidTopic(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockslogger.New() handler, err := githubwebhookhandler.New( githubwebhookhandler.WithLogger(logger), - githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifier("foo")), + githubwebhookhandler.WithTopic("foo"), ) assert.ErrorIs(t, err, cerrors.ErrInvalid) @@ -76,11 +57,11 @@ func TestNew_InvalidTopic(t *testing.T) { } func TestNew_NoSecret(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockslogger.New() handler, err := githubwebhookhandler.New( githubwebhookhandler.WithLogger(logger), - githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifier("github")), + githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), ) assert.ErrorIs(t, err, cerrors.ErrValueRequired) @@ -88,11 +69,11 @@ func TestNew_NoSecret(t 
*testing.T) { } func TestNew_EmptySecret(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockslogger.New() handler, err := githubwebhookhandler.New( githubwebhookhandler.WithLogger(logger), - githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifier("github")), + githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), githubwebhookhandler.WithWebhookSecret(""), ) @@ -101,11 +82,11 @@ func TestNew_EmptySecret(t *testing.T) { } func TestNew_NoMessageQueue(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockslogger.New() handler, err := githubwebhookhandler.New( githubwebhookhandler.WithLogger(logger), - githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifier("github")), + githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), githubwebhookhandler.WithWebhookSecret("my-secret"), ) @@ -114,11 +95,11 @@ func TestNew_NoMessageQueue(t *testing.T) { } func TestNew_NilMessageQueue(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockslogger.New() handler, err := githubwebhookhandler.New( githubwebhookhandler.WithLogger(logger), - githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifier("github")), + githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), githubwebhookhandler.WithWebhookSecret("my-secret"), githubwebhookhandler.WithProducerGitHubMessageQueue(nil), ) @@ -128,12 +109,12 @@ func TestNew_NilMessageQueue(t *testing.T) { } func TestNew_Success(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockslogger.New() messageQueue := make(chan *sarama.ProducerMessage, 10) handler, err := githubwebhookhandler.New( githubwebhookhandler.WithLogger(logger), - githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifier("github")), + githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), githubwebhookhandler.WithWebhookSecret("my-secret"), githubwebhookhandler.WithProducerGitHubMessageQueue(messageQueue), ) @@ 
-143,12 +124,12 @@ func TestNew_Success(t *testing.T) { } func TestHandle_NoBody(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockslogger.New() messageQueue := make(chan *sarama.ProducerMessage, 10) handler, err := githubwebhookhandler.New( githubwebhookhandler.WithLogger(logger), - githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifier("github")), + githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), githubwebhookhandler.WithWebhookSecret("my-secret"), githubwebhookhandler.WithProducerGitHubMessageQueue(messageQueue), ) @@ -163,12 +144,12 @@ func TestHandle_NoBody(t *testing.T) { } func TestHandle_NoHMAC(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockslogger.New() messageQueue := make(chan *sarama.ProducerMessage, 10) handler, err := githubwebhookhandler.New( githubwebhookhandler.WithLogger(logger), - githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifier("github")), + githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), githubwebhookhandler.WithWebhookSecret("my-secret"), githubwebhookhandler.WithProducerGitHubMessageQueue(messageQueue), ) @@ -184,12 +165,12 @@ func TestHandle_NoHMAC(t *testing.T) { } func TestHandle_InvalidHMAC(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockslogger.New() messageQueue := make(chan *sarama.ProducerMessage, 10) handler, err := githubwebhookhandler.New( githubwebhookhandler.WithLogger(logger), - githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifier("github")), + githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), githubwebhookhandler.WithWebhookSecret("my-secret"), githubwebhookhandler.WithProducerGitHubMessageQueue(messageQueue), ) @@ -213,12 +194,12 @@ func newMockRequestCtx() *fasthttp.RequestCtx { } func TestHandle_NoXGithubEvent(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockslogger.New() messageQueue := make(chan *sarama.ProducerMessage, 10) handler, 
err := githubwebhookhandler.New( githubwebhookhandler.WithLogger(logger), - githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifier("github")), + githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), githubwebhookhandler.WithWebhookSecret("my-secret"), githubwebhookhandler.WithProducerGitHubMessageQueue(messageQueue), ) @@ -253,12 +234,12 @@ func TestHandle_NoXGithubEvent(t *testing.T) { } func TestHandle_NoXGithubDeliveryID(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockslogger.New() messageQueue := make(chan *sarama.ProducerMessage, 10) handler, err := githubwebhookhandler.New( githubwebhookhandler.WithLogger(logger), - githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifier("github")), + githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), githubwebhookhandler.WithWebhookSecret("my-secret"), githubwebhookhandler.WithProducerGitHubMessageQueue(messageQueue), ) @@ -294,12 +275,12 @@ func TestHandle_NoXGithubDeliveryID(t *testing.T) { } func TestHandle_NoXGithubHookID(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockslogger.New() messageQueue := make(chan *sarama.ProducerMessage, 10) handler, err := githubwebhookhandler.New( githubwebhookhandler.WithLogger(logger), - githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifier("github")), + githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), githubwebhookhandler.WithWebhookSecret("my-secret"), githubwebhookhandler.WithProducerGitHubMessageQueue(messageQueue), ) @@ -336,12 +317,12 @@ func TestHandle_NoXGithubHookID(t *testing.T) { } func TestHandle_NoXGithubInstallationTargetID(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockslogger.New() messageQueue := make(chan *sarama.ProducerMessage, 10) handler, err := githubwebhookhandler.New( githubwebhookhandler.WithLogger(logger), - githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifier("github")), + 
githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), githubwebhookhandler.WithWebhookSecret("my-secret"), githubwebhookhandler.WithProducerGitHubMessageQueue(messageQueue), ) @@ -379,12 +360,12 @@ func TestHandle_NoXGithubInstallationTargetID(t *testing.T) { } func TestHandle_NoXGithubInstallationTargetType(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockslogger.New() messageQueue := make(chan *sarama.ProducerMessage, 10) handler, err := githubwebhookhandler.New( githubwebhookhandler.WithLogger(logger), - githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifier("github")), + githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), githubwebhookhandler.WithWebhookSecret("my-secret"), githubwebhookhandler.WithProducerGitHubMessageQueue(messageQueue), ) @@ -423,12 +404,12 @@ func TestHandle_NoXGithubInstallationTargetType(t *testing.T) { } func TestHandle_NoSenderLogin(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockslogger.New() messageQueue := make(chan *sarama.ProducerMessage, 10) handler, err := githubwebhookhandler.New( githubwebhookhandler.WithLogger(logger), - githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifier("github")), + githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), githubwebhookhandler.WithWebhookSecret("my-secret"), githubwebhookhandler.WithProducerGitHubMessageQueue(messageQueue), ) @@ -468,12 +449,12 @@ func TestHandle_NoSenderLogin(t *testing.T) { } func TestHandle_NoSenderID(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockslogger.New() messageQueue := make(chan *sarama.ProducerMessage, 10) handler, err := githubwebhookhandler.New( githubwebhookhandler.WithLogger(logger), - githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifier("github")), + githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), githubwebhookhandler.WithWebhookSecret("my-secret"), 
githubwebhookhandler.WithProducerGitHubMessageQueue(messageQueue), ) @@ -513,12 +494,12 @@ func TestHandle_NoSenderID(t *testing.T) { } func TestMessageQueue_Scenarios(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockslogger.New() messageQueue := make(chan *sarama.ProducerMessage, 1) handler, err := githubwebhookhandler.New( githubwebhookhandler.WithLogger(logger), - githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifier("github")), + githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), githubwebhookhandler.WithWebhookSecret("my-secret"), githubwebhookhandler.WithProducerGitHubMessageQueue(messageQueue), ) @@ -554,12 +535,12 @@ func TestMessageQueue_Scenarios(t *testing.T) { } func TestHandle_Success(t *testing.T) { - logger := slog.New(new(mockLogger)) + logger := mockslogger.New() messageQueue := make(chan *sarama.ProducerMessage, 10) handler, err := githubwebhookhandler.New( githubwebhookhandler.WithLogger(logger), - githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifier("github")), + githubwebhookhandler.WithTopic(kafkacp.KafkaTopicIdentifierGitHub.String()), githubwebhookhandler.WithWebhookSecret("my-secret"), githubwebhookhandler.WithProducerGitHubMessageQueue(messageQueue), ) diff --git a/internal/transport/http/healthcheckhandler/healthcheckhandler.go b/internal/transport/http/healthcheckhandler/healthcheckhandler.go index bf5b213..ae16ffa 100644 --- a/internal/transport/http/healthcheckhandler/healthcheckhandler.go +++ b/internal/transport/http/healthcheckhandler/healthcheckhandler.go @@ -28,7 +28,10 @@ func (h Handler) Handle(ctx *fasthttp.RequestCtx) { func (h Handler) checkRequired() error { if h.Version == "" { - return fmt.Errorf("health check handler check required, Version error: [%w]", cerrors.ErrValueRequired) + return fmt.Errorf( + "[healthcheckhandler.checkRequired], Version error: [%w, empty string received]", + cerrors.ErrValueRequired, + ) } return nil @@ -41,7 +44,10 @@ type Option 
func(*Handler) error func WithVersion(s string) Option { return func(h *Handler) error { if s == "" { - return fmt.Errorf("health checkhandler WithVersion error: [%w]", cerrors.ErrValueRequired) + return fmt.Errorf( + "[healthcheckhandler.checkRequired] WithVersion error: [%w, empty string received]", + cerrors.ErrValueRequired, + ) } h.Version = s @@ -55,7 +61,7 @@ func New(options ...Option) (*Handler, error) { for _, option := range options { if err := option(handler); err != nil { - return nil, fmt.Errorf("health check handler option error: [%w]", err) + return nil, err } } From ca62711d61855c1f47f2cfc3b81fcd86ede5e009 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?U=C4=9Fur=20=C3=96zy=C4=B1lmazel?= Date: Mon, 27 Jan 2025 21:53:14 +0300 Subject: [PATCH 13/13] add new docker file --- ...d-push-cauldron-github-group-consumer.yml} | 4 +- DEVELOPMENT.md | 80 ++++++++---------- Dockerfile.github-consumer-group | 31 +++++++ Rakefile | 81 ++++--------------- cmd/githubconsumergroup/main.go | 2 +- docker-compose.infra.yml | 5 +- .../kafkacp/kafkaproducer/kafkaproducer.go | 1 + 7 files changed, 88 insertions(+), 116 deletions(-) rename .github/workflows/{build-push-cauldron-github-comsumer.yml => build-push-cauldron-github-group-consumer.yml} (88%) create mode 100644 Dockerfile.github-consumer-group diff --git a/.github/workflows/build-push-cauldron-github-comsumer.yml b/.github/workflows/build-push-cauldron-github-group-consumer.yml similarity index 88% rename from .github/workflows/build-push-cauldron-github-comsumer.yml rename to .github/workflows/build-push-cauldron-github-group-consumer.yml index 0b3701c..3756c55 100644 --- a/.github/workflows/build-push-cauldron-github-comsumer.yml +++ b/.github/workflows/build-push-cauldron-github-group-consumer.yml @@ -1,4 +1,4 @@ -name: Build and push Cauldron GitHub Consumer +name: Build and push Cauldron GitHub Group Consumer on: workflow_dispatch: @@ -24,7 +24,7 @@ jobs: uses: docker/build-push-action@v6 with: context: . 
- file: Dockerfile.github-consumer + file: Dockerfile.github-consumer-group platforms: linux/amd64,linux/arm64 push: true provenance: false diff --git a/DEVELOPMENT.md b/DEVELOPMENT.md index 995344e..a0c71c2 100644 --- a/DEVELOPMENT.md +++ b/DEVELOPMENT.md @@ -71,7 +71,8 @@ bundle |:---------|:------------|---------| | `LOG_LEVEL` | Logging level, Valid values are: `"DEBUG"`, `"INFO"`, `"WARN"`, `"ERROR"` | `"INFO"` | | `KCP_BROKERS` | Kafka consumer/producer brokers list, comma separated | `"127.0.0.1:9094"` | -| `KC_TOPIC_GITHUB` | Topic name for GitHub webhook consumer | `github` | +| `KC_TOPIC_GITHUB` | Topic name for GitHub webhook consumer | `""` | +| `KCG_NAME` | Kafka consumer group name | `""` | | `KC_PARTITION` | Consumer partition number | `0` | | `KC_DIAL_TIMEOUT` | Initial connection timeout used by broker (shared with consumer) | "`30s`" (seconds) | | `KC_READ_TIMEOUT` | Response timeout used by broker (shared with consumer) | "`30s`" (seconds) | @@ -154,8 +155,11 @@ export KP_GITHUB_MESSAGE_QUEUE_SIZE=100 # export KP_BACKOFF="2s" # export KP_MAX_RETRIES="10" -# kafka github consumer optional values. -# export KC_TOPIC_GITHUB="github" +# kafka github consumer group values. +export KC_TOPIC_GITHUB="github" +export KCG_NAME="github-group" + +# kafka github consumer group optional values. 
# export KC_PARTITION="0" # export KC_DIAL_TIMEOUT="30s" # export KC_READ_TIMEOUT="30s" @@ -212,31 +216,27 @@ of `rake tasks`: ```bash rake -T -rake db:init # init database -rake db:migrate # runs rake db:migrate up (shortcut) -rake db:migrate:down # run migrate down -rake db:migrate:goto[index] # go to migration -rake db:migrate:up # run migrate up -rake db:psql # connect local db with psql -rake db:reset # reset database (drop and create) -rake default # default task, runs server -rake docker:build:github_consumer # build github consumer -rake docker:build:migrator # build migrator -rake docker:build:server # build server -rake docker:compose:infra:down # stop the infra with all components -rake docker:compose:infra:up # run the infra with all components -rake docker:compose:kafka:down # stop the kafka and kafka-ui only -rake docker:compose:kafka:up # run the kafka and kafka-ui only -rake docker:run:github_consumer # run github consumer -rake docker:run:migrator # run migrator -rake docker:run:server # run server -rake lint # run golang-ci linter -rake rubocop:autofix # lint ruby and autofix -rake rubocop:lint # lint ruby -rake run:kafka:github:consumer # run kafka github consumer -rake run:server # run server -rake test # runs tests (shortcut) -rake test:coverage # run tests and show coverage +rake db:init # init database +rake db:migrate # runs rake db:migrate up (shortcut) +rake db:migrate:down # run migrate down +rake db:migrate:goto[index] # go to migration +rake db:migrate:up # run migrate up +rake db:psql # connect local db with psql +rake db:reset # reset database (drop and create) +rake default # default task, runs server +rake docker:compose:infra:down # stop the infra with all components +rake docker:compose:infra:up # run the infra with all components +rake docker:compose:kafka:down # stop the kafka and kafka-ui only +rake docker:compose:kafka:up # run the kafka and kafka-ui only +rake lint # run golang-ci linter +rake psql:infra # connect to infra 
database with psql +rake rubocop:autofix # lint ruby and autofix +rake rubocop:lint # lint ruby +rake run:kafka:github:consumer # run kafka github consumer +rake run:kafka:github:consumer_group # run kafka github consumer group +rake run:server # run server +rake test # runs tests (shortcut) +rake test:coverage # run tests and show coverage ``` You can run tests: @@ -347,24 +347,12 @@ rake rubocop:autofix # lints ruby code and auto fixes. ```bash rake -T "docker:" -rake docker:build:github_consumer # build github consumer -rake docker:build:migrator # build migrator -rake docker:build:server # build server - -rake docker:compose:infra:down # stop the infra with all components -rake docker:compose:infra:up # run the infra with all components -rake docker:compose:kafka:down # stop the kafka and kafka-ui only -rake docker:compose:kafka:up # run the kafka and kafka-ui only - -rake docker:run:github_consumer # run github consumer -rake docker:run:migrator # run migrator -rake docker:run:server # run server +rake docker:compose:infra:down # stop the infra with all components +rake docker:compose:infra:up # run the infra with all components +rake docker:compose:kafka:down # stop the kafka and kafka-ui only +rake docker:compose:kafka:up # run the kafka and kafka-ui only ``` -- `docker:build:*`: builds images locally, testing purposes. -- `docker:run:*`: runs containers locally, testing purposes. -- `docker:compose:*`: ups or downs whole infrastructure with services. - --- ## Infrastructure Diagram @@ -406,10 +394,10 @@ Now you can access: - Kafka UI: `http://127.0.0.1:8080/` - Ngrok: `http://127.0.0.1:4040` -- PostgreSQL: `PGPASSWORD="${POSTGRES_PASSWORD}" psql -h localhost -p 5433 -U postgres -d devchain_webhook` +- PostgreSQL: `PGOPTIONS="--search_path=cauldron,public" PGPASSWORD="${POSTGRES_PASSWORD}" psql -h localhost -p 5433 -U postgres -d devchain_webhook` For PostgreSQL, `5433` is exposed in container to avoid conflicts with the -local PostgreSQL instance. 
+local PostgreSQL instance. Use `rake psql:infra` to connect your infra database. Logging for **kafka** and **kafka-ui** is set to `error` only. Due to development purposes, both were producing too much information, little clean up required. diff --git a/Dockerfile.github-consumer-group b/Dockerfile.github-consumer-group new file mode 100644 index 0000000..96ec92b --- /dev/null +++ b/Dockerfile.github-consumer-group @@ -0,0 +1,31 @@ +FROM golang:1.23-alpine AS builder + +WORKDIR /build +COPY . . + +ARG GOOS +ARG GOARCH +RUN CGO_ENABLED=0 GOOS=${GOOS} GOARCH=${GOARCH} go build -o consumer cmd/githubconsumergroup/main.go + +FROM alpine:latest AS certs +RUN apk add --update --no-cache ca-certificates + +FROM busybox:latest +ARG UID=10001 +RUN adduser \ + --disabled-password \ + --gecos "" \ + --home "/nonexistent" \ + --shell "/sbin/nologin" \ + --no-create-home \ + --uid "${UID}" \ + appuser +USER appuser +COPY --from=certs /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ca-certificates.crt +COPY --from=builder /build/consumer /consumer + +ENTRYPOINT ["/consumer"] + +LABEL org.opencontainers.image.authors="Uğur vigo Özyılmazel " +LABEL org.opencontainers.image.licenses="MIT" +LABEL org.opencontainers.image.source="https://github.com/devchain-network/cauldron" \ No newline at end of file diff --git a/Rakefile b/Rakefile index 01c7d4a..0ce43c7 100644 --- a/Rakefile +++ b/Rakefile @@ -66,72 +66,7 @@ namespace :run do end namespace :docker do - namespace :run do - desc 'run server' - task :server do - system %{ - docker run \ - --env GITHUB_HMAC_SECRET=${GITHUB_HMAC_SECRET} \ - -p 8000:8000 \ - devchain-server:latest - } - $CHILD_STATUS&.exitstatus || 1 - rescue Interrupt - 0 - end - - desc 'run github consumer' - task :github_consumer do - system %{ - docker run \ - --env KC_TOPIC=${KC_TOPIC} \ - devchain-gh-consumer:latest - } - $CHILD_STATUS&.exitstatus || 1 - rescue Interrupt - 0 - end - - desc 'run migrator' - task :migrator do - system %{ - docker run \ - --env 
DATABASE_URL=${DATABASE_URL_DOCKER_TO_HOST} \ - devchain-migrator:latest - } - $CHILD_STATUS&.exitstatus || 1 - rescue Interrupt - 0 - end - end - namespace :build do - desc 'build server' - task :server do - system %{ docker build -f Dockerfile.server -t devchain-server:latest . } - $CHILD_STATUS&.exitstatus || 1 - rescue Interrupt - 0 - end - - desc 'build github consumer' - task :github_consumer do - system %{ docker build -f Dockerfile.github-consumer -t devchain-gh-consumer:latest . } - $CHILD_STATUS&.exitstatus || 1 - rescue Interrupt - 0 - end - - desc 'build migrator' - task :migrator do - system %{ docker build -f Dockerfile.migrator -t devchain-migrator:latest . } - $CHILD_STATUS&.exitstatus || 1 - rescue Interrupt - 0 - end - end - namespace :compose do - namespace :kafka do desc 'run the kafka and kafka-ui only' task :up do @@ -309,3 +244,19 @@ end desc 'runs tests (shortcut)' task test: 'test:test_all' + +INFRA_POSTGRES_PASSWORD = ENV['POSTGRES_PASSWORD'] || nil +namespace :psql do + desc 'connect to infra database with psql' + task :infra do + abort 'infra POSTGRES_PASSWORD environment variable is not set' if INFRA_POSTGRES_PASSWORD.nil? 
+ system %{ + PGPASSWORD="#{INFRA_POSTGRES_PASSWORD}" \ + PGOPTIONS="--search_path=cauldron,public" \ + psql -h localhost -p 5433 -U postgres -d #{DATABASE_NAME} + } + $CHILD_STATUS&.exitstatus || 1 + rescue Interrupt + 0 + end +end diff --git a/cmd/githubconsumergroup/main.go b/cmd/githubconsumergroup/main.go index e4ac863..792aa5e 100644 --- a/cmd/githubconsumergroup/main.go +++ b/cmd/githubconsumergroup/main.go @@ -29,7 +29,7 @@ func Run() error { logLevel := getenv.String("LOG_LEVEL", slogger.DefaultLogLevel) brokersList := getenv.String("KCP_BROKERS", kafkacp.DefaultKafkaBrokers) kafkaTopic := getenv.String("KC_TOPIC_GITHUB", "") - kafkaConsumerGroup := getenv.String("KCG_NAME", "github-group") + kafkaConsumerGroup := getenv.String("KCG_NAME", "") kafkaDialTimeout := getenv.Duration("KC_DIAL_TIMEOUT", kafkaconsumergroup.DefaultDialTimeout) kafkaReadTimeout := getenv.Duration("KC_READ_TIMEOUT", kafkaconsumergroup.DefaultReadTimeout) kafkaWriteTimeout := getenv.Duration("KC_WRITE_TIMEOUT", kafkaconsumergroup.DefaultWriteTimeout) diff --git a/docker-compose.infra.yml b/docker-compose.infra.yml index ac57d2b..472c8f1 100644 --- a/docker-compose.infra.yml +++ b/docker-compose.infra.yml @@ -100,9 +100,10 @@ services: github-consumer: build: context: . 
- dockerfile: Dockerfile.github-consumer + dockerfile: Dockerfile.github-consumer-group environment: - KC_TOPIC: "github" + KC_TOPIC_GITHUB: "github" + KCG_NAME: "github-group" KCP_BROKERS: "kafka:9092" DATABASE_URL: "${DATABASE_URL_INFRA}" depends_on: diff --git a/internal/kafkacp/kafkaproducer/kafkaproducer.go b/internal/kafkacp/kafkaproducer/kafkaproducer.go index d4a5c2f..6df449b 100644 --- a/internal/kafkacp/kafkaproducer/kafkaproducer.go +++ b/internal/kafkacp/kafkaproducer/kafkaproducer.go @@ -210,6 +210,7 @@ func New(options ...Option) (sarama.AsyncProducer, error) { config.Producer.RequiredAcks = sarama.WaitForAll config.Producer.Return.Successes = true config.Producer.Return.Errors = true + config.Producer.Compression = sarama.CompressionSnappy config.Net.DialTimeout = producer.DialTimeout config.Net.ReadTimeout = producer.ReadTimeout config.Net.WriteTimeout = producer.WriteTimeout