
Commit fb50904

fix(kafkatest): add missing fake consumer methods (#191)

1 parent 8f699f8


kafka/kafkatest/utils.go

Lines changed: 44 additions & 23 deletions
@@ -6,37 +6,33 @@ import (
     "sync"
     "time"
 
-    "github.com/confluentinc/confluent-kafka-go/kafka"
+    kafkalib "github.com/confluentinc/confluent-kafka-go/kafka"
+    "github.com/netlify/netlify-commons/kafka"
     "github.com/netlify/netlify-commons/util"
     "github.com/sirupsen/logrus"
 )
 
 func KafkaPipe(log logrus.FieldLogger) (*FakeKafkaConsumer, *FakeKafkaProducer) {
-    distri := make(chan *kafka.Message, 200)
+    distri := make(chan *kafkalib.Message, 200)
     rdr := NewFakeKafkaConsumer(log, distri)
     wtr := NewFakeKafkaProducer(distri)
     wtr.commits = rdr.commits
     return rdr, wtr
 }
 
 type FakeKafkaConsumer struct {
-    messages []*kafka.Message
+    messages []*kafkalib.Message
     msgMu    sync.Mutex
     offset   int64
     notify   chan struct{}
-    commits  chan *kafka.Message
+    commits  chan *kafkalib.Message
     log      logrus.FieldLogger
 }
 
-func (f *FakeKafkaConsumer) Close() error {
-    close(f.commits)
-    return nil
-}
-
 type FakeKafkaProducer struct {
-    distris   []chan<- *kafka.Message
+    distris   []chan<- *kafkalib.Message
     distrisMu sync.Mutex
-    commits   <-chan *kafka.Message
+    commits   <-chan *kafkalib.Message
     closed    util.AtomicBool
 }
 
@@ -53,19 +49,19 @@ func (f *FakeKafkaProducer) Close() error {
     return nil
 }
 
-func NewFakeKafkaConsumer(log logrus.FieldLogger, distri <-chan *kafka.Message) *FakeKafkaConsumer {
+func NewFakeKafkaConsumer(log logrus.FieldLogger, distri <-chan *kafkalib.Message) *FakeKafkaConsumer {
     r := &FakeKafkaConsumer{
-        messages: make([]*kafka.Message, 0),
+        messages: make([]*kafkalib.Message, 0),
         offset:   0,
         notify:   make(chan struct{}),
         log:      log,
-        commits:  make(chan *kafka.Message, 1000),
+        commits:  make(chan *kafkalib.Message, 1000),
     }
 
     go func() {
         for msg := range distri {
             r.msgMu.Lock()
-            msg.TopicPartition.Offset = kafka.Offset(r.offset + 1)
+            msg.TopicPartition.Offset = kafkalib.Offset(r.offset + 1)
             r.messages = append(r.messages, setMsgDefaults(msg))
             r.msgMu.Unlock()
             r.notify <- struct{}{}
@@ -75,7 +71,7 @@ func NewFakeKafkaConsumer(log logrus.FieldLogger, distri <-chan *kafka.Message)
     return r
 }
 
-func (f *FakeKafkaConsumer) FetchMessage(ctx context.Context) (*kafka.Message, error) {
+func (f *FakeKafkaConsumer) FetchMessage(ctx context.Context) (*kafkalib.Message, error) {
     for {
         f.msgMu.Lock()
         if int64(len(f.messages)) > f.offset {
@@ -89,13 +85,13 @@ func (f *FakeKafkaConsumer) FetchMessage(ctx context.Context) (*kafka.Message, e
 
         select {
         case <-ctx.Done():
-            return &kafka.Message{}, ctx.Err()
+            return &kafkalib.Message{}, ctx.Err()
         case <-f.notify:
         }
     }
 }
 
-func (f *FakeKafkaConsumer) CommitMessage(msg *kafka.Message) error {
+func (f *FakeKafkaConsumer) CommitMessage(msg *kafkalib.Message) error {
     f.msgMu.Lock()
     f.log.WithField("offset", msg.TopicPartition.Offset).Trace("commiting message...")
     if int64(msg.TopicPartition.Offset) > f.offset {
@@ -117,27 +113,52 @@ func (f *FakeKafkaConsumer) SetInitialOffset(offset int64) error {
     return nil
 }
 
-func (f *FakeKafkaConsumer) Seek(offset int64, _ time.Duration) error {
+func (f *FakeKafkaConsumer) Seek(offset int64) error {
     f.msgMu.Lock()
     f.offset = offset
     f.msgMu.Unlock()
     return nil
 }
 
-func NewFakeKafkaProducer(distris ...chan<- *kafka.Message) *FakeKafkaProducer {
+func (f *FakeKafkaConsumer) AssignPartitionByKey(key string, algorithm kafka.PartitionerAlgorithm) error {
+    return nil // noop
+}
+
+func (f *FakeKafkaConsumer) AssignPartitionByID(id int32) error {
+    return nil // noop
+}
+
+func (f *FakeKafkaConsumer) GetMetadata(allTopics bool) (*kafkalib.Metadata, error) {
+    return &kafkalib.Metadata{}, nil // noop
+}
+
+func (f *FakeKafkaConsumer) GetPartitions() ([]int32, error) {
+    return []int32{}, nil // noop
+}
+
+func (f *FakeKafkaConsumer) SeekToTime(t time.Time) error {
+    return nil // noop
+}
+
+func (f *FakeKafkaConsumer) Close() error {
+    close(f.commits)
+    return nil
+}
+
+func NewFakeKafkaProducer(distris ...chan<- *kafkalib.Message) *FakeKafkaProducer {
     return &FakeKafkaProducer{
         distris: distris,
         closed:  util.NewAtomicBool(false),
     }
 }
 
-func (f *FakeKafkaProducer) AddDistri(d chan<- *kafka.Message) {
+func (f *FakeKafkaProducer) AddDistri(d chan<- *kafkalib.Message) {
     f.distrisMu.Lock()
     f.distris = append(f.distris, d)
     f.distrisMu.Unlock()
 }
 
-func (f *FakeKafkaProducer) Produce(ctx context.Context, msgs ...*kafka.Message) error {
+func (f *FakeKafkaProducer) Produce(ctx context.Context, msgs ...*kafkalib.Message) error {
     f.distrisMu.Lock()
     for _, msg := range msgs {
         for _, d := range f.distris {
@@ -162,7 +183,7 @@ func (f *FakeKafkaProducer) WaitForKey(key []byte) (gotKey bool) {
     return false
 }
 
-func setMsgDefaults(msg *kafka.Message) *kafka.Message {
+func setMsgDefaults(msg *kafkalib.Message) *kafkalib.Message {
     if msg.TopicPartition.Topic == nil {
         topicName := "local-test"
         msg.TopicPartition.Topic = &topicName
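What "missing fake consumer methods" amounts to: the new noops (AssignPartitionByKey, AssignPartitionByID, GetMetadata, GetPartitions, SeekToTime) plus the relocated Close presumably keep *FakeKafkaConsumer satisfying the consumer contract defined in github.com/netlify/netlify-commons/kafka. The sketch below restates that method set as a hypothetical interface derived only from the signatures in this file; the name consumerLike and the exact grouping are assumptions, and the authoritative definition lives in the kafka package itself.

```go
package kafkatestdoc // illustrative only; not part of the commit

import (
	"context"
	"time"

	kafkalib "github.com/confluentinc/confluent-kafka-go/kafka"
	"github.com/netlify/netlify-commons/kafka"
)

// consumerLike is a hypothetical restatement of the fake consumer's method
// set, derived from the signatures in kafka/kafkatest/utils.go above. The
// real interface in netlify-commons/kafka may name or group these differently.
type consumerLike interface {
	AssignPartitionByID(id int32) error
	AssignPartitionByKey(key string, algorithm kafka.PartitionerAlgorithm) error
	Close() error
	CommitMessage(msg *kafkalib.Message) error
	FetchMessage(ctx context.Context) (*kafkalib.Message, error)
	GetMetadata(allTopics bool) (*kafkalib.Metadata, error)
	GetPartitions() ([]int32, error)
	Seek(offset int64) error
	SeekToTime(t time.Time) error
	SetInitialOffset(offset int64) error
}
```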

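And a hedged usage sketch of how the pipe is typically exercised in a test: KafkaPipe wires a FakeKafkaProducer and a FakeKafkaConsumer together through the shared distri channel, so produced messages can be fetched and committed without a broker. The function and field names below come from the diff above; the test scaffolding (package name, payload, assertions) is illustrative only.

```go
package kafkatest_test // illustrative test, not part of the commit

import (
	"context"
	"testing"
	"time"

	kafkalib "github.com/confluentinc/confluent-kafka-go/kafka"
	"github.com/netlify/netlify-commons/kafka/kafkatest"
	"github.com/sirupsen/logrus"
)

func TestFakePipe(t *testing.T) {
	// Wire a fake consumer/producer pair sharing an in-memory channel.
	consumer, producer := kafkatest.KafkaPipe(logrus.New())
	defer consumer.Close()

	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
	defer cancel()

	// Produce a message; setMsgDefaults fills in the "local-test" topic.
	if err := producer.Produce(ctx, &kafkalib.Message{Value: []byte("hello")}); err != nil {
		t.Fatal(err)
	}

	// Fetch it back and commit the offset on the fake consumer.
	msg, err := consumer.FetchMessage(ctx)
	if err != nil {
		t.Fatal(err)
	}
	if string(msg.Value) != "hello" {
		t.Fatalf("unexpected payload %q", msg.Value)
	}
	if err := consumer.CommitMessage(msg); err != nil {
		t.Fatal(err)
	}
}
```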