blob: fd0d1d90b7fa0611c0ddfb7989d841b999a939b5 [file] [log] [blame]
khenaidooac637102019-01-14 15:44:34 -05001package sarama
2
3import (
4 "fmt"
5 "time"
6)
7
Scott Baker8461e152019-10-01 14:44:30 -07008const (
khenaidood948f772021-08-11 17:49:24 -04009 // CompressionNone no compression
Scott Baker8461e152019-10-01 14:44:30 -070010 CompressionNone CompressionCodec = iota
khenaidood948f772021-08-11 17:49:24 -040011 // CompressionGZIP compression using GZIP
Scott Baker8461e152019-10-01 14:44:30 -070012 CompressionGZIP
khenaidood948f772021-08-11 17:49:24 -040013 // CompressionSnappy compression using snappy
Scott Baker8461e152019-10-01 14:44:30 -070014 CompressionSnappy
khenaidood948f772021-08-11 17:49:24 -040015 // CompressionLZ4 compression using LZ4
Scott Baker8461e152019-10-01 14:44:30 -070016 CompressionLZ4
khenaidood948f772021-08-11 17:49:24 -040017 // CompressionZSTD compression using ZSTD
Scott Baker8461e152019-10-01 14:44:30 -070018 CompressionZSTD
khenaidooac637102019-01-14 15:44:34 -050019
Scott Baker8461e152019-10-01 14:44:30 -070020 // The lowest 3 bits contain the compression codec used for the message
21 compressionCodecMask int8 = 0x07
22
23 // Bit 3 set for "LogAppend" timestamps
24 timestampTypeMask = 0x08
25
26 // CompressionLevelDefault is the constant to use in CompressionLevel
27 // to have the default compression level for any codec. The value is picked
28 // that we don't use any existing compression levels.
29 CompressionLevelDefault = -1000
30)
William Kurkiandaa6bb22019-03-07 12:26:28 -050031
// CompressionCodec represents the various compression codecs recognized by
// Kafka in messages. It is stored in the low bits of a message's attributes
// byte (see compressionCodecMask).
type CompressionCodec int8
34
khenaidooac637102019-01-14 15:44:34 -050035func (cc CompressionCodec) String() string {
36 return []string{
37 "none",
38 "gzip",
39 "snappy",
40 "lz4",
Scott Baker8461e152019-10-01 14:44:30 -070041 "zstd",
khenaidooac637102019-01-14 15:44:34 -050042 }[int(cc)]
43}
44
khenaidood948f772021-08-11 17:49:24 -040045// Message is a kafka message type
khenaidooac637102019-01-14 15:44:34 -050046type Message struct {
47 Codec CompressionCodec // codec used to compress the message contents
48 CompressionLevel int // compression level
William Kurkiandaa6bb22019-03-07 12:26:28 -050049 LogAppendTime bool // the used timestamp is LogAppendTime
khenaidooac637102019-01-14 15:44:34 -050050 Key []byte // the message key, may be nil
51 Value []byte // the message contents
52 Set *MessageSet // the message set a message might wrap
53 Version int8 // v1 requires Kafka 0.10
54 Timestamp time.Time // the timestamp of the message (version 1+ only)
55
56 compressedCache []byte
57 compressedSize int // used for computing the compression ratio metrics
58}
59
60func (m *Message) encode(pe packetEncoder) error {
61 pe.push(newCRC32Field(crcIEEE))
62
63 pe.putInt8(m.Version)
64
65 attributes := int8(m.Codec) & compressionCodecMask
William Kurkiandaa6bb22019-03-07 12:26:28 -050066 if m.LogAppendTime {
67 attributes |= timestampTypeMask
68 }
khenaidooac637102019-01-14 15:44:34 -050069 pe.putInt8(attributes)
70
71 if m.Version >= 1 {
72 if err := (Timestamp{&m.Timestamp}).encode(pe); err != nil {
73 return err
74 }
75 }
76
77 err := pe.putBytes(m.Key)
78 if err != nil {
79 return err
80 }
81
82 var payload []byte
83
84 if m.compressedCache != nil {
85 payload = m.compressedCache
86 m.compressedCache = nil
87 } else if m.Value != nil {
khenaidooac637102019-01-14 15:44:34 -050088 payload, err = compress(m.Codec, m.CompressionLevel, m.Value)
89 if err != nil {
90 return err
91 }
92 m.compressedCache = payload
93 // Keep in mind the compressed payload size for metric gathering
94 m.compressedSize = len(payload)
95 }
96
97 if err = pe.putBytes(payload); err != nil {
98 return err
99 }
100
101 return pe.pop()
102}
103
// decode reads a legacy-format message from pd: CRC32, magic byte,
// attributes, optional v1 timestamp, key, and value. If the value is
// compressed it is decompressed in place and parsed as a nested message
// set. The read order below mirrors the wire format exactly.
func (m *Message) decode(pd packetDecoder) (err error) {
	// The CRC field is pooled; release it when decoding finishes.
	crc32Decoder := acquireCrc32Field(crcIEEE)
	defer releaseCrc32Field(crc32Decoder)

	err = pd.push(crc32Decoder)
	if err != nil {
		return err
	}

	m.Version, err = pd.getInt8()
	if err != nil {
		return err
	}

	// Only magic bytes 0 and 1 exist for this message format.
	if m.Version > 1 {
		return PacketDecodingError{fmt.Sprintf("unknown magic byte (%v)", m.Version)}
	}

	attribute, err := pd.getInt8()
	if err != nil {
		return err
	}
	// Low 3 bits: compression codec. Bit 3: LogAppend timestamp flag.
	m.Codec = CompressionCodec(attribute & compressionCodecMask)
	m.LogAppendTime = attribute&timestampTypeMask == timestampTypeMask

	// v1 messages carry a timestamp between the attributes and the key.
	if m.Version == 1 {
		if err := (Timestamp{&m.Timestamp}).decode(pd); err != nil {
			return err
		}
	}

	m.Key, err = pd.getBytes()
	if err != nil {
		return err
	}

	m.Value, err = pd.getBytes()
	if err != nil {
		return err
	}

	// Required for deep equal assertion during tests but might be useful
	// for future metrics about the compression ratio in fetch requests
	m.compressedSize = len(m.Value)

	// A compressed value wraps an entire message set; decompress it and
	// decode the inner set into m.Set.
	if m.Value != nil && m.Codec != CompressionNone {
		m.Value, err = decompress(m.Codec, m.Value)
		if err != nil {
			return err
		}

		if err := m.decodeSet(); err != nil {
			return err
		}
	}

	return pd.pop()
}
162
William Kurkiandaa6bb22019-03-07 12:26:28 -0500163// decodes a message set from a previously encoded bulk-message
khenaidooac637102019-01-14 15:44:34 -0500164func (m *Message) decodeSet() (err error) {
165 pd := realDecoder{raw: m.Value}
166 m.Set = &MessageSet{}
167 return m.Set.decode(&pd)
168}