fix: fix openim zk env set Signed-off-by: Xinwei Xiong (cubxxw) <3293172751nss@gmail.com>
// Copyright © 2023 OpenIM. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package kafka

import (
	"bytes"
	"context"
	"errors"
	"strings"
	"time"

	"github.com/IBM/sarama"
	"github.com/OpenIMSDK/protocol/constant"
	"github.com/OpenIMSDK/tools/log"
	"github.com/OpenIMSDK/tools/mcontext"
	"github.com/OpenIMSDK/tools/utils"
	"google.golang.org/protobuf/proto"

	"github.com/openimsdk/open-im-server/v3/pkg/common/config"
)

const maxRetry = 10 // number of retries

var errEmptyMsg = errors.New("kafka binary msg is empty")

// Producer represents a Kafka producer.
type Producer struct {
	addr     []string
	topic    string
	config   *sarama.Config
	producer sarama.SyncProducer
}
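
// Note: sarama.SyncProducer blocks on each send until the broker has
// responded (per the configured RequiredAcks level), which is what lets
// SendMessage return the partition and offset of the stored message directly.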

// NewKafkaProducer initializes a new Kafka producer.
func NewKafkaProducer(addr []string, topic string) *Producer {
	p := Producer{
		addr:   addr,
		topic:  topic,
		config: sarama.NewConfig(),
	}

	// Return successes and errors on the internal channels; sarama's
	// SyncProducer requires both flags to be true.
	p.config.Producer.Return.Successes = true
	p.config.Producer.Return.Errors = true

	// Set partitioner strategy: hash the message key to pick a partition
	p.config.Producer.Partitioner = sarama.NewHashPartitioner

	// Configure producer acknowledgement level
	configureProducerAck(&p, config.Config.Kafka.ProducerAck)

	// Configure message compression
	configureCompression(&p, config.Config.Kafka.CompressType)

	// Get Kafka credentials and address from environment variables, falling
	// back to the config file
	kafkaUsername := getEnvOrConfig("KAFKA_USERNAME", config.Config.Kafka.Username)
	kafkaPassword := getEnvOrConfig("KAFKA_PASSWORD", config.Config.Kafka.Password)
	kafkaAddr := getKafkaAddrFromEnv(addr)

	// Configure SASL authentication if credentials are provided
	if kafkaUsername != "" && kafkaPassword != "" {
		p.config.Net.SASL.Enable = true
		p.config.Net.SASL.User = kafkaUsername
		p.config.Net.SASL.Password = kafkaPassword
	}

	// Set the Kafka address
	p.addr = kafkaAddr

	// Set up TLS configuration (if required)
	SetupTLSConfig(p.config)

	// Create the producer, retrying on failure: one initial attempt plus up
	// to maxRetry retries, one second apart
	var err error
	for i := 0; i <= maxRetry; i++ {
		p.producer, err = sarama.NewSyncProducer(p.addr, p.config)
		if err == nil {
			return &p
		}
		time.Sleep(1 * time.Second) // Wait before retrying
	}

	// All attempts failed; err is guaranteed non-nil at this point
	panic("Failed to create Kafka producer: " + err.Error())
}
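
// getEnvOrConfig and getKafkaAddrFromEnv are helpers defined elsewhere in
// this package; they let environment variables such as KAFKA_USERNAME and
// KAFKA_PASSWORD override the values from the config file. When SASL is
// enabled without an explicit mechanism, sarama authenticates with PLAIN.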

// configureProducerAck configures the producer's acknowledgement level.
func configureProducerAck(p *Producer, ackConfig string) {
	switch strings.ToLower(ackConfig) {
	case "no_response":
		p.config.Producer.RequiredAcks = sarama.NoResponse
	case "wait_for_local":
		p.config.Producer.RequiredAcks = sarama.WaitForLocal
	case "wait_for_all":
		p.config.Producer.RequiredAcks = sarama.WaitForAll
	default:
		p.config.Producer.RequiredAcks = sarama.WaitForAll
	}
}
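
// The three acknowledgement levels correspond to Kafka's acks setting:
// NoResponse is acks=0 (fire-and-forget), WaitForLocal is acks=1 (the leader
// has written the message), and WaitForAll is acks=-1 (all in-sync replicas
// have confirmed). The default favors durability over latency.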

// configureCompression configures the message compression type for the producer.
func configureCompression(p *Producer, compressType string) {
	var compress sarama.CompressionCodec = sarama.CompressionNone
	if err := compress.UnmarshalText(bytes.ToLower([]byte(compressType))); err != nil {
		// Unrecognized compression type: keep the default (no compression)
		compress = sarama.CompressionNone
	}
	p.config.Producer.Compression = compress
}
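
// sarama's CompressionCodec.UnmarshalText recognizes the lower-cased names
// "none", "gzip", "snappy", "lz4", and "zstd"; anything else takes the
// no-compression fallback above.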

// GetMQHeaderWithContext extracts message queue headers from the context.
func GetMQHeaderWithContext(ctx context.Context) ([]sarama.RecordHeader, error) {
	operationID, opUserID, platform, connID, err := mcontext.GetCtxInfos(ctx)
	if err != nil {
		return nil, err
	}
	return []sarama.RecordHeader{
		{Key: []byte(constant.OperationID), Value: []byte(operationID)},
		{Key: []byte(constant.OpUserID), Value: []byte(opUserID)},
		{Key: []byte(constant.OpUserPlatform), Value: []byte(platform)},
		{Key: []byte(constant.ConnID), Value: []byte(connID)},
	}, nil
}

// GetContextWithMQHeader creates a context from message queue headers.
func GetContextWithMQHeader(header []*sarama.RecordHeader) context.Context {
	var values []string
	for _, recordHeader := range header {
		values = append(values, string(recordHeader.Value))
	}
	return mcontext.WithMustInfoCtx(values) // Attach extracted values to the context
}
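
// Note: GetContextWithMQHeader relies on the headers arriving in the same
// order GetMQHeaderWithContext wrote them (operationID, opUserID, platform,
// connID), since the values are handed to mcontext.WithMustInfoCtx as an
// ordered slice.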

// SendMessage sends a message to the Kafka topic configured in the Producer.
func (p *Producer) SendMessage(ctx context.Context, key string, msg proto.Message) (int32, int64, error) {
	log.ZDebug(ctx, "SendMessage", "msg", msg, "topic", p.topic, "key", key)

	// Marshal the protobuf message
	bMsg, err := proto.Marshal(msg)
	if err != nil {
		return 0, 0, utils.Wrap(err, "kafka proto Marshal err")
	}
	if len(bMsg) == 0 {
		return 0, 0, utils.Wrap(errEmptyMsg, "")
	}

	// Prepare Kafka message
	kMsg := &sarama.ProducerMessage{
		Topic: p.topic,
		Key:   sarama.StringEncoder(key),
		Value: sarama.ByteEncoder(bMsg),
	}

	// Validate message key and value
	if kMsg.Key.Length() == 0 || kMsg.Value.Length() == 0 {
		return 0, 0, utils.Wrap(errEmptyMsg, "")
	}

	// Attach context metadata as headers
	header, err := GetMQHeaderWithContext(ctx)
	if err != nil {
		return 0, 0, utils.Wrap(err, "")
	}
	kMsg.Headers = header

	// Send the message
	partition, offset, err := p.producer.SendMessage(kMsg)
	if err != nil {
		log.ZWarn(ctx, "p.producer.SendMessage error", err)
		return 0, 0, utils.Wrap(err, "")
	}

	log.ZDebug(ctx, "ByteEncoder SendMessage end", "key", kMsg.Key, "value length", kMsg.Value.Length())
	return partition, offset, nil
}
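
// Usage sketch (illustrative, not from the original file). The broker
// address, topic, and payload below are assumptions for the example;
// sdkws.MsgData is one of the protobuf messages OpenIM sends through Kafka.
//
//	producer := NewKafkaProducer([]string{"localhost:9092"}, "ws2ms_chat")
//	partition, offset, err := producer.SendMessage(ctx, userID, &sdkws.MsgData{SendID: userID})
//	if err != nil {
//		// handle the send failure
//	}
//	log.ZInfo(ctx, "message stored", "partition", partition, "offset", offset)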