redis add get message

Gordon 2022-05-24 23:31:37 +08:00
parent 8d78ef5946
commit 77c7efbe89


@@ -41,13 +41,13 @@ type OnlineHistoryConsumerHandler struct {
     msgCh             chan Cmd2Value
     chArrays          [ChannelNum]chan Cmd2Value
     chMongoArrays     [ChannelNum]chan Cmd2Value
-    msgDistributionCh chan Cmd2Value
+    //msgDistributionCh chan Cmd2Value
 }
 
 func (och *OnlineHistoryConsumerHandler) Init(cmdCh chan Cmd2Value) {
     och.msgHandle = make(map[string]fcb)
-    och.msgDistributionCh = make(chan Cmd2Value) //no buffer channel
-    go och.MessagesDistributionHandle()
+    //och.msgDistributionCh = make(chan Cmd2Value) //no buffer channel
+    //go och.MessagesDistributionHandle()
     och.cmdCh = cmdCh
     och.msgCh = make(chan Cmd2Value, 1000)
     for i := 0; i < ChannelNum; i++ {
@@ -180,52 +180,52 @@ func (och *OnlineHistoryConsumerHandler) MongoMessageRun(channelID int) {
     }
 }
 
-func (och *OnlineHistoryConsumerHandler) MessagesDistributionHandle() {
-    for {
-        UserAggregationMsgs := make(map[string][]*pbMsg.MsgDataToMQ, ChannelNum)
-        select {
-        case cmd := <-och.msgDistributionCh:
-            switch cmd.Cmd {
-            case ConsumerMsgs:
-                triggerChannelValue := cmd.Value.(TriggerChannelValue)
-                triggerID := triggerChannelValue.triggerID
-                consumerMessages := triggerChannelValue.cmsgList
-                //Aggregation map[userid]message list
-                log.Debug(triggerID, "batch messages come to distribution center", len(consumerMessages))
-                for i := 0; i < len(consumerMessages); i++ {
-                    msgFromMQ := pbMsg.MsgDataToMQ{}
-                    err := proto.Unmarshal(consumerMessages[i].Value, &msgFromMQ)
-                    if err != nil {
-                        log.Error(triggerID, "msg_transfer Unmarshal msg err", "msg", string(consumerMessages[i].Value), "err", err.Error())
-                        return
-                    }
-                    log.Debug(triggerID, "single msg come to distribution center", msgFromMQ.String(), string(consumerMessages[i].Key))
-                    if oldM, ok := UserAggregationMsgs[string(consumerMessages[i].Key)]; ok {
-                        oldM = append(oldM, &msgFromMQ)
-                        UserAggregationMsgs[string(consumerMessages[i].Key)] = oldM
-                    } else {
-                        m := make([]*pbMsg.MsgDataToMQ, 0, 100)
-                        m = append(m, &msgFromMQ)
-                        UserAggregationMsgs[string(consumerMessages[i].Key)] = m
-                    }
-                }
-                log.Debug(triggerID, "generate map list users len", len(UserAggregationMsgs))
-                for userID, v := range UserAggregationMsgs {
-                    if len(v) >= 0 {
-                        hashCode := getHashCode(userID)
-                        channelID := hashCode % ChannelNum
-                        log.Debug(triggerID, "generate channelID", hashCode, channelID, userID)
-                        //go func(cID uint32, userID string, messages []*pbMsg.MsgDataToMQ) {
-                        och.chArrays[channelID] <- Cmd2Value{Cmd: UserMessages, Value: MsgChannelValue{userID: userID, msgList: v, triggerID: triggerID}}
-                        //}(channelID, userID, v)
-                    }
-                }
-            }
-        }
-
-    }
-
-}
+//func (och *OnlineHistoryConsumerHandler) MessagesDistributionHandle() {
+//    for {
+//        UserAggregationMsgs := make(map[string][]*pbMsg.MsgDataToMQ, ChannelNum)
+//        select {
+//        case cmd := <-och.msgDistributionCh:
+//            switch cmd.Cmd {
+//            case ConsumerMsgs:
+//                triggerChannelValue := cmd.Value.(TriggerChannelValue)
+//                triggerID := triggerChannelValue.triggerID
+//                consumerMessages := triggerChannelValue.cmsgList
+//                //Aggregation map[userid]message list
+//                log.Debug(triggerID, "batch messages come to distribution center", len(consumerMessages))
+//                for i := 0; i < len(consumerMessages); i++ {
+//                    msgFromMQ := pbMsg.MsgDataToMQ{}
+//                    err := proto.Unmarshal(consumerMessages[i].Value, &msgFromMQ)
+//                    if err != nil {
+//                        log.Error(triggerID, "msg_transfer Unmarshal msg err", "msg", string(consumerMessages[i].Value), "err", err.Error())
+//                        return
+//                    }
+//                    log.Debug(triggerID, "single msg come to distribution center", msgFromMQ.String(), string(consumerMessages[i].Key))
+//                    if oldM, ok := UserAggregationMsgs[string(consumerMessages[i].Key)]; ok {
+//                        oldM = append(oldM, &msgFromMQ)
+//                        UserAggregationMsgs[string(consumerMessages[i].Key)] = oldM
+//                    } else {
+//                        m := make([]*pbMsg.MsgDataToMQ, 0, 100)
+//                        m = append(m, &msgFromMQ)
+//                        UserAggregationMsgs[string(consumerMessages[i].Key)] = m
+//                    }
+//                }
+//                log.Debug(triggerID, "generate map list users len", len(UserAggregationMsgs))
+//                for userID, v := range UserAggregationMsgs {
+//                    if len(v) >= 0 {
+//                        hashCode := getHashCode(userID)
+//                        channelID := hashCode % ChannelNum
+//                        log.Debug(triggerID, "generate channelID", hashCode, channelID, userID)
+//                        //go func(cID uint32, userID string, messages []*pbMsg.MsgDataToMQ) {
+//                        och.chArrays[channelID] <- Cmd2Value{Cmd: UserMessages, Value: MsgChannelValue{userID: userID, msgList: v, triggerID: triggerID}}
+//                        //}(channelID, userID, v)
+//                    }
+//                }
+//            }
+//        }
+//
+//    }
+//
+//}
 func (mc *OnlineHistoryConsumerHandler) handleChatWs2Mongo(cMsg *sarama.ConsumerMessage, msgKey string, sess sarama.ConsumerGroupSession) {
     msg := cMsg.Value
     now := time.Now()
@@ -385,7 +385,7 @@ func (och *OnlineHistoryConsumerHandler) ConsumeClaim(sess sarama.ConsumerGroupS
             channelID := hashCode % ChannelNum
             log.Debug(triggerID, "generate channelID", hashCode, channelID, userID)
             //go func(cID uint32, userID string, messages []*pbMsg.MsgDataToMQ) {
-            och.chArrays[channelID] <- Cmd2Value{Cmd: UserMessages, Value: MsgChannelValue{userID: userID, msgList: []*pbMsg.MsgDataToMQ{&msgFromMQ}, triggerID: triggerID}}
+            och.chArrays[channelID] <- Cmd2Value{Cmd: UserMessages, Value: MsgChannelValue{userID: userID, msgList: []*pbMsg.MsgDataToMQ{&msgFromMQ}, triggerID: msgFromMQ.OperationID}}
             sess.MarkMessage(msg, "")
             //cMsg = append(cMsg, msg)
             //och.TriggerCmd(OnlineTopicBusy)
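
With MessagesDistributionHandle commented out, ConsumeClaim hashes each message's userID straight onto one of the ChannelNum worker channels (getHashCode(userID) % ChannelNum) and passes the message's own OperationID as the trigger ID. The following is a minimal, self-contained sketch of that per-user dispatch; getHashCode being a CRC32-style checksum and ChannelNum = 4 are assumptions for illustration only, not taken from this commit.

package main

import (
    "fmt"
    "hash/crc32"
)

// ChannelNum is assumed here; in the handler it is a package-level constant.
const ChannelNum = 4

// getHashCode stands in for the handler's hash helper; using a CRC32
// checksum is an assumption for this sketch.
func getHashCode(s string) uint32 {
    return crc32.ChecksumIEEE([]byte(s))
}

func main() {
    // The same userID always hashes to the same channel index, so one
    // worker goroutine sees a given user's messages in order.
    for _, userID := range []string{"user_1", "user_2", "user_3"} {
        channelID := getHashCode(userID) % ChannelNum
        fmt.Printf("userID=%s -> channelID=%d\n", userID, channelID)
    }
}

Because the hash is deterministic, every message for a given user lands on the same worker channel, which preserves per-user ordering without the separate distribution goroutine.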