Merge branch 'shichuang' of https://github.com/OpenIMSDK/Open-IM-Server into shichuang
commit 3d184c95ba
@@ -367,7 +367,7 @@ func GetSelfUserInfo(c *gin.Context) {
         log.NewInfo(req.OperationID, "GetUserInfo api return ", resp)
         c.JSON(http.StatusOK, resp)
     } else {
-        resp := api.GetSelfUserInfoResp{CommResp: api.CommResp{ErrCode: RpcResp.CommonResp.ErrCode, ErrMsg: RpcResp.CommonResp.ErrMsg}}
+        resp := api.GetSelfUserInfoResp{CommResp: api.CommResp{ErrCode: constant.ErrDB.ErrCode, ErrMsg: constant.ErrDB.ErrMsg}}
         log.NewInfo(req.OperationID, "GetUserInfo api return ", resp)
         c.JSON(http.StatusOK, resp)
     }
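For context: the hunk above swaps the error payload that the gin handler serializes on the failure path. A minimal, self-contained sketch of that response pattern is below; the CommResp struct and the fakeRPC helper are hypothetical stand-ins rather than the project's real api package.

package main

import (
	"errors"
	"net/http"

	"github.com/gin-gonic/gin"
)

// CommResp mimics the shape of the project's common error envelope (local, hypothetical copy).
type CommResp struct {
	ErrCode int32  `json:"errCode"`
	ErrMsg  string `json:"errMsg"`
}

// getSelfUserInfo sketches the handler flow: call an RPC, return its data on
// success, otherwise wrap a fixed error code into the same JSON envelope.
func getSelfUserInfo(c *gin.Context) {
	data, err := fakeRPC() // hypothetical RPC call standing in for the user RPC client
	if err != nil {
		c.JSON(http.StatusOK, CommResp{ErrCode: 500, ErrMsg: err.Error()})
		return
	}
	c.JSON(http.StatusOK, gin.H{"errCode": 0, "errMsg": "", "data": data})
}

func fakeRPC() (map[string]interface{}, error) {
	return nil, errors.New("db error") // placeholder failure
}

func main() {
	r := gin.Default()
	r.POST("/user/get_self_user_info", getSelfUserInfo)
	_ = r.Run(":8080")
}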
@@ -134,6 +134,7 @@ func (ws *WServer) MultiTerminalLoginRemoteChecker(userID string, platformID int
         resp, err := client.MultiTerminalLoginCheck(context.Background(), req)
         if err != nil {
             log.Error(operationID, "MultiTerminalLoginCheck failed ", err.Error())
+            continue
         }
         if resp.ErrCode != 0 {
             log.Error(operationID, "MultiTerminalLoginCheck errCode, errMsg: ", resp.ErrCode, resp.ErrMsg)
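The added `continue` is what keeps the fan-out loop alive: when one gateway call fails, the code logs and moves to the next endpoint instead of reading fields from a response it never got. A rough, generic sketch of that pattern follows; the checker interface and the client list are hypothetical, not the project's relay client.

package main

import (
	"context"
	"fmt"
)

// checker is a hypothetical stand-in for a per-gateway RPC client.
type checker interface {
	MultiTerminalLoginCheck(ctx context.Context, userID string, platformID int32) (errCode int32, errMsg string, err error)
}

// checkAll calls every gateway and skips the ones that fail rather than
// inspecting a response that was never produced.
func checkAll(clients []checker, userID string, platformID int32) {
	for _, c := range clients {
		errCode, errMsg, err := c.MultiTerminalLoginCheck(context.Background(), userID, platformID)
		if err != nil {
			fmt.Println("MultiTerminalLoginCheck failed:", err)
			continue // skip this endpoint and try the next one
		}
		if errCode != 0 {
			fmt.Println("MultiTerminalLoginCheck errCode, errMsg:", errCode, errMsg)
		}
	}
}

func main() {
	checkAll(nil, "user1", 1) // no clients registered in this sketch; the loop simply does nothing
}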
@@ -13,6 +13,7 @@ import (
     "Open_IM/pkg/common/log"
     pbChat "Open_IM/pkg/proto/msg"
     pbPush "Open_IM/pkg/proto/push"
+    "Open_IM/pkg/utils"
     "github.com/Shopify/sarama"
     "github.com/golang/protobuf/proto"
 )
@@ -43,6 +44,11 @@ func (ms *PushConsumerHandler) handleMs2PsChat(msg []byte) {
         MsgData: msgFromMQ.MsgData,
         PushToUserID: msgFromMQ.PushToUserID,
     }
+    sec := msgFromMQ.MsgData.SendTime / 1000
+    nowSec := utils.GetCurrentTimestampBySecond()
+    if nowSec-sec > 10 {
+        return
+    }
     switch msgFromMQ.MsgData.SessionType {
     case constant.SuperGroupChatType:
         MsgToSuperGroupUser(pbData)
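The five added lines drop push requests whose message is already more than 10 seconds old before any offline-push work happens. A standalone sketch of the same check, assuming SendTime is a Unix timestamp in milliseconds (which the division by 1000 implies):

package main

import (
	"fmt"
	"time"
)

// isStale reports whether a message sent at sendTimeMs (Unix milliseconds)
// is older than maxAge and should be skipped.
func isStale(sendTimeMs int64, maxAge time.Duration) bool {
	sec := sendTimeMs / 1000    // milliseconds -> seconds, as in the hunk
	nowSec := time.Now().Unix() // counterpart of utils.GetCurrentTimestampBySecond()
	return nowSec-sec > int64(maxAge.Seconds())
}

func main() {
	old := time.Now().Add(-15*time.Second).UnixNano() / int64(time.Millisecond)
	fmt.Println(isStale(old, 10*time.Second)) // true: this message would be dropped
}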
@@ -59,6 +65,7 @@ func (ms *PushConsumerHandler) ConsumeClaim(sess sarama.ConsumerGroupSession,
     for msg := range claim.Messages() {
         log.NewDebug("", "kafka get info to mysql", "msgTopic", msg.Topic, "msgPartition", msg.Partition, "msg", string(msg.Value))
         ms.msgHandle[msg.Topic](msg.Value)
         sess.MarkMessage(msg, "")
     }
     return nil
 }
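For readers unfamiliar with sarama's consumer-group API, ConsumeClaim is the per-partition loop: range over claim.Messages(), dispatch by topic, then mark the offset. A minimal handler sketch under that contract; the msgHandle map is a simplified stand-in for the handler's real dispatch table.

package main

import (
	"log"

	"github.com/Shopify/sarama"
)

// pushHandler dispatches each Kafka message to a per-topic handler and then
// marks the offset so the group does not re-deliver it.
type pushHandler struct {
	msgHandle map[string]func(value []byte)
}

func (h *pushHandler) Setup(sarama.ConsumerGroupSession) error   { return nil }
func (h *pushHandler) Cleanup(sarama.ConsumerGroupSession) error { return nil }

func (h *pushHandler) ConsumeClaim(sess sarama.ConsumerGroupSession, claim sarama.ConsumerGroupClaim) error {
	for msg := range claim.Messages() {
		log.Printf("topic=%s partition=%d len=%d", msg.Topic, msg.Partition, len(msg.Value))
		if handle, ok := h.msgHandle[msg.Topic]; ok {
			handle(msg.Value)
		}
		sess.MarkMessage(msg, "") // commit progress only after the handler ran
	}
	return nil
}

// Compile-time check that pushHandler satisfies sarama.ConsumerGroupHandler.
var _ sarama.ConsumerGroupHandler = (*pushHandler)(nil)

func main() {}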
@@ -79,10 +79,14 @@ func MsgToUser(pushMsg *pbPush.PushMsgReq) {
         }
     }
     if pushMsg.MsgData.ContentType == constant.SignalingNotification {
-        if err := db.DB.HandleSignalInfo(pushMsg.OperationID, pushMsg.MsgData); err != nil {
+        isSend, err := db.DB.HandleSignalInfo(pushMsg.OperationID, pushMsg.MsgData, pushMsg.PushToUserID)
+        if err != nil {
             log.NewError(pushMsg.OperationID, utils.GetSelfFuncName(), err.Error(), pushMsg.MsgData)
             return
         }
+        if !isSend {
+            return
+        }
     }
     customContent := OpenIMContent{
         SessionType: int(pushMsg.MsgData.SessionType),
internal/rpc/cache/cache.go
@@ -3,16 +3,17 @@ package cache
 import (
     "Open_IM/pkg/common/config"
     "Open_IM/pkg/common/constant"
-    "Open_IM/pkg/common/db/rocks_cache"
+    rocksCache "Open_IM/pkg/common/db/rocks_cache"
     "Open_IM/pkg/common/log"
     "Open_IM/pkg/grpc-etcdv3/getcdv3"
     pbCache "Open_IM/pkg/proto/cache"
     "Open_IM/pkg/utils"
     "context"
-    "google.golang.org/grpc"
     "net"
     "strconv"
     "strings"
+
+    "google.golang.org/grpc"
 )

 type cacheServer struct {
@@ -67,6 +68,7 @@ func (s *cacheServer) Run() {
         log.NewError("0", "RegisterEtcd failed ", err.Error())
         return
     }
+    go rocksCache.DelKeys()
     err = srv.Serve(listener)
     if err != nil {
         log.NewError("0", "Serve failed ", err.Error())
@@ -258,10 +258,10 @@ func (d *DataBases) CleanUpOneUserAllMsgFromRedis(userID string, operationID str
     return nil
 }

-func (d *DataBases) HandleSignalInfo(operationID string, msg *pbCommon.MsgData) error {
+func (d *DataBases) HandleSignalInfo(operationID string, msg *pbCommon.MsgData, pushToUserID string) (isSend bool, err error) {
     req := &pbRtc.SignalReq{}
     if err := proto.Unmarshal(msg.Content, req); err != nil {
-        return err
+        return false, err
     }
     //log.NewDebug(pushMsg.OperationID, utils.GetSelfFuncName(), "SignalReq: ", req.String())
     var inviteeUserIDList []string
@@ -273,37 +273,40 @@ func (d *DataBases) HandleSignalInfo(operationID string, msg *pbCommon.MsgData)
     case *pbRtc.SignalReq_InviteInGroup:
         inviteeUserIDList = signalInfo.InviteInGroup.Invitation.InviteeUserIDList
         isInviteSignal = true
+        if !utils.IsContain(pushToUserID, inviteeUserIDList) {
+            return false, nil
+        }
     case *pbRtc.SignalReq_HungUp, *pbRtc.SignalReq_Cancel, *pbRtc.SignalReq_Reject, *pbRtc.SignalReq_Accept:
-        return errors.New("signalInfo do not need offlinePush")
+        return false, errors.New("signalInfo do not need offlinePush")
     default:
         log2.NewDebug(operationID, utils.GetSelfFuncName(), "req invalid type", string(msg.Content))
-        return nil
+        return false, nil
     }
     if isInviteSignal {
-        log2.NewInfo(operationID, utils.GetSelfFuncName(), "invite userID list:", inviteeUserIDList)
+        log2.NewDebug(operationID, utils.GetSelfFuncName(), "invite userID list:", inviteeUserIDList)
         for _, userID := range inviteeUserIDList {
             log2.NewInfo(operationID, utils.GetSelfFuncName(), "invite userID:", userID)
             timeout, err := strconv.Atoi(config.Config.Rtc.SignalTimeout)
             if err != nil {
-                return err
+                return false, err
             }
             keyList := SignalListCache + userID
             err = d.RDB.LPush(context.Background(), keyList, msg.ClientMsgID).Err()
             if err != nil {
-                return err
+                return false, err
             }
             err = d.RDB.Expire(context.Background(), keyList, time.Duration(timeout)*time.Second).Err()
             if err != nil {
-                return err
+                return false, err
             }
             key := SignalCache + msg.ClientMsgID
             err = d.RDB.Set(context.Background(), key, msg.Content, time.Duration(timeout)*time.Second).Err()
             if err != nil {
-                return err
+                return false, err
             }
         }
     }
-    return nil
+    return true, nil
 }

 func (d *DataBases) GetSignalInfoFromCacheByClientMsgID(clientMsgID string) (invitationInfo *pbRtc.SignalInviteReq, err error) {
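The body above caches each invite per invitee in Redis: the invitation's client message ID is pushed onto a per-user list, that list gets the signaling timeout as its TTL, and the raw invite content is stored under its own key with the same TTL. A rough go-redis sketch of that shape, with hypothetical key prefixes and a plain []byte payload instead of the protobuf message:

package main

import (
	"context"
	"time"

	"github.com/go-redis/redis/v8"
)

// Hypothetical prefixes mirroring SignalListCache / SignalCache.
const (
	signalListCache = "SIGNAL_LIST_CACHE:"
	signalCache     = "SIGNAL_CACHE:"
)

// cacheSignalInvite stores one invitation for one invitee: append the message ID
// to the user's pending-invite list and keep the payload under its own key,
// both expiring after the signaling timeout.
func cacheSignalInvite(ctx context.Context, rdb *redis.Client, userID, clientMsgID string, content []byte, timeout time.Duration) error {
	listKey := signalListCache + userID
	if err := rdb.LPush(ctx, listKey, clientMsgID).Err(); err != nil {
		return err
	}
	if err := rdb.Expire(ctx, listKey, timeout).Err(); err != nil {
		return err
	}
	return rdb.Set(ctx, signalCache+clientMsgID, content, timeout).Err()
}

func main() {
	rdb := redis.NewClient(&redis.Options{Addr: "127.0.0.1:6379"})
	_ = cacheSignalInvite(context.Background(), rdb, "user1", "msg1", []byte("invite payload"), 30*time.Second)
}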
@@ -36,7 +36,7 @@ const (
     conversationIDListCache = "CONVERSATION_ID_LIST_CACHE:"
 )

-func init() {
+func DelKeys() {
     fmt.Println("init to del old keys")
     for _, key := range []string{groupCache, friendRelationCache, blackListCache, userInfoCache, groupInfoCache, groupOwnerIDCache, joinedGroupListCache,
         groupMemberInfoCache, groupAllMemberInfoCache, allFriendInfoCache} {
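Renaming init() to DelKeys() turns an implicit import-time cleanup into an explicit call that the cache server launches in the background (the go rocksCache.DelKeys() line added in Run above). A generic sketch of that idea, with a hypothetical cleanup function in place of the real Redis scan:

package main

import (
	"fmt"
	"time"
)

// delOldKeys stands in for rocksCache.DelKeys: walk known key prefixes and purge
// stale entries. Keeping this out of init() means importing the package stays cheap
// and the caller decides when, and whether, the cleanup runs.
func delOldKeys(prefixes []string) {
	for _, p := range prefixes {
		fmt.Println("cleaning keys with prefix", p) // placeholder for the real scan-and-delete work
	}
}

func main() {
	// Launched in the background at server start, like go rocksCache.DelKeys().
	go delOldKeys([]string{"GROUP_CACHE:", "FRIEND_RELATION_CACHE:"})
	time.Sleep(100 * time.Millisecond) // only so this sketch's goroutine gets to run
}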
@@ -6,7 +6,6 @@ import (
     "Open_IM/pkg/utils"
     "context"
     "fmt"

     "go.etcd.io/etcd/api/v3/mvccpb"
     clientv3 "go.etcd.io/etcd/client/v3"

@@ -277,12 +276,42 @@ func (r *Resolver) watch(prefix string, addrList []resolver.Address) {
         }
     }

+var Conn4UniqueList []*grpc.ClientConn
+var Conn4UniqueListMtx sync.RWMutex
+var IsUpdateStart bool
+var IsUpdateStartMtx sync.RWMutex
+
 func GetDefaultGatewayConn4Unique(schema, etcdaddr, operationID string) []*grpc.ClientConn {
-    grpcConns := getConn4Unique(schema, etcdaddr, config.Config.RpcRegisterName.OpenImRelayName)
+    IsUpdateStartMtx.Lock()
+    if IsUpdateStart == false {
+        Conn4UniqueList = getConn4Unique(schema, etcdaddr, config.Config.RpcRegisterName.OpenImRelayName)
+        go func() {
+            for {
+                select {
+                case <-time.After(time.Second * time.Duration(30)):
+                    Conn4UniqueListMtx.Lock()
+                    Conn4UniqueList = getConn4Unique(schema, etcdaddr, config.Config.RpcRegisterName.OpenImRelayName)
+                    Conn4UniqueListMtx.Unlock()
+                }
+            }
+        }()
+    }
+    IsUpdateStart = true
+    IsUpdateStartMtx.Unlock()
+
+    Conn4UniqueListMtx.Lock()
+    var clientConnList []*grpc.ClientConn
+    for _, v := range Conn4UniqueList {
+        clientConnList = append(clientConnList, v)
+    }
+    Conn4UniqueListMtx.Unlock()
+
+    //grpcConns := getConn4Unique(schema, etcdaddr, config.Config.RpcRegisterName.OpenImRelayName)
+    grpcConns := clientConnList
     if len(grpcConns) > 0 {
         return grpcConns
     }
-    log.NewWarn(operationID, utils.GetSelfFuncName(), " len(grpcConns) < 0 ", schema, etcdaddr, config.Config.RpcRegisterName.OpenImRelayName)
+    log.NewWarn(operationID, utils.GetSelfFuncName(), " len(grpcConns) == 0 ", schema, etcdaddr, config.Config.RpcRegisterName.OpenImRelayName)
     grpcConns = GetDefaultGatewayConn4UniqueFromcfg(operationID)
     log.NewDebug(operationID, utils.GetSelfFuncName(), config.Config.RpcRegisterName.OpenImRelayName, grpcConns)
     return grpcConns
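The new GetDefaultGatewayConn4Unique keeps the gateway connection list in a package-level cache, starts a single goroutine that rebuilds it every 30 seconds, and hands callers a copy taken under the lock. A condensed, generic sketch of that refresh-and-snapshot pattern; the fetch function stands in for getConn4Unique, and sync.Once replaces the IsUpdateStart flag used in the hunk:

package main

import (
	"fmt"
	"sync"
	"time"
)

// connCache keeps a periodically refreshed snapshot of expensive-to-build values
// (plain strings here stand in for *grpc.ClientConn).
type connCache struct {
	mu    sync.RWMutex
	conns []string
	once  sync.Once
	fetch func() []string
}

// Get starts the refresh loop on first use, then returns a copy of the cached slice
// so callers never share the backing array the refresher is about to overwrite.
func (c *connCache) Get() []string {
	c.once.Do(func() {
		c.mu.Lock()
		c.conns = c.fetch()
		c.mu.Unlock()
		go func() {
			for range time.Tick(30 * time.Second) {
				fresh := c.fetch()
				c.mu.Lock()
				c.conns = fresh
				c.mu.Unlock()
			}
		}()
	})
	c.mu.RLock()
	defer c.mu.RUnlock()
	return append([]string(nil), c.conns...)
}

func main() {
	cache := &connCache{fetch: func() []string { return []string{"gateway-1", "gateway-2"} }}
	fmt.Println(cache.Get())
}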