Mirror of https://github.com/openimsdk/open-im-server.git (synced 2025-04-06 04:15:46 +08:00)

Commit 2baf208c02: Merge branch 'errcode' of github.com:OpenIMSDK/Open-IM-Server into errcode
@@ -1 +1 @@
Subproject commit 48fd66a6eea7ff4c5ccacddd7929bf8d7b5b40fa
Subproject commit 94da8cc1074e9b6d14a94a41bf37885b27253a2d
@@ -1,8 +1,10 @@
package main

import (
    "bytes"
    "context"
    "fmt"
    "gopkg.in/yaml.v3"
    "net"
    "strconv"

@@ -37,6 +39,13 @@ func run(port int) error {
    if err != nil {
        return err
    }
    buf := bytes.NewBuffer(nil)
    if err := yaml.NewEncoder(buf).Encode(config.Config); err != nil {
        return err
    }
    if err := zk.RegisterConf2Registry(constant.OpenIMCommonConfigKey, buf.Bytes()); err != nil {
        return err
    }
    log.NewPrivateLog(constant.LogFileName)
    router := api.NewGinRouter(zk, rdb)
    var address string
go.mod (2 changes)

@@ -4,7 +4,7 @@ go 1.18

require (
    firebase.google.com/go v3.13.0+incompatible
    github.com/OpenIMSDK/openKeeper v0.0.4
    github.com/OpenIMSDK/openKeeper v0.0.6
    github.com/OpenIMSDK/open_utils v1.0.8
    github.com/Shopify/sarama v1.32.0
    github.com/antonfisher/nested-logrus-formatter v1.3.1
go.sum (5 changes)

@@ -375,8 +375,8 @@ firebase.google.com/go v3.13.0+incompatible/go.mod h1:xlah6XbEyW6tbfSklcfe5FHJIw
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
github.com/OpenIMSDK/openKeeper v0.0.4 h1:WwieeVzNCMYvd6HWRFh0cqrem3OoXDSJePA7taZ1ahQ=
github.com/OpenIMSDK/openKeeper v0.0.4/go.mod h1:RvyRXEcvWbonkmHLtT8KxGSCNlXY7OfDohhu53E6INU=
github.com/OpenIMSDK/openKeeper v0.0.6 h1:0eseL/h+evxZAM5Dqs85FEDbDFkTjpVwHBSINQOca5M=
github.com/OpenIMSDK/openKeeper v0.0.6/go.mod h1:HwHgo+p3E+aAeiLOSyDwVB2zFJtu+QvnFNlfNjFG9vA=
github.com/OpenIMSDK/open_utils v1.0.8 h1:IopxWgJwEF5ZAPsRuiZZOfcxNOQOCt/p8VDENcHN9r4=
github.com/OpenIMSDK/open_utils v1.0.8/go.mod h1:FLoaQblWUVKQgqt2LrNzfSZLT6D3DICBn1kcOMDLUOI=
github.com/Shopify/sarama v1.29.0 h1:ARid8o8oieau9XrHI55f/L3EoRAhm9px6sonbD7yuUE=

@@ -751,7 +751,6 @@ github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6po
github.com/rs/xid v1.2.1 h1:mhH9Nq+C1fY2l1XIpgxIiUOfNpRBYH1kKcr+qfKgjRc=
github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/samuel/go-zookeeper v0.0.0-20201211165307-7117e9ea2414/go.mod h1:gi+0XIa01GRL2eRQVjQkKGqKF3SF9vZR/HnPullcV2E=
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88=
internal/api/custom_validator.go (new file, 23 lines)

@@ -0,0 +1,23 @@
package api

import (
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/constant"
    "github.com/go-playground/validator/v10"
)

func RequiredIf(fl validator.FieldLevel) bool {
    sessionType := fl.Parent().FieldByName("SessionType").Int()
    switch sessionType {
    case constant.SingleChatType, constant.NotificationChatType:
        if fl.FieldName() == "RecvID" {
            return fl.Field().String() != ""
        }
    case constant.GroupChatType, constant.SuperGroupChatType:
        if fl.FieldName() == "GroupID" {
            return fl.Field().String() != ""
        }
    default:
        return true
    }
    return true
}
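Note: RequiredIf inspects the sibling SessionType field of whatever struct it is attached to, and route.go later in this diff registers it with gin's binding engine under the tag name "required_if". A minimal sketch of a request struct that would opt into it; the struct name and field tags here are illustrative assumptions, not taken from this diff:

package apistruct

// Hypothetical request type showing how the registered "required_if" tag could
// be used. RequiredIf requires RecvID for single/notification chats and
// GroupID for (super) group chats, keyed off SessionType.
type SendMsgExample struct {
    SessionType int32  `json:"sessionType" binding:"required"`
    RecvID      string `json:"recvID" binding:"required_if"`
    GroupID     string `json:"groupID" binding:"required_if"`
}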
@@ -2,14 +2,13 @@ package api

import (
    "context"
    "errors"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/a2r"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/apiresp"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/apistruct"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/config"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/constant"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/log"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/tracelog"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/mcontext"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/discoveryregistry"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/errs"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/proto/msg"

@@ -23,23 +22,23 @@ import (

var _ context.Context // works around a GoLand editor bug

func NewMsg(c discoveryregistry.SvcDiscoveryRegistry) *Msg {
    return &Msg{c: c, validate: validator.New()}
func NewMsg(c discoveryregistry.SvcDiscoveryRegistry) *Message {
    return &Message{c: c, validate: validator.New()}
}

type Msg struct {
type Message struct {
    c        discoveryregistry.SvcDiscoveryRegistry
    validate *validator.Validate
}

func (Msg) SetOptions(options map[string]bool, value bool) {
func (Message) SetOptions(options map[string]bool, value bool) {
    utils.SetSwitchFromOptions(options, constant.IsHistory, value)
    utils.SetSwitchFromOptions(options, constant.IsPersistent, value)
    utils.SetSwitchFromOptions(options, constant.IsSenderSync, value)
    utils.SetSwitchFromOptions(options, constant.IsConversationUpdate, value)
}

func (m Msg) newUserSendMsgReq(c *gin.Context, params *apistruct.ManagementSendMsgReq) *msg.SendMsgReq {
func (m Message) newUserSendMsgReq(c *gin.Context, params *apistruct.ManagementSendMsgReq) *msg.SendMsgReq {
    var newContent string
    var err error
    switch params.ContentType {

@@ -100,13 +99,13 @@ func (m Msg) newUserSendMsgReq(c *gin.Context, params *apistruct.ManagementSendM
    tips.JsonDetail = utils.StructToJsonString(params.Content)
    pbData.MsgData.Content, err = proto.Marshal(&tips)
    if err != nil {
        log.Error(tracelog.GetOperationID(c), "Marshal failed ", err.Error(), tips.String())
        log.Error(mcontext.GetOperationID(c), "Marshal failed ", err.Error(), tips.String())
    }
    }
    return &pbData
}

func (m *Msg) client() (msg.MsgClient, error) {
func (m *Message) client() (msg.MsgClient, error) {
    conn, err := m.c.GetConn(config.Config.RpcRegisterName.OpenImMsgName)
    if err != nil {
        return nil, err

@@ -114,48 +113,49 @@ func (m *Msg) client() (msg.MsgClient, error) {
    return msg.NewMsgClient(conn), nil
}

func (m *Msg) GetSeq(c *gin.Context) {
func (m *Message) GetSeq(c *gin.Context) {
    a2r.Call(msg.MsgClient.GetMaxAndMinSeq, m.client, c)
}

func (m *Msg) PullMsgBySeqs(c *gin.Context) {
func (m *Message) PullMsgBySeqs(c *gin.Context) {
    a2r.Call(msg.MsgClient.PullMessageBySeqs, m.client, c)
}

func (m *Msg) DelMsg(c *gin.Context) {
func (m *Message) DelMsg(c *gin.Context) {
    a2r.Call(msg.MsgClient.DelMsgs, m.client, c)
}

func (m *Msg) DelSuperGroupMsg(c *gin.Context) {
func (m *Message) DelSuperGroupMsg(c *gin.Context) {
    a2r.Call(msg.MsgClient.DelSuperGroupMsg, m.client, c)
}

func (m *Msg) ClearMsg(c *gin.Context) {
func (m *Message) ClearMsg(c *gin.Context) {
    a2r.Call(msg.MsgClient.ClearMsg, m.client, c)
}

func (m *Msg) SetMessageReactionExtensions(c *gin.Context) {
func (m *Message) SetMessageReactionExtensions(c *gin.Context) {
    a2r.Call(msg.MsgClient.SetMessageReactionExtensions, m.client, c)
}

func (m *Msg) GetMessageListReactionExtensions(c *gin.Context) {
func (m *Message) GetMessageListReactionExtensions(c *gin.Context) {
    a2r.Call(msg.MsgClient.GetMessagesReactionExtensions, m.client, c)
}

func (m *Msg) AddMessageReactionExtensions(c *gin.Context) {
func (m *Message) AddMessageReactionExtensions(c *gin.Context) {
    a2r.Call(msg.MsgClient.AddMessageReactionExtensions, m.client, c)
}

func (m *Msg) DeleteMessageReactionExtensions(c *gin.Context) {
func (m *Message) DeleteMessageReactionExtensions(c *gin.Context) {
    a2r.Call(msg.MsgClient.DeleteMessageReactionExtensions, m.client, c)
}

func (m *Msg) SendMsg(c *gin.Context) {
func (m *Message) SendMessage(c *gin.Context) {
    params := apistruct.ManagementSendMsgReq{}
    if err := c.BindJSON(&params); err != nil {
        apiresp.GinError(c, err)
        apiresp.GinError(c, errs.ErrArgs.WithDetail(err.Error()).Wrap())
        return
    }

    var data interface{}
    switch params.ContentType {
    case constant.Text:
@@ -181,32 +181,17 @@ func (m *Msg) SendMsg(c *gin.Context) {
        data = apistruct.CustomElem{}
    case constant.CustomOnlineOnly:
        data = apistruct.CustomElem{}
    //case constant.HasReadReceipt:
    //case constant.Typing:
    //case constant.Quote:
    default:
        apiresp.GinError(c, errors.New("wrong contentType"))
        apiresp.GinError(c, errs.ErrArgs.WithDetail("not support err contentType").Wrap())
        return
    }
    if err := mapstructure.WeakDecode(params.Content, &data); err != nil {
        apiresp.GinError(c, errs.ErrData)
        apiresp.GinError(c, errs.ErrArgs.Wrap(err.Error()))
        return
    } else if err := m.validate.Struct(data); err != nil {
        apiresp.GinError(c, errs.ErrData)
        apiresp.GinError(c, errs.ErrArgs.Wrap(err.Error()))
        return
    }
    switch params.SessionType {
    case constant.SingleChatType:
        if len(params.RecvID) == 0 {
            apiresp.GinError(c, errs.ErrData)
            return
        }
    case constant.GroupChatType, constant.SuperGroupChatType:
        if len(params.GroupID) == 0 {
            apiresp.GinError(c, errs.ErrData)
            return
        }
    }
    pbReq := m.newUserSendMsgReq(c, &params)
    conn, err := m.c.GetConn(config.Config.RpcRegisterName.OpenImMsgName)
    if err != nil {

@@ -226,24 +211,24 @@ func (m *Msg) SendMsg(c *gin.Context) {
        Status: int32(status),
    })
    if err != nil {
        log.NewError(tracelog.GetOperationID(c), "SetSendMsgStatus failed")
        log.NewError(mcontext.GetOperationID(c), "SetSendMsgStatus failed")
    }
    resp := apistruct.ManagementSendMsgResp{ResultList: sdkws.UserSendMsgResp{ServerMsgID: respPb.ServerMsgID, ClientMsgID: respPb.ClientMsgID, SendTime: respPb.SendTime}}
    apiresp.GinSuccess(c, resp)
    //resp := apistruct.ManagementSendMsgResp{ResultList: sdkws.UserSendMsgResp{ServerMsgID: respPb.ServerMsgID, ClientMsgID: respPb.ClientMsgID, SendTime: respPb.SendTime}}
    apiresp.GinSuccess(c, respPb)
}

func (m *Msg) ManagementBatchSendMsg(c *gin.Context) {
func (m *Message) ManagementBatchSendMsg(c *gin.Context) {
    a2r.Call(msg.MsgClient.SendMsg, m.client, c)
}

func (m *Msg) CheckMsgIsSendSuccess(c *gin.Context) {
func (m *Message) CheckMsgIsSendSuccess(c *gin.Context) {
    a2r.Call(msg.MsgClient.GetSendMsgStatus, m.client, c)
}

func (m *Msg) GetUsersOnlineStatus(c *gin.Context) {
func (m *Message) GetUsersOnlineStatus(c *gin.Context) {
    a2r.Call(msg.MsgClient.GetSendMsgStatus, m.client, c)
}

func (m *Msg) AccountCheck(c *gin.Context) {
func (m *Message) AccountCheck(c *gin.Context) {
    a2r.Call(msg.MsgClient.GetSendMsgStatus, m.client, c)
}
@@ -7,6 +7,8 @@ import (
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/prome"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/discoveryregistry"
    "github.com/gin-gonic/gin"
    "github.com/gin-gonic/gin/binding"
    "github.com/go-playground/validator/v10"
    "github.com/go-redis/redis/v8"
    "google.golang.org/grpc"
    "google.golang.org/grpc/credentials/insecure"

@@ -19,6 +21,9 @@ func NewGinRouter(zk discoveryregistry.SvcDiscoveryRegistry, rdb redis.Universal
    //gin.DefaultWriter = io.MultiWriter(f)
    //gin.SetMode(gin.DebugMode)
    r := gin.New()
    if v, ok := binding.Validator.Engine().(*validator.Validate); ok {
        _ = v.RegisterValidation("required_if", RequiredIf)
    }
    log.Info("load config: ", config.Config)
    r.Use(gin.Recovery(), mw.CorsHandler(), mw.GinParseOperationID())
    if config.Config.Prometheus.Enable {

@@ -126,7 +131,7 @@ func NewGinRouter(zk discoveryregistry.SvcDiscoveryRegistry, rdb redis.Universal
    m := NewMsg(zk)
    msgGroup.Use(mw.GinParseToken(rdb))
    msgGroup.POST("/newest_seq", m.GetSeq)
    msgGroup.POST("/send_msg", m.SendMsg)
    msgGroup.POST("/send_msg", m.SendMessage)
    msgGroup.POST("/pull_msg_by_seq", m.PullMsgBySeqs)
    msgGroup.POST("/del_msg", m.DelMsg)
    msgGroup.POST("/del_super_group_msg", m.DelSuperGroupMsg)
@@ -6,7 +6,7 @@ import (
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/config"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/constant"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/http"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/tracelog"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/mcontext"
    "time"
)

@@ -22,7 +22,7 @@ func CallbackUserOnline(ctx context.Context, userID string, platformID int, isAp
    UserStatusCallbackReq: cbapi.UserStatusCallbackReq{
        UserStatusBaseCallback: cbapi.UserStatusBaseCallback{
            CallbackCommand: constant.CallbackUserOnlineCommand,
            OperationID: tracelog.GetOperationID(ctx),
            OperationID: mcontext.GetOperationID(ctx),
            PlatformID: platformID,
            Platform: constant.PlatformIDToName(platformID),
        },

@@ -44,7 +44,7 @@ func CallbackUserOffline(ctx context.Context, userID string, platformID int, con
    UserStatusCallbackReq: cbapi.UserStatusCallbackReq{
        UserStatusBaseCallback: cbapi.UserStatusBaseCallback{
            CallbackCommand: constant.CallbackUserOfflineCommand,
            OperationID: tracelog.GetOperationID(ctx),
            OperationID: mcontext.GetOperationID(ctx),
            PlatformID: platformID,
            Platform: constant.PlatformIDToName(platformID),
        },

@@ -65,7 +65,7 @@ func CallbackUserKickOff(ctx context.Context, userID string, platformID int) err
    UserStatusCallbackReq: cbapi.UserStatusCallbackReq{
        UserStatusBaseCallback: cbapi.UserStatusBaseCallback{
            CallbackCommand: constant.CallbackUserKickOffCommand,
            OperationID: tracelog.GetOperationID(ctx),
            OperationID: mcontext.GetOperationID(ctx),
            PlatformID: platformID,
            Platform: constant.PlatformIDToName(platformID),
        },
@@ -4,6 +4,9 @@ import (
    "context"
    "errors"
    "fmt"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/constant"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/log"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/mcontext"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/proto/sdkws"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/utils"
    "runtime/debug"

@@ -36,7 +39,7 @@ type Client struct {
    isCompress bool
    userID string
    isBackground bool
    connID string
    ctx *UserConnContext
    onlineAt int64 // online timestamp in milliseconds
    longConnServer LongConnServer
    closed bool

@@ -49,7 +52,7 @@ func newClient(ctx *UserConnContext, conn LongConn, isCompress bool) *Client {
    platformID: utils.StringToInt(ctx.GetPlatformID()),
    isCompress: isCompress,
    userID: ctx.GetUserID(),
    connID: ctx.GetConnID(),
    ctx: ctx,
    onlineAt: utils.GetCurrentTimestampByMill(),
    }
}

@@ -59,7 +62,7 @@ func (c *Client) ResetClient(ctx *UserConnContext, conn LongConn, isCompress boo
    c.platformID = utils.StringToInt(ctx.GetPlatformID())
    c.isCompress = isCompress
    c.userID = ctx.GetUserID()
    c.connID = ctx.GetConnID()
    c.ctx = ctx
    c.onlineAt = utils.GetCurrentTimestampByMill()
    c.longConnServer = longConnServer
}

@@ -68,7 +71,7 @@ func (c *Client) readMessage() {
    if r := recover(); r != nil {
        fmt.Println("socket have panic err:", r, string(debug.Stack()))
    }
    //c.close()
    c.close()
    }()
    //var returnErr error
    for {

@@ -91,6 +94,7 @@ func (c *Client) readMessage() {
    }
    returnErr = c.handleMessage(message)
    if returnErr != nil {
        log.ZError(context.Background(), "WSGetNewestSeq", returnErr)
        break
    }

@@ -117,16 +121,14 @@ func (c *Client) handleMessage(message []byte) error {
    if binaryReq.SendID != c.userID {
        return errors.New("exception conn userID not same to req userID")
    }
    ctx := context.Background()
    ctx = context.WithValue(ctx, ConnID, c.connID)
    ctx = context.WithValue(ctx, OperationID, binaryReq.OperationID)
    ctx = context.WithValue(ctx, CommonUserID, binaryReq.SendID)
    ctx = context.WithValue(ctx, PlatformID, c.platformID)
    ctx := mcontext.WithMustInfoCtx([]string{binaryReq.OperationID, binaryReq.SendID, constant.PlatformIDToName(c.platformID), c.ctx.GetConnID()})
    var messageErr error
    var resp []byte
    switch binaryReq.ReqIdentifier {
    case WSGetNewestSeq:
        resp, messageErr = c.longConnServer.GetSeq(ctx, binaryReq)
        log.ZError(ctx, "WSGetNewestSeq", messageErr, "resp", resp)

    case WSSendMsg:
        resp, messageErr = c.longConnServer.SendMessage(ctx, binaryReq)
    case WSSendSignalMsg:
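Note: Client.handleMessage collapses four context.WithValue calls into a single mcontext.WithMustInfoCtx call that receives the operation ID, sender ID, platform name, and connection ID. The helper's implementation is not part of this diff; a hedged sketch of the general idea, assuming the keys are the same constants that UserConnContext.Value switches on:

package mcontextsketch

import (
    "context"

    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/constant"
)

// Hypothetical illustration only; the real mcontext.WithMustInfoCtx may differ
// (for example by using dedicated key types instead of plain strings).
func withMustInfoCtx(values []string) context.Context {
    keys := []string{constant.OperationID, constant.OpUserID, constant.OpUserPlatform, constant.ConnID}
    ctx := context.Background()
    for i, key := range keys {
        ctx = context.WithValue(ctx, key, values[i]) // operationID, sendID, platform, connID, in that order
    }
    return ctx
}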
@@ -1,9 +1,11 @@
package msggateway

import (
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/constant"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/utils"
    "net/http"
    "strconv"
    "time"
)

type UserConnContext struct {

@@ -12,6 +14,36 @@ type UserConnContext struct {
    Path string
    Method string
    RemoteAddr string
    ConnID string
}

func (c *UserConnContext) Deadline() (deadline time.Time, ok bool) {
    return
}

func (c *UserConnContext) Done() <-chan struct{} {
    return nil
}

func (c *UserConnContext) Err() error {
    return nil
}

func (c *UserConnContext) Value(key any) any {
    switch key {
    case constant.OpUserID:
        return c.GetUserID()
    case constant.OperationID:
        return c.GetOperationID()
    case constant.ConnID:
        return c.GetConnID()
    case constant.OpUserPlatform:
        return constant.PlatformIDToName(utils.StringToInt(c.GetPlatformID()))
    case constant.RemoteAddr:
        return c.RemoteAddr
    default:
        return ""
    }
}

func newContext(respWriter http.ResponseWriter, req *http.Request) *UserConnContext {

@@ -21,6 +53,7 @@ func newContext(respWriter http.ResponseWriter, req *http.Request) *UserConnCont
    Path: req.URL.Path,
    Method: req.Method,
    RemoteAddr: req.RemoteAddr,
    ConnID: utils.Md5(req.RemoteAddr + "_" + strconv.Itoa(int(utils.GetCurrentTimestampByMill()))),
    }
}
func (c *UserConnContext) Query(key string) (string, bool) {

@@ -44,7 +77,7 @@ func (c *UserConnContext) ErrReturn(error string, code int) {
    http.Error(c.RespWriter, error, code)
}
func (c *UserConnContext) GetConnID() string {
    return utils.Md5(c.RemoteAddr + "_" + strconv.Itoa(int(utils.GetCurrentTimestampByMill())))
    return c.ConnID
}
func (c *UserConnContext) GetUserID() string {
    return c.Req.URL.Query().Get(WsUserID)

@@ -52,3 +85,6 @@ func (c *UserConnContext) GetUserID() string {
func (c *UserConnContext) GetPlatformID() string {
    return c.Req.URL.Query().Get(PlatformID)
}
func (c *UserConnContext) GetOperationID() string {
    return c.Req.URL.Query().Get(OperationID)
}
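Note: with Deadline, Done, Err, and Value defined, *UserConnContext satisfies the standard context.Context interface, so a client's connection context can be handed straight to code that expects a context (the structured logging and mcontext helpers used elsewhere in this diff). A small sketch of that property, using the constant keys shown in the Value switch:

package msggateway

import (
    "context"

    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/constant"
)

// Compile-time check: *UserConnContext now implements context.Context.
var _ context.Context = (*UserConnContext)(nil)

// Any code that accepts a context can read connection metadata directly.
func operationIDOf(ctx context.Context) string {
    if v, ok := ctx.Value(constant.OperationID).(string); ok {
        return v
    }
    return ""
}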
@@ -9,21 +9,24 @@ import (
    "github.com/OpenIMSDK/Open-IM-Server/pkg/discoveryregistry"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/errs"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/proto/msggateway"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/rpcclient/notification"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/startrpc"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/utils"
    "google.golang.org/grpc"
)

func Start(client discoveryregistry.SvcDiscoveryRegistry, server *grpc.Server) error {
    msggateway.RegisterMsgGatewayServer(server, &Server{})
func (s *Server) InitServer(client discoveryregistry.SvcDiscoveryRegistry, server *grpc.Server) error {
    s.LongConnServer.SetMessageHandler(notification.NewCheck(client))
    msggateway.RegisterMsgGatewayServer(server, s)
    return nil
}

func (s *Server) Start() error {
    return startrpc.Start(s.rpcPort, config.Config.RpcRegisterName.OpenImMessageGatewayName, s.prometheusPort, Start)
    return startrpc.Start(s.rpcPort, config.Config.RpcRegisterName.OpenImMessageGatewayName, s.prometheusPort, s.InitServer)
}

type Server struct {
    notification *notification.Check
    rpcPort int
    prometheusPort int
    LongConnServer LongConnServer

@@ -31,6 +34,14 @@ type Server struct {
    //rpcServer *RpcServer
}

func (s *Server) SetLongConnServer(LongConnServer LongConnServer) {
    s.LongConnServer = LongConnServer
}

func (s *Server) Notification() *notification.Check {
    return s.notification
}

func NewServer(rpcPort int, longConnServer LongConnServer) *Server {
    return &Server{rpcPort: rpcPort, LongConnServer: longConnServer, pushTerminal: []int{constant.IOSPlatformID, constant.AndroidPlatformID}}
}

@@ -56,7 +67,7 @@ func (s *Server) GetUsersOnlineStatus(ctx context.Context, req *msggateway.GetUs
    ps := new(msggateway.GetUsersOnlineStatusResp_SuccessDetail)
    ps.Platform = constant.PlatformIDToName(client.platformID)
    ps.Status = constant.OnlineStatus
    ps.ConnID = client.connID
    ps.ConnID = client.ctx.GetConnID()
    ps.IsBackground = client.isBackground
    temp.Status = constant.OnlineStatus
    temp.DetailPlatformStatus = append(temp.DetailPlatformStatus, ps)
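Note: the package-level Start registration function becomes the InitServer method, and Server.Start now passes the bound method value s.InitServer to startrpc.Start. Because a method value carries its receiver, the registration callback can reach the already-configured Server (and its LongConnServer) instead of registering a fresh empty &Server{}. A minimal sketch of the shape involved; the callback signature is inferred from how it is used above, not from startrpc itself:

package msggateway

import (
    "google.golang.org/grpc"

    "github.com/OpenIMSDK/Open-IM-Server/pkg/discoveryregistry"
)

// Assumed callback shape, matching both the old Start and the new InitServer.
type registerFn func(client discoveryregistry.SvcDiscoveryRegistry, server *grpc.Server) error

// A method value such as s.InitServer satisfies registerFn and closes over s.
func asRegisterFn(s *Server) registerFn {
    return s.InitServer
}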
@@ -5,13 +5,10 @@ import (
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/config"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/constant"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/log"
    "sync"
    "time"
)

func RunWsAndServer(rpcPort, wsPort, prometheusPort int) error {
    var wg sync.WaitGroup
    wg.Add(1)
    log.NewPrivateLog(constant.LogFileName)
    fmt.Println("start rpc/msg_gateway server, port: ", rpcPort, wsPort, prometheusPort, ", OpenIM version: ", config.Version)
    longServer, err := NewWsServer(

@@ -24,7 +21,6 @@ func RunWsAndServer(rpcPort, wsPort, prometheusPort int) error {
    }
    hubServer := NewServer(rpcPort, longServer)
    go hubServer.Start()
    go hubServer.LongConnServer.Run()
    wg.Wait()
    hubServer.LongConnServer.Run()
    return nil
}
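Note: gateway start-up no longer parks on a sync.WaitGroup; the long-connection server now blocks the calling goroutine directly. A condensed sketch of the resulting flow, using only the calls shown in the hunks above (NewWsServer options and its error handling elided):

package msggateway

// Sketch of the post-change start-up path in RunWsAndServer.
func runSketch(rpcPort int, longServer LongConnServer) error {
    hubServer := NewServer(rpcPort, longServer)
    go hubServer.Start()           // gRPC gateway registers itself via s.InitServer
    hubServer.LongConnServer.Run() // websocket server blocks here, keeping the process alive
    return nil
}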
@@ -2,6 +2,7 @@ package msggateway

import (
    "context"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/log"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/proto/msg"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/proto/sdkws"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/rpcclient/notification"

@@ -53,6 +54,7 @@ func (g GrpcHandler) GetSeq(context context.Context, data Req) ([]byte, error) {
    if err := g.validate.Struct(req); err != nil {
        return nil, err
    }
    log.ZDebug(context, "msggateway GetSeq", "notification", g.notification, "msg", g.notification.Msg)
    resp, err := g.notification.Msg.GetMaxAndMinSeq(context, &req)
    if err != nil {
        return nil, err
@@ -2,9 +2,10 @@ package msggateway

import (
    "errors"
    "fmt"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/log"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/tokenverify"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/errs"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/rpcclient/notification"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/utils"
    "github.com/go-playground/validator/v10"
    "net/http"

@@ -19,6 +20,7 @@ type LongConnServer interface {
    GetUserAllCons(userID string) ([]*Client, bool)
    GetUserPlatformCons(userID string, platform int) ([]*Client, bool, bool)
    Validate(s interface{}) error
    SetMessageHandler(rpcClient *notification.Check)
    UnRegister(c *Client)
    Compressor
    Encoder

@@ -42,12 +44,17 @@ type WsServer struct {
    onlineUserConnNum int64
    handshakeTimeout time.Duration
    readBufferSize, WriteBufferSize int
    hubServer *Server
    validate *validator.Validate
    Compressor
    Encoder
    MessageHandler
}

func (ws *WsServer) SetMessageHandler(rpcClient *notification.Check) {
    ws.MessageHandler = NewGrpcHandler(ws.validate, rpcClient)
}

func (ws *WsServer) UnRegister(c *Client) {
    ws.unregisterChan <- c
}

@@ -90,8 +97,6 @@ func NewWsServer(opts ...Option) (*WsServer, error) {
    clients: newUserMap(),
    Compressor: NewGzipCompressor(),
    Encoder: NewGobEncoder(),
    MessageHandler: NewGrpcHandler(v, nil),
    //handler: NewGrpcHandler(validate),
    }, nil
}
func (ws *WsServer) Run() error {

@@ -121,8 +126,7 @@ func (ws *WsServer) registerClient(client *Client) {
    ws.clients.Set(client.userID, client)
    atomic.AddInt64(&ws.onlineUserNum, 1)
    atomic.AddInt64(&ws.onlineUserConnNum, 1)
    fmt.Println("R在线用户数量:", ws.onlineUserNum)
    fmt.Println("R在线用户连接数量:", ws.onlineUserConnNum)

    } else {
    if clientOK { // a connection on the same platform already exists
        ws.clients.Set(client.userID, client)

@@ -130,11 +134,9 @@ func (ws *WsServer) registerClient(client *Client) {
    } else {
        ws.clients.Set(client.userID, client)
        atomic.AddInt64(&ws.onlineUserConnNum, 1)
        fmt.Println("R在线用户数量:", ws.onlineUserNum)
        fmt.Println("R在线用户连接数量:", ws.onlineUserConnNum)
    }
    }

    log.ZInfo(client.ctx, "user online", "online user Num", ws.onlineUserNum, "online user conn Num", ws.onlineUserConnNum)
}

func (ws *WsServer) multiTerminalLoginChecker(client []*Client) {

@@ -147,8 +149,7 @@ func (ws *WsServer) unregisterClient(client *Client) {
    atomic.AddInt64(&ws.onlineUserNum, -1)
    }
    atomic.AddInt64(&ws.onlineUserConnNum, -1)
    fmt.Println("R在线用户数量:", ws.onlineUserNum)
    fmt.Println("R在线用户连接数量:", ws.onlineUserConnNum)
    log.ZInfo(client.ctx, "user offline", "online user Num", ws.onlineUserNum, "online user conn Num", ws.onlineUserConnNum)
}

func (ws *WsServer) wsHandler(w http.ResponseWriter, r *http.Request) {
@@ -10,7 +10,7 @@ import (
    unRelationTb "github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/table/unrelation"
    kfk "github.com/OpenIMSDK/Open-IM-Server/pkg/common/kafka"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/log"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/tracelog"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/mcontext"
    pbMsg "github.com/OpenIMSDK/Open-IM-Server/pkg/proto/msg"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/utils"
    "github.com/Shopify/sarama"

@@ -41,7 +41,7 @@ func (mmc *ModifyMsgConsumerHandler) ConsumeClaim(sess sarama.ConsumerGroupSessi
    for msg := range claim.Messages() {
        log.NewDebug("", "kafka get info to mysql", "ModifyMsgConsumerHandler", msg.Topic, "msgPartition", msg.Partition, "msg", string(msg.Value), "key", string(msg.Key))
        if len(msg.Value) != 0 {
            ctx := mmc.modifyMsgConsumerGroup.GetContextFromMsg(msg, "modify consumer")
            ctx := mmc.modifyMsgConsumerGroup.GetContextFromMsg(msg)
            mmc.ModifyMsg(ctx, msg, string(msg.Key), sess)
        } else {
            log.Error("", "msg get from kafka but is nil", msg.Key)

@@ -54,7 +54,7 @@ func (mmc *ModifyMsgConsumerHandler) ConsumeClaim(sess sarama.ConsumerGroupSessi
func (mmc *ModifyMsgConsumerHandler) ModifyMsg(ctx context.Context, cMsg *sarama.ConsumerMessage, msgKey string, _ sarama.ConsumerGroupSession) {
    log.NewInfo("msg come here ModifyMsg!!!", "", "msg", string(cMsg.Value), msgKey)
    msgFromMQ := pbMsg.MsgDataToModifyByMQ{}
    operationID := tracelog.GetOperationID(ctx)
    operationID := mcontext.GetOperationID(ctx)
    err := proto.Unmarshal(cMsg.Value, &msgFromMQ)
    if err != nil {
        log.NewError(msgFromMQ.TriggerID, "msg_transfer Unmarshal msg err", "msg", string(cMsg.Value), "err", err.Error())

@@ -66,7 +66,7 @@ func (mmc *ModifyMsgConsumerHandler) ModifyMsg(ctx context.Context, cMsg *sarama
    if !isReactionFromCache {
        continue
    }
    tracelog.SetOperationID(ctx, operationID)
    ctx = mcontext.SetOperationID(ctx, operationID)
    if msgDataToMQ.MsgData.ContentType == constant.ReactionMessageModifier {
        notification := &apistruct.ReactionMessageModifierNotification{}
        if err := json.Unmarshal(msgDataToMQ.MsgData.Content, notification); err != nil {
@@ -1,12 +1,13 @@
package msgtransfer

import (
    "context"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/config"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/constant"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/controller"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/kafka"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/log"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/tracelog"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/mcontext"
    pbMsg "github.com/OpenIMSDK/Open-IM-Server/pkg/proto/msg"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/utils"
    "github.com/Shopify/sarama"

@@ -22,20 +23,24 @@ const ChannelNum = 100

type MsgChannelValue struct {
    aggregationID string // maybe userID or super groupID
    triggerID string
    msgList []*pbMsg.MsgDataToMQ
    ctx context.Context
    ctxMsgList []*ContextMsg
    lastSeq uint64
}

type TriggerChannelValue struct {
    triggerID string
    cMsgList []*sarama.ConsumerMessage
    ctx context.Context
    cMsgList []*sarama.ConsumerMessage
}

type Cmd2Value struct {
    Cmd int
    Value interface{}
}
type ContextMsg struct {
    message *pbMsg.MsgDataToMQ
    ctx context.Context
}

type OnlineHistoryRedisConsumerHandler struct {
    historyConsumerGroup *kafka.MConsumerGroup
@@ -80,38 +85,39 @@ func (och *OnlineHistoryRedisConsumerHandler) Run(channelID int) {
    switch cmd.Cmd {
    case AggregationMessages:
        msgChannelValue := cmd.Value.(MsgChannelValue)
        msgList := msgChannelValue.msgList
        triggerID := msgChannelValue.triggerID
        ctxMsgList := msgChannelValue.ctxMsgList
        ctx := msgChannelValue.ctx
        storageMsgList := make([]*pbMsg.MsgDataToMQ, 0, 80)
        notStoragePushMsgList := make([]*pbMsg.MsgDataToMQ, 0, 80)
        log.Debug(triggerID, "msg arrived channel", "channel id", channelID, msgList, msgChannelValue.aggregationID, len(msgList))
        storagePushMsgList := make([]*ContextMsg, 0, 80)
        notStoragePushMsgList := make([]*ContextMsg, 0, 80)
        log.ZDebug(ctx, "msg arrived channel", "channel id", channelID, "msgList length", len(ctxMsgList), "aggregationID", msgChannelValue.aggregationID)
        var modifyMsgList []*pbMsg.MsgDataToMQ
        ctx := tracelog.NewCtx("redis consumer")
        tracelog.SetOperationID(ctx, triggerID)
        for _, v := range msgList {
            log.Debug(triggerID, "msg come to storage center", v.String())
            isHistory := utils.GetSwitchFromOptions(v.MsgData.Options, constant.IsHistory)
            isSenderSync := utils.GetSwitchFromOptions(v.MsgData.Options, constant.IsSenderSync)
        //ctx := mcontext.NewCtx("redis consumer")
        //mcontext.SetOperationID(ctx, triggerID)
        for _, v := range ctxMsgList {
            log.ZDebug(ctx, "msg come to storage center", "message", v.message.String())
            isHistory := utils.GetSwitchFromOptions(v.message.MsgData.Options, constant.IsHistory)
            isSenderSync := utils.GetSwitchFromOptions(v.message.MsgData.Options, constant.IsSenderSync)
            if isHistory {
                storageMsgList = append(storageMsgList, v)
                //log.NewWarn(triggerID, "storageMsgList to mongodb client msgID: ", v.MsgData.ClientMsgID)
                storageMsgList = append(storageMsgList, v.message)
                storagePushMsgList = append(storagePushMsgList, v)
            } else {
                if !(!isSenderSync && msgChannelValue.aggregationID == v.MsgData.SendID) {
                if !(!isSenderSync && msgChannelValue.aggregationID == v.message.MsgData.SendID) {
                    notStoragePushMsgList = append(notStoragePushMsgList, v)
                }
            }
            if v.MsgData.ContentType == constant.ReactionMessageModifier || v.MsgData.ContentType == constant.ReactionMessageDeleter {
                modifyMsgList = append(modifyMsgList, v)
            if v.message.MsgData.ContentType == constant.ReactionMessageModifier || v.message.MsgData.ContentType == constant.ReactionMessageDeleter {
                modifyMsgList = append(modifyMsgList, v.message)
            }
        }
        if len(modifyMsgList) > 0 {
            och.msgDatabase.MsgToModifyMQ(ctx, msgChannelValue.aggregationID, triggerID, modifyMsgList)
            och.msgDatabase.MsgToModifyMQ(ctx, msgChannelValue.aggregationID, "", modifyMsgList)
        }
        log.Debug(triggerID, "msg storage length", len(storageMsgList), "push length", len(notStoragePushMsgList))
        log.ZDebug(ctx, "msg storage length", "storageMsgList", len(storageMsgList), "push length", len(notStoragePushMsgList))
        if len(storageMsgList) > 0 {
            lastSeq, err := och.msgDatabase.BatchInsertChat2Cache(ctx, msgChannelValue.aggregationID, storageMsgList)
            if err != nil {
                log.NewError(triggerID, "single data insert to redis err", err.Error(), storageMsgList)
                log.ZError(ctx, "batch data insert to redis err", err, "storageMsgList", storageMsgList)
                och.singleMsgFailedCountMutex.Lock()
                och.singleMsgFailedCount += uint64(len(storageMsgList))
                och.singleMsgFailedCountMutex.Unlock()

@@ -119,18 +125,20 @@ func (och *OnlineHistoryRedisConsumerHandler) Run(channelID int) {
                och.singleMsgSuccessCountMutex.Lock()
                och.singleMsgSuccessCount += uint64(len(storageMsgList))
                och.singleMsgSuccessCountMutex.Unlock()
                och.msgDatabase.MsgToMongoMQ(ctx, msgChannelValue.aggregationID, triggerID, storageMsgList, lastSeq)
                for _, v := range storageMsgList {
                    och.msgDatabase.MsgToPushMQ(ctx, msgChannelValue.aggregationID, v)
                och.msgDatabase.MsgToMongoMQ(ctx, msgChannelValue.aggregationID, "", storageMsgList, lastSeq)
                for _, v := range storagePushMsgList {
                    och.msgDatabase.MsgToPushMQ(v.ctx, msgChannelValue.aggregationID, v.message)
                }
                for _, v := range notStoragePushMsgList {
                    och.msgDatabase.MsgToPushMQ(ctx, msgChannelValue.aggregationID, v)
                    och.msgDatabase.MsgToPushMQ(v.ctx, msgChannelValue.aggregationID, v.message)
                }
            }
        } else {
            for _, v := range notStoragePushMsgList {
                och.msgDatabase.MsgToPushMQ(ctx, msgChannelValue.aggregationID, v)

                p, o, err := och.msgDatabase.MsgToPushMQ(v.ctx, msgChannelValue.aggregationID, v.message)
                if err != nil {
                    log.ZError(v.ctx, "kafka send failed", err, "msg", v.message.String(), "pid", p, "offset", o)
                }
            }
        }
    }
@@ -140,40 +148,43 @@ func (och *OnlineHistoryRedisConsumerHandler) Run(channelID int) {

func (och *OnlineHistoryRedisConsumerHandler) MessagesDistributionHandle() {
    for {
        aggregationMsgs := make(map[string][]*pbMsg.MsgDataToMQ, ChannelNum)
        aggregationMsgs := make(map[string][]*ContextMsg, ChannelNum)
        select {
        case cmd := <-och.msgDistributionCh:
            switch cmd.Cmd {
            case ConsumerMsgs:
                triggerChannelValue := cmd.Value.(TriggerChannelValue)
                triggerID := triggerChannelValue.triggerID
                ctx := triggerChannelValue.ctx
                consumerMessages := triggerChannelValue.cMsgList
                //Aggregation map[userid]message list
                log.Debug(triggerID, "batch messages come to distribution center", len(consumerMessages))
                log.ZDebug(ctx, "batch messages come to distribution center", "length", len(consumerMessages))
                for i := 0; i < len(consumerMessages); i++ {
                    ctxMsg := &ContextMsg{}
                    msgFromMQ := pbMsg.MsgDataToMQ{}
                    err := proto.Unmarshal(consumerMessages[i].Value, &msgFromMQ)
                    if err != nil {
                        log.Error(triggerID, "msg_transfer Unmarshal msg err", "msg", string(consumerMessages[i].Value), "err", err.Error())
                        log.ZError(ctx, "msg_transfer Unmarshal msg err", err, string(consumerMessages[i].Value))
                        return
                    }
                    log.Debug(triggerID, "single msg come to distribution center", msgFromMQ.String(), string(consumerMessages[i].Key))
                    ctxMsg.ctx = kafka.GetContextWithMQHeader(consumerMessages[i].Headers)
                    ctxMsg.message = &msgFromMQ
                    log.ZDebug(ctx, "single msg come to distribution center", msgFromMQ.String(), string(consumerMessages[i].Key))
                    if oldM, ok := aggregationMsgs[string(consumerMessages[i].Key)]; ok {
                        oldM = append(oldM, &msgFromMQ)
                        oldM = append(oldM, ctxMsg)
                        aggregationMsgs[string(consumerMessages[i].Key)] = oldM
                    } else {
                        m := make([]*pbMsg.MsgDataToMQ, 0, 100)
                        m = append(m, &msgFromMQ)
                        m := make([]*ContextMsg, 0, 100)
                        m = append(m, ctxMsg)
                        aggregationMsgs[string(consumerMessages[i].Key)] = m
                    }
                }
                log.Debug(triggerID, "generate map list users len", len(aggregationMsgs))
                log.ZDebug(ctx, "generate map list users len", "length", len(aggregationMsgs))
                for aggregationID, v := range aggregationMsgs {
                    if len(v) >= 0 {
                        hashCode := utils.GetHashCode(aggregationID)
                        channelID := hashCode % ChannelNum
                        log.Debug(triggerID, "generate channelID", hashCode, channelID, aggregationID)
                        och.chArrays[channelID] <- Cmd2Value{Cmd: AggregationMessages, Value: MsgChannelValue{aggregationID: aggregationID, msgList: v, triggerID: triggerID}}
                        log.ZDebug(ctx, "generate channelID", "hashCode", hashCode, "channelID", channelID, "aggregationID", aggregationID)
                        och.chArrays[channelID] <- Cmd2Value{Cmd: AggregationMessages, Value: MsgChannelValue{aggregationID: aggregationID, ctxMsgList: v, ctx: ctx}}
                    }
                }
            }

@@ -181,8 +192,10 @@ func (och *OnlineHistoryRedisConsumerHandler) MessagesDistributionHandle() {
    }
}

func (OnlineHistoryRedisConsumerHandler) Setup(_ sarama.ConsumerGroupSession) error { return nil }
func (OnlineHistoryRedisConsumerHandler) Cleanup(_ sarama.ConsumerGroupSession) error { return nil }
func (och *OnlineHistoryRedisConsumerHandler) Setup(_ sarama.ConsumerGroupSession) error { return nil }
func (och *OnlineHistoryRedisConsumerHandler) Cleanup(_ sarama.ConsumerGroupSession) error {
    return nil
}

func (och *OnlineHistoryRedisConsumerHandler) ConsumeClaim(sess sarama.ConsumerGroupSession, claim sarama.ConsumerGroupClaim) error { // an instance in the consumer group
    for {

@@ -194,10 +207,10 @@ func (och *OnlineHistoryRedisConsumerHandler) ConsumeClaim(sess sarama.ConsumerG
        }
    }
    rwLock := new(sync.RWMutex)
    log.NewDebug("", "online new session msg come", claim.HighWaterMarkOffset(), claim.Topic(), claim.Partition())
    log.ZDebug(context.Background(), "online new session msg come", "highWaterMarkOffset",
        claim.HighWaterMarkOffset(), "topic", claim.Topic(), "partition", claim.Partition())
    cMsg := make([]*sarama.ConsumerMessage, 0, 1000)
    t := time.NewTicker(time.Duration(100) * time.Millisecond)
    var triggerID string
    go func() {
        for {
            select {

@@ -211,18 +224,18 @@ func (och *OnlineHistoryRedisConsumerHandler) ConsumeClaim(sess sarama.ConsumerG
                cMsg = make([]*sarama.ConsumerMessage, 0, 1000)
                rwLock.Unlock()
                split := 1000
                triggerID = utils.OperationIDGenerator()
                log.Debug(triggerID, "timer trigger msg consumer start", len(ccMsg))
                ctx := mcontext.WithTriggerIDContext(context.Background(), utils.OperationIDGenerator())
                log.ZDebug(ctx, "timer trigger msg consumer start", "length", len(ccMsg))
                for i := 0; i < len(ccMsg)/split; i++ {
                    //log.Debug()
                    och.msgDistributionCh <- Cmd2Value{Cmd: ConsumerMsgs, Value: TriggerChannelValue{
                        triggerID: triggerID, cMsgList: ccMsg[i*split : (i+1)*split]}}
                        ctx: ctx, cMsgList: ccMsg[i*split : (i+1)*split]}}
                }
                if (len(ccMsg) % split) > 0 {
                    och.msgDistributionCh <- Cmd2Value{Cmd: ConsumerMsgs, Value: TriggerChannelValue{
                        triggerID: triggerID, cMsgList: ccMsg[split*(len(ccMsg)/split):]}}
                        ctx: ctx, cMsgList: ccMsg[split*(len(ccMsg)/split):]}}
                }
                log.Debug(triggerID, "timer trigger msg consumer end", len(cMsg))
                log.ZDebug(ctx, "timer trigger msg consumer end", "length", len(ccMsg))
            }
        }
    }
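Note: TriggerChannelValue and MsgChannelValue now carry a context.Context, and each consumed message is wrapped in a ContextMsg whose ctx is built with kafka.GetContextWithMQHeader(consumerMessages[i].Headers), so the operation ID travels with every message through the distribution and storage channels instead of a bare triggerID string. The kafka helper's internals are not shown in this diff; a hedged sketch of the general header-to-context idea using sarama's record headers, with the "operationID" header key assumed:

package kafkasketch

import (
    "context"

    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/mcontext"
    "github.com/Shopify/sarama"
)

// Illustrative only; the real kafka.GetContextWithMQHeader may differ.
func contextFromHeaders(headers []*sarama.RecordHeader) context.Context {
    ctx := context.Background()
    for _, h := range headers {
        if string(h.Key) == "operationID" { // assumed header key
            ctx = mcontext.SetOperationID(ctx, string(h.Value))
        }
    }
    return ctx
}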
@@ -7,7 +7,7 @@ import (
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/controller"
    kfk "github.com/OpenIMSDK/Open-IM-Server/pkg/common/kafka"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/log"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/tracelog"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/mcontext"
    pbMsg "github.com/OpenIMSDK/Open-IM-Server/pkg/proto/msg"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/proto/sdkws"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/utils"

@@ -33,7 +33,7 @@ func NewOnlineHistoryMongoConsumerHandler(database controller.MsgDatabase) *Onli
func (mc *OnlineHistoryMongoConsumerHandler) handleChatWs2Mongo(ctx context.Context, cMsg *sarama.ConsumerMessage, msgKey string, session sarama.ConsumerGroupSession) {
    msg := cMsg.Value
    msgFromMQ := pbMsg.MsgDataToMongoByMQ{}
    operationID := tracelog.GetOperationID(ctx)
    operationID := mcontext.GetOperationID(ctx)
    err := proto.Unmarshal(msg, &msgFromMQ)
    if err != nil {
        log.Error("msg_transfer Unmarshal msg err", "", "msg", string(msg), "err", err.Error())

@@ -78,7 +78,7 @@ func (mc *OnlineHistoryMongoConsumerHandler) ConsumeClaim(sess sarama.ConsumerGr
    for msg := range claim.Messages() {
        log.NewDebug("", "kafka get info to mongo", "msgTopic", msg.Topic, "msgPartition", msg.Partition, "msg", string(msg.Value), "key", string(msg.Key))
        if len(msg.Value) != 0 {
            ctx := mc.historyConsumerGroup.GetContextFromMsg(msg, "mongoDB consumer")
            ctx := mc.historyConsumerGroup.GetContextFromMsg(msg)
            mc.handleChatWs2Mongo(ctx, msg, string(msg.Key), sess)
        } else {
            log.Error("", "mongo msg get from kafka but is nil", msg.Key)
@@ -14,7 +14,7 @@ import (
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/controller"
    kfk "github.com/OpenIMSDK/Open-IM-Server/pkg/common/kafka"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/log"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/tracelog"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/mcontext"
    pbMsg "github.com/OpenIMSDK/Open-IM-Server/pkg/proto/msg"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/utils"

@@ -38,7 +38,7 @@ func NewPersistentConsumerHandler(database controller.ChatLogDatabase) *Persiste

func (pc *PersistentConsumerHandler) handleChatWs2Mysql(ctx context.Context, cMsg *sarama.ConsumerMessage, msgKey string, _ sarama.ConsumerGroupSession) {
    msg := cMsg.Value
    operationID := tracelog.GetOperationID(ctx)
    operationID := mcontext.GetOperationID(ctx)
    log.NewInfo("msg come here mysql!!!", "", "msg", string(msg), msgKey)
    var tag bool
    msgFromMQ := pbMsg.MsgDataToMQ{}

@@ -79,7 +79,7 @@ func (pc *PersistentConsumerHandler) ConsumeClaim(sess sarama.ConsumerGroupSessi
    for msg := range claim.Messages() {
        log.NewDebug("", "kafka get info to mysql", "msgTopic", msg.Topic, "msgPartition", msg.Partition, "msg", string(msg.Value), "key", string(msg.Key))
        if len(msg.Value) != 0 {
            ctx := pc.persistentConsumerGroup.GetContextFromMsg(msg, "mysql consumer")
            ctx := pc.persistentConsumerGroup.GetContextFromMsg(msg)
            pc.handleChatWs2Mysql(ctx, msg, string(msg.Key), sess)
        } else {
            log.Error("", "msg get from kafka but is nil", msg.Key)
@@ -6,7 +6,7 @@ import (
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/config"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/constant"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/http"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/tracelog"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/mcontext"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/proto/sdkws"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/utils"
)

@@ -23,7 +23,7 @@ func callbackOfflinePush(ctx context.Context, userIDs []string, msg *sdkws.MsgDa
    UserStatusBatchCallbackReq: callbackstruct.UserStatusBatchCallbackReq{
        UserStatusBaseCallback: callbackstruct.UserStatusBaseCallback{
            CallbackCommand: constant.CallbackOfflinePushCommand,
            OperationID: tracelog.GetOperationID(ctx),
            OperationID: mcontext.GetOperationID(ctx),
            PlatformID: int(msg.SenderPlatformID),
            Platform: constant.PlatformIDToName(int(msg.SenderPlatformID)),
        },

@@ -60,7 +60,7 @@ func callbackOnlinePush(ctx context.Context, userIDs []string, msg *sdkws.MsgDat
    UserStatusBatchCallbackReq: callbackstruct.UserStatusBatchCallbackReq{
        UserStatusBaseCallback: callbackstruct.UserStatusBaseCallback{
            CallbackCommand: constant.CallbackOnlinePushCommand,
            OperationID: tracelog.GetOperationID(ctx),
            OperationID: mcontext.GetOperationID(ctx),
            PlatformID: int(msg.SenderPlatformID),
            Platform: constant.PlatformIDToName(int(msg.SenderPlatformID)),
        },

@@ -85,7 +85,7 @@ func callbackBeforeSuperGroupOnlinePush(ctx context.Context, groupID string, msg
    req := callbackstruct.CallbackBeforeSuperGroupOnlinePushReq{
        UserStatusBaseCallback: callbackstruct.UserStatusBaseCallback{
            CallbackCommand: constant.CallbackSuperGroupOnlinePushCommand,
            OperationID: tracelog.GetOperationID(ctx),
            OperationID: mcontext.GetOperationID(ctx),
            PlatformID: int(msg.SenderPlatformID),
            Platform: constant.PlatformIDToName(int(msg.SenderPlatformID)),
        },
@@ -1,17 +1,7 @@
/*
** description("").
** copyright('open-im,www.open-im.io').
** author("fg,Gordon@open-im.io").
** time(2021/3/22 15:33).
*/
package push

import (
    "fmt"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/config"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/constant"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/prome"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/statistics"
)

type Consumer struct {

@@ -31,6 +21,6 @@ func (c *Consumer) initPrometheus() {
}

func (c *Consumer) Start() {
    statistics.NewStatistics(&c.successCount, config.Config.ModuleName.PushName, fmt.Sprintf("%d second push to msg_gateway count", constant.StatisticsTimeInterval), constant.StatisticsTimeInterval)
    //statistics.NewStatistics(&c.successCount, config.Config.ModuleName.PushName, fmt.Sprintf("%d second push to msg_gateway count", constant.StatisticsTimeInterval), constant.StatisticsTimeInterval)
    go c.pushCh.pushConsumerGroup.RegisterHandleAndConsumer(&c.pushCh)
}
@@ -6,7 +6,7 @@ import (
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/cache"
    http2 "github.com/OpenIMSDK/Open-IM-Server/pkg/common/http"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/log"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/tracelog"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/mcontext"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/utils/splitter"
    "github.com/go-redis/redis/v8"
    "sync"

@@ -71,7 +71,7 @@ func (g *Client) Push(ctx context.Context, userIDs []string, title, content stri
    go func(index int, userIDs []string) {
        defer wg.Done()
        if err2 := g.batchPush(ctx, token, userIDs, pushReq); err2 != nil {
            log.NewError(tracelog.GetOperationID(ctx), "batchPush failed", i, token, pushReq)
            log.NewError(mcontext.GetOperationID(ctx), "batchPush failed", i, token, pushReq)
            err = err2
        }
    }(i, v.Item)

@@ -132,7 +132,7 @@ func (g *Client) batchPush(ctx context.Context, token string, userIDs []string,
}

func (g *Client) singlePush(ctx context.Context, token, userID string, pushReq PushReq) error {
    operationID := tracelog.GetOperationID(ctx)
    operationID := mcontext.GetOperationID(ctx)
    pushReq.RequestID = &operationID
    pushReq.Audience = &Audience{Alias: []string{userID}}
    return g.request(ctx, pushURL, pushReq, token, nil)
@@ -1,9 +1,3 @@
/*
** description("").
** copyright('OpenIM,www.OpenIM.io').
** author("fg,Gordon@tuoyun.net").
** time(2021/5/13 10:33).
*/
package push

import (

@@ -13,7 +7,6 @@ import (
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/constant"
    kfk "github.com/OpenIMSDK/Open-IM-Server/pkg/common/kafka"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/log"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/tracelog"
    pbChat "github.com/OpenIMSDK/Open-IM-Server/pkg/proto/msg"
    pbPush "github.com/OpenIMSDK/Open-IM-Server/pkg/proto/push"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/utils"

@@ -36,10 +29,9 @@ func NewConsumerHandler(pusher *Pusher) *ConsumerHandler {
}

func (c *ConsumerHandler) handleMs2PsChat(ctx context.Context, msg []byte) {
    log.NewDebug("", "msg come from kafka And push!!!", "msg", string(msg))
    msgFromMQ := pbChat.PushMsgDataToMQ{}
    if err := proto.Unmarshal(msg, &msgFromMQ); err != nil {
        log.Error("", "push Unmarshal msg err", "msg", string(msg), "err", err.Error())
        log.ZError(ctx, "push Unmarshal msg err", err, "msg", string(msg))
        return
    }
    pbData := &pbPush.PushMsgReq{

@@ -51,7 +43,6 @@ func (c *ConsumerHandler) handleMs2PsChat(ctx context.Context, msg []byte) {
    if nowSec-sec > 10 {
        return
    }
    tracelog.SetOperationID(ctx, "")
    var err error
    switch msgFromMQ.MsgData.SessionType {
    case constant.SuperGroupChatType:

@@ -60,7 +51,7 @@ func (c *ConsumerHandler) handleMs2PsChat(ctx context.Context, msg []byte) {
        err = c.pusher.MsgToUser(ctx, pbData.SourceID, pbData.MsgData)
    }
    if err != nil {
        log.NewError("", "push failed", pbData)
        log.ZError(ctx, "push failed", err, "msg", pbData.String())
    }
}
func (ConsumerHandler) Setup(_ sarama.ConsumerGroupSession) error { return nil }

@@ -68,8 +59,7 @@ func (ConsumerHandler) Cleanup(_ sarama.ConsumerGroupSession) error { return nil
func (c *ConsumerHandler) ConsumeClaim(sess sarama.ConsumerGroupSession,
    claim sarama.ConsumerGroupClaim) error {
    for msg := range claim.Messages() {
        log.NewDebug("", "kafka get info to mysql", "msgTopic", msg.Topic, "msgPartition", msg.Partition, "msg", string(msg.Value))
        ctx := c.pushConsumerGroup.GetContextFromMsg(msg, "push consumer")
        ctx := c.pushConsumerGroup.GetContextFromMsg(msg)
        c.handleMs2PsChat(ctx, msg.Value)
        sess.MarkMessage(msg, "")
    }
@@ -22,7 +22,6 @@ func Start(client discoveryregistry.SvcDiscoveryRegistry, server *grpc.Server) e
        return err
    }
    cacheModel := cache.NewCacheModel(rdb)

    offlinePusher := NewOfflinePusher(cacheModel)
    database := controller.NewPushDatabase(cacheModel)
    pusher := NewPusher(client, offlinePusher, database, localcache.NewGroupLocalCache(client), localcache.NewConversationLocalCache(client))
@@ -1,9 +1,3 @@
/*
** description("").
** copyright('open-im,www.open-im.io').
** author("fg,Gordon@open-im.io").
** time(2021/3/5 14:31).
*/
package push

import (

@@ -19,8 +13,8 @@ import (
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/controller"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/localcache"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/log"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/mcontext"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/prome"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/tracelog"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/discoveryregistry"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/errs"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/proto/msggateway"

@@ -64,9 +58,8 @@ func NewOfflinePusher(cache cache.Model) offlinepush.OfflinePusher {
}

func (p *Pusher) MsgToUser(ctx context.Context, userID string, msg *sdkws.MsgData) error {
    operationID := tracelog.GetOperationID(ctx)
    var userIDs = []string{userID}
    log.Debug(operationID, "Get msg from msg_transfer And push msg", msg.String(), userID)
    log.ZDebug(ctx, "Get msg from msg_transfer And push msg", "userID", userID, "msg", msg.String())
    // callback
    if err := callbackOnlinePush(ctx, userIDs, msg); err != nil && err != errs.ErrCallbackContinue {
        return err

@@ -77,7 +70,8 @@ func (p *Pusher) MsgToUser(ctx context.Context, userID string, msg *sdkws.MsgDat
        return err
    }
    isOfflinePush := utils.GetSwitchFromOptions(msg.Options, constant.IsOfflinePush)
    log.NewInfo(operationID, "push_result", wsResults, "sendData", msg, "isOfflinePush", isOfflinePush)
    //log.NewInfo(operationID, "push_result", wsResults, "sendData", msg, "isOfflinePush", isOfflinePush)
    log.ZDebug(ctx, "push_result", "ws push result", wsResults, "sendData", msg, "isOfflinePush", isOfflinePush, "push_to_userID", userID)
    p.successCount++
    if isOfflinePush && userID != msg.SendID {
        // save invitation info for offline push

@@ -107,7 +101,7 @@ func (p *Pusher) MsgToUser(ctx context.Context, userID string, msg *sdkws.MsgDat
}

func (p *Pusher) MsgToSuperGroupUser(ctx context.Context, groupID string, msg *sdkws.MsgData) (err error) {
    operationID := tracelog.GetOperationID(ctx)
    operationID := mcontext.GetOperationID(ctx)
    log.Debug(operationID, "Get super group msg from msg_transfer And push msg", msg.String(), groupID)
    var pushToUserIDs []string
    if err := callbackBeforeSuperGroupOnlinePush(ctx, groupID, msg, &pushToUserIDs); err != nil && err != errs.ErrCallbackContinue {

@@ -183,6 +177,7 @@ func (p *Pusher) MsgToSuperGroupUser(ctx context.Context, groupID string, msg *s

func (p *Pusher) GetConnsAndOnlinePush(ctx context.Context, msg *sdkws.MsgData, pushToUserIDs []string) (wsResults []*msggateway.SingleMsgToUserResults, err error) {
    conns, err := p.client.GetConns(config.Config.RpcRegisterName.OpenImMessageGatewayName)
    log.ZDebug(ctx, "get gateway conn", "conn length", len(conns))
    if err != nil {
        return nil, err
    }

@@ -191,7 +186,6 @@ func (p *Pusher) GetConnsAndOnlinePush(ctx context.Context, msg *sdkws.MsgData,
    msgClient := msggateway.NewMsgGatewayClient(v)
    reply, err := msgClient.SuperGroupOnlineBatchPushOneMsg(ctx, &msggateway.OnlineBatchPushOneMsgReq{MsgData: msg, PushToUserIDs: pushToUserIDs})
    if err != nil {
        log.NewError(tracelog.GetOperationID(ctx), msg, len(pushToUserIDs), "err", err)
        continue
    }
    if reply != nil && reply.SinglePushResult != nil {
@ -6,8 +6,8 @@ import (
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/constant"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/cache"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/controller"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/mcontext"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/tokenverify"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/tracelog"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/discoveryregistry"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/errs"
|
||||
pbAuth "github.com/OpenIMSDK/Open-IM-Server/pkg/proto/auth"
|
||||
@ -92,7 +92,7 @@ func (s *authServer) ForceLogout(ctx context.Context, req *pbAuth.ForceLogoutReq
|
||||
if err := tokenverify.CheckAdmin(ctx); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := s.forceKickOff(ctx, req.UserID, req.PlatformID, tracelog.GetOperationID(ctx)); err != nil {
|
||||
if err := s.forceKickOff(ctx, req.UserID, req.PlatformID, mcontext.GetOperationID(ctx)); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &resp, nil
|
||||
|
@ -5,8 +5,8 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/table/relation"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/mcontext"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/tokenverify"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/tracelog"
|
||||
pbFriend "github.com/OpenIMSDK/Open-IM-Server/pkg/proto/friend"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/rpcclient/convert"
|
||||
)
|
||||
@ -63,7 +63,7 @@ func (s *friendServer) AddBlack(ctx context.Context, req *pbFriend.AddBlackReq)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
black := relation.BlackModel{OwnerUserID: req.OwnerUserID, BlockUserID: req.BlackUserID, OperatorUserID: tracelog.GetOpUserID(ctx), CreateTime: time.Now()}
|
||||
black := relation.BlackModel{OwnerUserID: req.OwnerUserID, BlockUserID: req.BlackUserID, OperatorUserID: mcontext.GetOpUserID(ctx), CreateTime: time.Now()}
|
||||
if err := s.BlackDatabase.Create(ctx, []*relation.BlackModel{&black}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
@ -6,7 +6,7 @@ import (
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/config"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/constant"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/http"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/tracelog"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/mcontext"
|
||||
pbfriend "github.com/OpenIMSDK/Open-IM-Server/pkg/proto/friend"
|
||||
)
|
||||
|
||||
@ -19,7 +19,7 @@ func CallbackBeforeAddFriend(ctx context.Context, req *pbfriend.ApplyToAddFriend
|
||||
FromUserID: req.FromUserID,
|
||||
ToUserID: req.ToUserID,
|
||||
ReqMsg: req.ReqMsg,
|
||||
OperationID: tracelog.GetOperationID(ctx),
|
||||
OperationID: mcontext.GetOperationID(ctx),
|
||||
}
|
||||
resp := &cbapi.CallbackBeforeAddFriendResp{}
|
||||
return http.CallBackPostReturn(config.Config.Callback.CallbackUrl, cbReq, resp, config.Config.Callback.CallbackBeforeAddFriend)
|
||||
|
@ -8,7 +8,7 @@ import (
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/constant"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/table/relation"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/http"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/tracelog"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/mcontext"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/proto/group"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/proto/wrapperspb"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/utils"
|
||||
@ -21,7 +21,7 @@ func CallbackBeforeCreateGroup(ctx context.Context, req *group.CreateGroupReq) (
|
||||
}
|
||||
cbReq := &callbackstruct.CallbackBeforeCreateGroupReq{
|
||||
CallbackCommand: constant.CallbackBeforeCreateGroupCommand,
|
||||
OperationID: tracelog.GetOperationID(ctx),
|
||||
OperationID: mcontext.GetOperationID(ctx),
|
||||
GroupInfo: *req.GroupInfo,
|
||||
}
|
||||
cbReq.InitMemberList = append(cbReq.InitMemberList, &apistruct.GroupAddMemberInfo{
|
||||
@ -66,7 +66,7 @@ func CallbackBeforeMemberJoinGroup(ctx context.Context, groupMember *relation.Gr
|
||||
}
|
||||
callbackReq := &callbackstruct.CallbackBeforeMemberJoinGroupReq{
|
||||
CallbackCommand: constant.CallbackBeforeMemberJoinGroupCommand,
|
||||
OperationID: tracelog.GetOperationID(ctx),
|
||||
OperationID: mcontext.GetOperationID(ctx),
|
||||
GroupID: groupMember.GroupID,
|
||||
UserID: groupMember.UserID,
|
||||
Ex: groupMember.Ex,
|
||||
@ -93,7 +93,7 @@ func CallbackBeforeSetGroupMemberInfo(ctx context.Context, req *group.SetGroupMe
|
||||
}
|
||||
callbackReq := callbackstruct.CallbackBeforeSetGroupMemberInfoReq{
|
||||
CallbackCommand: constant.CallbackBeforeSetGroupMemberInfoCommand,
|
||||
OperationID: tracelog.GetOperationID(ctx),
|
||||
OperationID: mcontext.GetOperationID(ctx),
|
||||
GroupID: req.GroupID,
|
||||
UserID: req.UserID,
|
||||
}
|
||||
|
@ -16,8 +16,8 @@ import (
|
||||
relationTb "github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/table/relation"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/unrelation"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/log"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/mcontext"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/tokenverify"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/tracelog"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/discoveryregistry"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/errs"
|
||||
pbConversation "github.com/OpenIMSDK/Open-IM-Server/pkg/proto/conversation"
|
||||
@ -63,7 +63,7 @@ type groupServer struct {
|
||||
|
||||
func (s *groupServer) CheckGroupAdmin(ctx context.Context, groupID string) error {
|
||||
if !tokenverify.IsAppManagerUid(ctx) {
|
||||
groupMember, err := s.GroupDatabase.TakeGroupMember(ctx, groupID, tracelog.GetOpUserID(ctx))
|
||||
groupMember, err := s.GroupDatabase.TakeGroupMember(ctx, groupID, mcontext.GetOpUserID(ctx))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@ -103,7 +103,7 @@ func (s *groupServer) GenGroupID(ctx context.Context, groupID *string) error {
|
||||
}
|
||||
}
|
||||
for i := 0; i < 10; i++ {
|
||||
id := utils.Md5(strings.Join([]string{tracelog.GetOperationID(ctx), strconv.FormatInt(time.Now().UnixNano(), 10), strconv.Itoa(rand.Int())}, ",;,"))
|
||||
id := utils.Md5(strings.Join([]string{mcontext.GetOperationID(ctx), strconv.FormatInt(time.Now().UnixNano(), 10), strconv.Itoa(rand.Int())}, ",;,"))
|
||||
bi := big.NewInt(0)
|
||||
bi.SetString(id[0:8], 16)
|
||||
id = bi.String()
|
||||
@ -148,9 +148,9 @@ func (s *groupServer) CreateGroup(ctx context.Context, req *pbGroup.CreateGroupR
|
||||
groupMember.Nickname = ""
|
||||
groupMember.GroupID = group.GroupID
|
||||
groupMember.RoleLevel = roleLevel
|
||||
groupMember.OperatorUserID = tracelog.GetOpUserID(ctx)
|
||||
groupMember.OperatorUserID = mcontext.GetOpUserID(ctx)
|
||||
groupMember.JoinSource = constant.JoinByInvitation
|
||||
groupMember.InviterUserID = tracelog.GetOpUserID(ctx)
|
||||
groupMember.InviterUserID = mcontext.GetOpUserID(ctx)
|
||||
groupMember.JoinTime = time.Now()
|
||||
if err := CallbackBeforeMemberJoinGroup(ctx, groupMember, group.Ex); err != nil && err != errs.ErrCallbackContinue {
|
||||
return err
|
||||
@ -265,7 +265,7 @@ func (s *groupServer) InviteUserToGroup(ctx context.Context, req *pbGroup.Invite
|
||||
}
|
||||
if group.NeedVerification == constant.AllNeedVerification {
|
||||
if !tokenverify.IsAppManagerUid(ctx) {
|
||||
opUserID := tracelog.GetOpUserID(ctx)
|
||||
opUserID := mcontext.GetOpUserID(ctx)
|
||||
member, ok := memberMap[opUserID]
|
||||
if !ok {
|
||||
return nil, errs.ErrNoPermission.Wrap("not in group")
|
||||
@ -303,7 +303,7 @@ func (s *groupServer) InviteUserToGroup(ctx context.Context, req *pbGroup.Invite
|
||||
s.Notification.SuperGroupNotification(ctx, userID, userID)
|
||||
}
|
||||
} else {
|
||||
opUserID := tracelog.GetOpUserID(ctx)
|
||||
opUserID := mcontext.GetOpUserID(ctx)
|
||||
var groupMembers []*relationTb.GroupMemberModel
|
||||
for _, userID := range req.InvitedUserIDs {
|
||||
member := PbToDbGroupMember(userMap[userID])
|
||||
@ -388,7 +388,7 @@ func (s *groupServer) KickGroupMember(ctx context.Context, req *pbGroup.KickGrou
|
||||
if utils.IsDuplicateStringSlice(req.KickedUserIDs) {
|
||||
return nil, errs.ErrArgs.Wrap("KickedUserIDs duplicate")
|
||||
}
|
||||
opUserID := tracelog.GetOpUserID(ctx)
|
||||
opUserID := mcontext.GetOpUserID(ctx)
|
||||
if utils.IsContain(opUserID, req.KickedUserIDs) {
|
||||
return nil, errs.ErrArgs.Wrap("opUserID in KickedUserIDs")
|
||||
}
|
||||
@ -593,7 +593,7 @@ func (s *groupServer) GroupApplicationResponse(ctx context.Context, req *pbGroup
|
||||
JoinTime: time.Now(),
|
||||
JoinSource: groupRequest.JoinSource,
|
||||
InviterUserID: groupRequest.InviterUserID,
|
||||
OperatorUserID: tracelog.GetOpUserID(ctx),
|
||||
OperatorUserID: mcontext.GetOpUserID(ctx),
|
||||
Ex: groupRequest.Ex,
|
||||
}
|
||||
if err = CallbackBeforeMemberJoinGroup(ctx, member, group.Ex); err != nil && err != errs.ErrCallbackContinue {
|
||||
@ -616,7 +616,7 @@ func (s *groupServer) GroupApplicationResponse(ctx context.Context, req *pbGroup
|
||||
|
||||
func (s *groupServer) JoinGroup(ctx context.Context, req *pbGroup.JoinGroupReq) (*pbGroup.JoinGroupResp, error) {
|
||||
resp := &pbGroup.JoinGroupResp{}
|
||||
if _, err := s.UserCheck.GetPublicUserInfo(ctx, tracelog.GetOpUserID(ctx)); err != nil {
|
||||
if _, err := s.UserCheck.GetPublicUserInfo(ctx, mcontext.GetOpUserID(ctx)); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
group, err := s.GroupDatabase.TakeGroup(ctx, req.GroupID)
|
||||
@ -630,27 +630,27 @@ func (s *groupServer) JoinGroup(ctx context.Context, req *pbGroup.JoinGroupReq)
|
||||
if group.GroupType == constant.SuperGroup {
|
||||
return nil, errs.ErrGroupTypeNotSupport.Wrap()
|
||||
}
|
||||
user, err := s.UserCheck.GetUserInfo(ctx, tracelog.GetOpUserID(ctx))
|
||||
user, err := s.UserCheck.GetUserInfo(ctx, mcontext.GetOpUserID(ctx))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
groupMember := PbToDbGroupMember(user)
|
||||
groupMember.GroupID = group.GroupID
|
||||
groupMember.RoleLevel = constant.GroupOrdinaryUsers
|
||||
groupMember.OperatorUserID = tracelog.GetOpUserID(ctx)
|
||||
groupMember.OperatorUserID = mcontext.GetOpUserID(ctx)
|
||||
groupMember.JoinSource = constant.JoinByInvitation
|
||||
groupMember.InviterUserID = tracelog.GetOpUserID(ctx)
|
||||
groupMember.InviterUserID = mcontext.GetOpUserID(ctx)
|
||||
if err := CallbackBeforeMemberJoinGroup(ctx, groupMember, group.Ex); err != nil && err != errs.ErrCallbackContinue {
|
||||
return nil, err
|
||||
}
|
||||
if err := s.GroupDatabase.CreateGroup(ctx, nil, []*relationTb.GroupMemberModel{groupMember}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
s.Notification.MemberEnterDirectlyNotification(ctx, req.GroupID, tracelog.GetOpUserID(ctx), tracelog.GetOperationID(ctx))
|
||||
s.Notification.MemberEnterDirectlyNotification(ctx, req.GroupID, mcontext.GetOpUserID(ctx), mcontext.GetOperationID(ctx))
|
||||
return resp, nil
|
||||
}
|
||||
groupRequest := relationTb.GroupRequestModel{
|
||||
UserID: tracelog.GetOpUserID(ctx),
|
||||
UserID: mcontext.GetOpUserID(ctx),
|
||||
ReqMsg: req.ReqMessage,
|
||||
GroupID: req.GroupID,
|
||||
JoinSource: req.JoinSource,
|
||||
@ -670,12 +670,12 @@ func (s *groupServer) QuitGroup(ctx context.Context, req *pbGroup.QuitGroupReq)
|
||||
return nil, err
|
||||
}
|
||||
if group.GroupType == constant.SuperGroup {
|
||||
if err := s.GroupDatabase.DeleteSuperGroupMember(ctx, req.GroupID, []string{tracelog.GetOpUserID(ctx)}); err != nil {
|
||||
if err := s.GroupDatabase.DeleteSuperGroupMember(ctx, req.GroupID, []string{mcontext.GetOpUserID(ctx)}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
s.Notification.SuperGroupNotification(ctx, tracelog.GetOpUserID(ctx), tracelog.GetOpUserID(ctx))
|
||||
s.Notification.SuperGroupNotification(ctx, mcontext.GetOpUserID(ctx), mcontext.GetOpUserID(ctx))
|
||||
} else {
|
||||
_, err := s.GroupDatabase.TakeGroupMember(ctx, req.GroupID, tracelog.GetOpUserID(ctx))
|
||||
_, err := s.GroupDatabase.TakeGroupMember(ctx, req.GroupID, mcontext.GetOpUserID(ctx))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -687,7 +687,7 @@ func (s *groupServer) QuitGroup(ctx context.Context, req *pbGroup.QuitGroupReq)
|
||||
func (s *groupServer) SetGroupInfo(ctx context.Context, req *pbGroup.SetGroupInfoReq) (*pbGroup.SetGroupInfoResp, error) {
|
||||
resp := &pbGroup.SetGroupInfoResp{}
|
||||
if !tokenverify.IsAppManagerUid(ctx) {
|
||||
groupMember, err := s.GroupDatabase.TakeGroupMember(ctx, req.GroupInfoForSet.GroupID, tracelog.GetOpUserID(ctx))
|
||||
groupMember, err := s.GroupDatabase.TakeGroupMember(ctx, req.GroupInfoForSet.GroupID, mcontext.GetOpUserID(ctx))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -721,7 +721,7 @@ func (s *groupServer) SetGroupInfo(ctx context.Context, req *pbGroup.SetGroupInf
|
||||
if req.GroupInfoForSet.Notification != "" {
|
||||
args := pbConversation.ModifyConversationFieldReq{
|
||||
Conversation: &pbConversation.Conversation{
|
||||
OwnerUserID: tracelog.GetOpUserID(ctx),
|
||||
OwnerUserID: mcontext.GetOpUserID(ctx),
|
||||
ConversationID: utils.GetConversationIDBySessionType(group.GroupID, constant.GroupChatType),
|
||||
ConversationType: constant.GroupChatType,
|
||||
GroupID: group.GroupID,
|
||||
@ -772,8 +772,8 @@ func (s *groupServer) TransferGroupOwner(ctx context.Context, req *pbGroup.Trans
|
||||
if oldOwner == nil {
|
||||
return nil, errs.ErrArgs.Wrap("OldOwnerUser not in group " + req.NewOwnerUserID)
|
||||
}
|
||||
if oldOwner.GroupID != tracelog.GetOpUserID(ctx) {
|
||||
return nil, errs.ErrNoPermission.Wrap(fmt.Sprintf("user %s no permission transfer group owner", tracelog.GetOpUserID(ctx)))
|
||||
if oldOwner.GroupID != mcontext.GetOpUserID(ctx) {
|
||||
return nil, errs.ErrNoPermission.Wrap(fmt.Sprintf("user %s no permission transfer group owner", mcontext.GetOpUserID(ctx)))
|
||||
}
|
||||
}
|
||||
if err := s.GroupDatabase.TransferGroupOwner(ctx, req.GroupID, req.OldOwnerUserID, req.NewOwnerUserID, newOwner.RoleLevel); err != nil {
|
||||
@ -922,7 +922,7 @@ func (s *groupServer) MuteGroupMember(ctx context.Context, req *pbGroup.MuteGrou
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if !(tracelog.GetOpUserID(ctx) == req.UserID || tokenverify.IsAppManagerUid(ctx)) {
|
||||
if !(mcontext.GetOpUserID(ctx) == req.UserID || tokenverify.IsAppManagerUid(ctx)) {
|
||||
opMember, err := s.GroupDatabase.TakeGroupMember(ctx, req.GroupID, req.UserID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@ -945,8 +945,8 @@ func (s *groupServer) CancelMuteGroupMember(ctx context.Context, req *pbGroup.Ca
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if !(tracelog.GetOpUserID(ctx) == req.UserID || tokenverify.IsAppManagerUid(ctx)) {
|
||||
opMember, err := s.GroupDatabase.TakeGroupMember(ctx, req.GroupID, tracelog.GetOpUserID(ctx))
|
||||
if !(mcontext.GetOpUserID(ctx) == req.UserID || tokenverify.IsAppManagerUid(ctx)) {
|
||||
opMember, err := s.GroupDatabase.TakeGroupMember(ctx, req.GroupID, mcontext.GetOpUserID(ctx))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -1005,7 +1005,7 @@ func (s *groupServer) SetGroupMemberInfo(ctx context.Context, req *pbGroup.SetGr
|
||||
}
|
||||
groupIDs := utils.Keys(groupIDMap)
|
||||
userIDs := utils.Keys(userIDMap)
|
||||
members, err := s.GroupDatabase.FindGroupMember(ctx, groupIDs, append(userIDs, tracelog.GetOpUserID(ctx)), nil)
|
||||
members, err := s.GroupDatabase.FindGroupMember(ctx, groupIDs, append(userIDs, mcontext.GetOpUserID(ctx)), nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -1021,7 +1021,7 @@ func (s *groupServer) SetGroupMemberInfo(ctx context.Context, req *pbGroup.SetGr
|
||||
return [...]string{e.GroupID, e.UserID}
|
||||
})
|
||||
if !tokenverify.IsAppManagerUid(ctx) {
|
||||
opUserID := tracelog.GetOpUserID(ctx)
|
||||
opUserID := mcontext.GetOpUserID(ctx)
|
||||
for _, member := range members {
|
||||
if member.UserID == opUserID {
|
||||
continue
|
||||
|
@ -6,7 +6,7 @@ import (
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/config"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/constant"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/http"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/tracelog"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/mcontext"
|
||||
pbChat "github.com/OpenIMSDK/Open-IM-Server/pkg/proto/msg"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/utils"
|
||||
)
|
||||
@ -21,7 +21,7 @@ func toCommonCallback(ctx context.Context, msg *pbChat.SendMsgReq, command strin
|
||||
ServerMsgID: msg.MsgData.ServerMsgID,
|
||||
CallbackCommand: command,
|
||||
ClientMsgID: msg.MsgData.ClientMsgID,
|
||||
OperationID: tracelog.GetOperationID(ctx),
|
||||
OperationID: mcontext.GetOperationID(ctx),
|
||||
SenderPlatformID: msg.MsgData.SenderPlatformID,
|
||||
SenderNickname: msg.MsgData.SenderNickname,
|
||||
SessionType: msg.MsgData.SessionType,
|
||||
|
@ -6,7 +6,7 @@ import (
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/config"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/constant"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/http"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/tracelog"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/mcontext"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/proto/msg"
|
||||
)
|
||||
|
||||
@ -15,10 +15,10 @@ func CallbackSetMessageReactionExtensions(ctx context.Context, setReq *msg.SetMe
|
||||
return nil
|
||||
}
|
||||
req := &cbapi.CallbackBeforeSetMessageReactionExtReq{
|
||||
OperationID: tracelog.GetOperationID(ctx),
|
||||
OperationID: mcontext.GetOperationID(ctx),
|
||||
CallbackCommand: constant.CallbackBeforeSetMessageReactionExtensionCommand,
|
||||
SourceID: setReq.SourceID,
|
||||
OpUserID: tracelog.GetOpUserID(ctx),
|
||||
OpUserID: mcontext.GetOpUserID(ctx),
|
||||
SessionType: setReq.SessionType,
|
||||
ReactionExtensionList: setReq.ReactionExtensions,
|
||||
ClientMsgID: setReq.ClientMsgID,
|
||||
@ -58,10 +58,10 @@ func CallbackGetMessageListReactionExtensions(ctx context.Context, getReq *msg.G
|
||||
return nil
|
||||
}
|
||||
req := &cbapi.CallbackGetMessageListReactionExtReq{
|
||||
OperationID: tracelog.GetOperationID(ctx),
|
||||
OperationID: mcontext.GetOperationID(ctx),
|
||||
CallbackCommand: constant.CallbackGetMessageListReactionExtensionsCommand,
|
||||
SourceID: getReq.SourceID,
|
||||
OpUserID: tracelog.GetOperationID(ctx),
|
||||
OpUserID: mcontext.GetOperationID(ctx),
|
||||
SessionType: getReq.SessionType,
|
||||
TypeKeyList: getReq.TypeKeys,
|
||||
}
|
||||
@ -71,10 +71,10 @@ func CallbackGetMessageListReactionExtensions(ctx context.Context, getReq *msg.G
|
||||
|
||||
func CallbackAddMessageReactionExtensions(ctx context.Context, setReq *msg.ModifyMessageReactionExtensionsReq) error {
|
||||
req := &cbapi.CallbackAddMessageReactionExtReq{
|
||||
OperationID: tracelog.GetOperationID(ctx),
|
||||
OperationID: mcontext.GetOperationID(ctx),
|
||||
CallbackCommand: constant.CallbackAddMessageListReactionExtensionsCommand,
|
||||
SourceID: setReq.SourceID,
|
||||
OpUserID: tracelog.GetOperationID(ctx),
|
||||
OpUserID: mcontext.GetOperationID(ctx),
|
||||
SessionType: setReq.SessionType,
|
||||
ReactionExtensionList: setReq.ReactionExtensions,
|
||||
ClientMsgID: setReq.ClientMsgID,
|
||||
|
61
internal/rpc/msg/message_interceptor.go
Normal file
@ -0,0 +1,61 @@
package msg

import (
"context"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/config"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/constant"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/log"
"github.com/OpenIMSDK/Open-IM-Server/pkg/errs"
"github.com/OpenIMSDK/Open-IM-Server/pkg/proto/msg"
"github.com/OpenIMSDK/Open-IM-Server/pkg/proto/sdkws"
)

type MessageInterceptorFunc func(ctx context.Context, req *msg.SendMsgReq) (*sdkws.MsgData, error)

func MessageHasReadEnabled(_ context.Context, req *msg.SendMsgReq) (*sdkws.MsgData, error) {
switch req.MsgData.ContentType {
case constant.HasReadReceipt:
if config.Config.SingleMessageHasReadReceiptEnable {
return req.MsgData, nil
} else {
return nil, errs.ErrMessageHasReadDisable.Wrap()
}
case constant.GroupHasReadReceipt:
if config.Config.GroupMessageHasReadReceiptEnable {
return req.MsgData, nil
} else {
return nil, errs.ErrMessageHasReadDisable.Wrap()
}
}
return req.MsgData, nil
}
func MessageModifyCallback(ctx context.Context, req *msg.SendMsgReq) (*sdkws.MsgData, error) {
if err := CallbackMsgModify(ctx, req); err != nil && err != errs.ErrCallbackContinue {
log.ZWarn(ctx, "CallbackMsgModify failed", err, "req", req.String())
return nil, err
}
return req.MsgData, nil
}
func MessageBeforeSendCallback(ctx context.Context, req *msg.SendMsgReq) (*sdkws.MsgData, error) {
switch req.MsgData.SessionType {
case constant.SingleChatType:
if err := CallbackBeforeSendSingleMsg(ctx, req); err != nil && err != errs.ErrCallbackContinue {
log.ZWarn(ctx, "CallbackBeforeSendSingleMsg failed", err, "req", req.String())
return nil, err
}
case constant.GroupChatType:
if err := CallbackBeforeSendGroupMsg(ctx, req); err != nil && err != errs.ErrCallbackContinue {
log.ZWarn(ctx, "CallbackBeforeSendGroupMsg failed", err, "req", req.String())
return nil, err
}
case constant.NotificationChatType:
case constant.SuperGroupChatType:
if err := CallbackBeforeSendGroupMsg(ctx, req); err != nil && err != errs.ErrCallbackContinue {
log.ZWarn(ctx, "CallbackBeforeSendGroupMsg failed", err, "req", req.String())
return nil, err
}
default:
return nil, errs.ErrArgs.Wrap("unknown sessionType")
}
return req.MsgData, nil
}
@ -3,13 +3,13 @@ package msg
import (
"context"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/constant"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/tracelog"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/mcontext"
pbMsg "github.com/OpenIMSDK/Open-IM-Server/pkg/proto/msg"
)

func (m *msgServer) SetSendMsgStatus(ctx context.Context, req *pbMsg.SetSendMsgStatusReq) (*pbMsg.SetSendMsgStatusResp, error) {
resp := &pbMsg.SetSendMsgStatusResp{}
if err := m.MsgDatabase.SetSendMsgStatus(ctx, tracelog.GetOperationID(ctx), req.Status); err != nil {
if err := m.MsgDatabase.SetSendMsgStatus(ctx, mcontext.GetOperationID(ctx), req.Status); err != nil {
return nil, err
}
return resp, nil
@ -17,7 +17,7 @@ func (m *msgServer) SetSendMsgStatus(ctx context.Context, req *pbMsg.SetSendMsgS

func (m *msgServer) GetSendMsgStatus(ctx context.Context, req *pbMsg.GetSendMsgStatusReq) (*pbMsg.GetSendMsgStatusResp, error) {
resp := &pbMsg.GetSendMsgStatusResp{}
status, err := m.MsgDatabase.GetSendMsgStatus(ctx, tracelog.GetOperationID(ctx))
status, err := m.MsgDatabase.GetSendMsgStatus(ctx, mcontext.GetOperationID(ctx))
if IsNotFound(err) {
resp.Status = constant.MsgStatusNotExist
return resp, nil
@ -270,9 +270,10 @@ func (m *msgServer) modifyMessageByUserMessageReceiveOpt(ctx context.Context, us
|
||||
}
|
||||
conversationID := utils.GetConversationIDBySessionType(sourceID, sessionType)
|
||||
singleOpt, err := m.Conversation.GetSingleConversationRecvMsgOpt(ctx, userID, conversationID)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
//if err != nil {
|
||||
// return false, err
|
||||
//}
|
||||
return true, nil
|
||||
switch singleOpt {
|
||||
case constant.ReceiveMessage:
|
||||
return true, nil
|
||||
|
@ -16,11 +16,6 @@ import (
|
||||
func (m *msgServer) sendMsgSuperGroupChat(ctx context.Context, req *msg.SendMsgReq) (resp *msg.SendMsgResp, err error) {
|
||||
resp = &msg.SendMsgResp{}
|
||||
promePkg.Inc(promePkg.WorkSuperGroupChatMsgRecvSuccessCounter)
|
||||
// callback
|
||||
if err = CallbackBeforeSendGroupMsg(ctx, req); err != nil && err != errs.ErrCallbackContinue {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if _, err = m.messageVerification(ctx, req); err != nil {
|
||||
promePkg.Inc(promePkg.WorkSuperGroupChatMsgProcessFailedCounter)
|
||||
return nil, err
|
||||
@ -63,9 +58,6 @@ func (m *msgServer) sendMsgNotification(ctx context.Context, req *msg.SendMsgReq
|
||||
|
||||
func (m *msgServer) sendMsgSingleChat(ctx context.Context, req *msg.SendMsgReq) (resp *msg.SendMsgResp, err error) {
|
||||
promePkg.Inc(promePkg.SingleChatMsgRecvSuccessCounter)
|
||||
if err = CallbackBeforeSendSingleMsg(ctx, req); err != nil && err != errs.ErrCallbackContinue {
|
||||
return nil, err
|
||||
}
|
||||
_, err = m.messageVerification(ctx, req)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@ -103,10 +95,6 @@ func (m *msgServer) sendMsgSingleChat(ctx context.Context, req *msg.SendMsgReq)
|
||||
func (m *msgServer) sendMsgGroupChat(ctx context.Context, req *msg.SendMsgReq) (resp *msg.SendMsgResp, err error) {
|
||||
// callback
|
||||
promePkg.Inc(promePkg.GroupChatMsgRecvSuccessCounter)
|
||||
err = CallbackBeforeSendGroupMsg(ctx, req)
|
||||
if err != nil && err != errs.ErrCallbackContinue {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var memberUserIDList []string
|
||||
if memberUserIDList, err = m.messageVerification(ctx, req); err != nil {
|
||||
@ -231,79 +219,3 @@ func (m *msgServer) sendMsgGroupChat(ctx context.Context, req *msg.SendMsgReq) (
|
||||
resp.ClientMsgID = msgToMQSingle.MsgData.ClientMsgID
|
||||
return resp, nil
|
||||
}
|
||||
|
||||
func (m *msgServer) SendMsg(ctx context.Context, req *msg.SendMsgReq) (resp *msg.SendMsgResp, error error) {
|
||||
resp = &msg.SendMsgResp{}
|
||||
flag := isMessageHasReadEnabled(req.MsgData)
|
||||
if !flag {
|
||||
return nil, errs.ErrMessageHasReadDisable.Wrap()
|
||||
}
|
||||
m.encapsulateMsgData(req.MsgData)
|
||||
if err := CallbackMsgModify(ctx, req); err != nil && err != errs.ErrCallbackContinue {
|
||||
return nil, err
|
||||
}
|
||||
switch req.MsgData.SessionType {
|
||||
case constant.SingleChatType:
|
||||
return m.sendMsgSingleChat(ctx, req)
|
||||
case constant.GroupChatType:
|
||||
return m.sendMsgGroupChat(ctx, req)
|
||||
case constant.NotificationChatType:
|
||||
return m.sendMsgNotification(ctx, req)
|
||||
case constant.SuperGroupChatType:
|
||||
return m.sendMsgSuperGroupChat(ctx, req)
|
||||
default:
|
||||
return nil, errs.ErrArgs.Wrap("unknown sessionType")
|
||||
}
|
||||
}
|
||||
|
||||
func (m *msgServer) GetMaxAndMinSeq(ctx context.Context, req *sdkws.GetMaxAndMinSeqReq) (*sdkws.GetMaxAndMinSeqResp, error) {
|
||||
resp := new(sdkws.GetMaxAndMinSeqResp)
|
||||
m2 := make(map[string]*sdkws.MaxAndMinSeq)
|
||||
maxSeq, err := m.MsgDatabase.GetUserMaxSeq(ctx, req.UserID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
minSeq, err := m.MsgDatabase.GetUserMinSeq(ctx, req.UserID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
resp.MaxSeq = maxSeq
|
||||
resp.MinSeq = minSeq
|
||||
if len(req.GroupIDs) > 0 {
|
||||
resp.GroupMaxAndMinSeq = make(map[string]*sdkws.MaxAndMinSeq)
|
||||
for _, groupID := range req.GroupIDs {
|
||||
maxSeq, err := m.MsgDatabase.GetGroupMaxSeq(ctx, groupID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
minSeq, err := m.MsgDatabase.GetGroupMinSeq(ctx, groupID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
m2[groupID] = &sdkws.MaxAndMinSeq{
|
||||
MaxSeq: maxSeq,
|
||||
MinSeq: minSeq,
|
||||
}
|
||||
}
|
||||
}
|
||||
return resp, nil
|
||||
}
|
||||
|
||||
func (m *msgServer) PullMessageBySeqs(ctx context.Context, req *sdkws.PullMessageBySeqsReq) (*sdkws.PullMessageBySeqsResp, error) {
|
||||
resp := &sdkws.PullMessageBySeqsResp{GroupMsgDataList: make(map[string]*sdkws.MsgDataList)}
|
||||
msgs, err := m.MsgDatabase.GetMsgBySeqs(ctx, req.UserID, req.Seqs)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
resp.List = msgs
|
||||
for groupID, list := range req.GroupSeqs {
|
||||
msgs, err := m.MsgDatabase.GetSuperGroupMsgBySeqs(ctx, groupID, list.Seqs)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
resp.GroupMsgDataList[groupID] = &sdkws.MsgDataList{
|
||||
MsgDataList: msgs,
|
||||
}
|
||||
}
|
||||
return resp, nil
|
||||
}
|
||||
|
@ -1,18 +1,25 @@
|
||||
package msg
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/constant"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/cache"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/controller"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/localcache"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/tx"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/unrelation"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/prome"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/tokenverify"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/discoveryregistry"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/errs"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/proto/msg"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/proto/sdkws"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/rpcclient/check"
|
||||
"github.com/go-redis/redis/v8"
|
||||
"google.golang.org/grpc"
|
||||
)
|
||||
|
||||
type MessageInterceptorChain []MessageInterceptorFunc
type msgServer struct {
RegisterCenter discoveryregistry.SvcDiscoveryRegistry
MsgDatabase controller.MsgDatabase
@ -24,8 +31,22 @@ type msgServer struct {
*localcache.GroupLocalCache
black *check.BlackChecker
MessageLocker MessageLocker
Handlers MessageInterceptorChain
}

func (m *msgServer) addInterceptorHandler(interceptorFunc ...MessageInterceptorFunc) {
m.Handlers = append(m.Handlers, interceptorFunc...)
}
func (m *msgServer) execInterceptorHandler(ctx context.Context, req *msg.SendMsgReq) error {
for _, handler := range m.Handlers {
msgData, err := handler(ctx, req)
if err != nil {
return err
}
req.MsgData = msgData
}
return nil
}
func Start(client discoveryregistry.SvcDiscoveryRegistry, server *grpc.Server) error {
rdb, err := cache.NewRedis()
if err != nil {
@ -35,7 +56,6 @@ func Start(client discoveryregistry.SvcDiscoveryRegistry, server *grpc.Server) e
if err != nil {
return err
}

cacheModel := cache.NewCacheModel(rdb)
msgDocModel := unrelation.NewMsgMongoDriver(mongo.GetDatabase())
extendMsgModel := unrelation.NewExtendMsgSetMongoDriver(mongo.GetDatabase())
@ -55,6 +75,7 @@ func Start(client discoveryregistry.SvcDiscoveryRegistry, server *grpc.Server) e
friend: check.NewFriendChecker(client),
MessageLocker: NewLockerMessage(cacheModel),
}
s.addInterceptorHandler(MessageHasReadEnabled, MessageModifyCallback)
s.initPrometheus()
msg.RegisterMsgServer(server, s)
return nil
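Illustrative note, not part of the commit: Start now registers MessageHasReadEnabled and MessageModifyCallback on the msgServer, and execInterceptorHandler runs the chain in order, letting each handler rewrite req.MsgData or veto the send. A minimal sketch of driving the chain from inside the msg package follows; the actual call site in SendMsg is not shown in this diff, so the wiring below is an assumption.

// Sketch only: registers one extra interceptor and runs the chain.
// MessageBeforeSendCallback is defined in message_interceptor.go above;
// any handler may return an error (e.g. errs.ErrMessageHasReadDisable) to abort.
func runInterceptors(ctx context.Context, s *msgServer, req *msg.SendMsgReq) error {
	s.addInterceptorHandler(MessageBeforeSendCallback)
	if err := s.execInterceptorHandler(ctx, req); err != nil {
		return err
	}
	// req.MsgData now carries whatever rewrites the handlers applied.
	return nil
}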
@ -75,3 +96,81 @@ func (m *msgServer) initPrometheus() {
|
||||
prome.NewWorkSuperGroupChatMsgProcessSuccessCounter()
|
||||
prome.NewWorkSuperGroupChatMsgProcessFailedCounter()
|
||||
}
|
||||
func (m *msgServer) SendMsg(ctx context.Context, req *msg.SendMsgReq) (resp *msg.SendMsgResp, error error) {
|
||||
resp = &msg.SendMsgResp{}
|
||||
flag := isMessageHasReadEnabled(req.MsgData)
|
||||
if !flag {
|
||||
return nil, errs.ErrMessageHasReadDisable.Wrap()
|
||||
}
|
||||
m.encapsulateMsgData(req.MsgData)
|
||||
if err := CallbackMsgModify(ctx, req); err != nil && err != errs.ErrCallbackContinue {
|
||||
return nil, err
|
||||
}
|
||||
switch req.MsgData.SessionType {
|
||||
case constant.SingleChatType:
|
||||
return m.sendMsgSingleChat(ctx, req)
|
||||
case constant.GroupChatType:
|
||||
return m.sendMsgGroupChat(ctx, req)
|
||||
case constant.NotificationChatType:
|
||||
return m.sendMsgNotification(ctx, req)
|
||||
case constant.SuperGroupChatType:
|
||||
return m.sendMsgSuperGroupChat(ctx, req)
|
||||
default:
|
||||
return nil, errs.ErrArgs.Wrap("unknown sessionType")
|
||||
}
|
||||
}
|
||||
|
||||
func (m *msgServer) GetMaxAndMinSeq(ctx context.Context, req *sdkws.GetMaxAndMinSeqReq) (*sdkws.GetMaxAndMinSeqResp, error) {
|
||||
if err := tokenverify.CheckAccessV3(ctx, req.UserID); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
resp := new(sdkws.GetMaxAndMinSeqResp)
|
||||
m2 := make(map[string]*sdkws.MaxAndMinSeq)
|
||||
maxSeq, err := m.MsgDatabase.GetUserMaxSeq(ctx, req.UserID)
|
||||
if err != nil && errs.Unwrap(err) != redis.Nil {
|
||||
return nil, err
|
||||
}
|
||||
minSeq, err := m.MsgDatabase.GetUserMinSeq(ctx, req.UserID)
|
||||
if err != nil && errs.Unwrap(err) != redis.Nil {
|
||||
return nil, err
|
||||
}
|
||||
resp.MaxSeq = maxSeq
|
||||
resp.MinSeq = minSeq
|
||||
if len(req.GroupIDs) > 0 {
|
||||
for _, groupID := range req.GroupIDs {
|
||||
maxSeq, err := m.MsgDatabase.GetGroupMaxSeq(ctx, groupID)
|
||||
if err != nil && errs.Unwrap(err) != redis.Nil {
|
||||
return nil, err
|
||||
}
|
||||
minSeq, err := m.MsgDatabase.GetGroupMinSeq(ctx, groupID)
|
||||
if err != nil && errs.Unwrap(err) != redis.Nil {
|
||||
return nil, err
|
||||
}
|
||||
m2[groupID] = &sdkws.MaxAndMinSeq{
|
||||
MaxSeq: maxSeq,
|
||||
MinSeq: minSeq,
|
||||
}
|
||||
}
|
||||
}
|
||||
resp.GroupMaxAndMinSeq = m2
|
||||
return resp, nil
|
||||
}
|
||||
|
||||
func (m *msgServer) PullMessageBySeqs(ctx context.Context, req *sdkws.PullMessageBySeqsReq) (*sdkws.PullMessageBySeqsResp, error) {
|
||||
resp := &sdkws.PullMessageBySeqsResp{GroupMsgDataList: make(map[string]*sdkws.MsgDataList)}
|
||||
msgs, err := m.MsgDatabase.GetMsgBySeqs(ctx, req.UserID, req.Seqs)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
resp.List = msgs
|
||||
for groupID, list := range req.GroupSeqs {
|
||||
msgs, err := m.MsgDatabase.GetSuperGroupMsgBySeqs(ctx, groupID, list.Seqs)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
resp.GroupMsgDataList[groupID] = &sdkws.MsgDataList{
|
||||
MsgDataList: msgs,
|
||||
}
|
||||
}
|
||||
return resp, nil
|
||||
}
|
||||
|
@ -8,8 +8,8 @@ import (
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/controller"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/relation"
|
||||
tablerelation "github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/table/relation"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/mcontext"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/tokenverify"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/tracelog"
|
||||
registry "github.com/OpenIMSDK/Open-IM-Server/pkg/discoveryregistry"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/errs"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/proto/sdkws"
|
||||
@ -92,10 +92,10 @@ func (s *userServer) UpdateUserInfo(ctx context.Context, req *pbuser.UpdateUserI
|
||||
}
|
||||
go func() {
|
||||
for _, v := range friends {
|
||||
s.notification.FriendInfoUpdatedNotification(ctx, req.UserInfo.UserID, v, tracelog.GetOpUserID(ctx))
|
||||
s.notification.FriendInfoUpdatedNotification(ctx, req.UserInfo.UserID, v, mcontext.GetOpUserID(ctx))
|
||||
}
|
||||
}()
|
||||
s.notification.UserInfoUpdatedNotification(ctx, tracelog.GetOpUserID(ctx), req.UserInfo.UserID)
|
||||
s.notification.UserInfoUpdatedNotification(ctx, mcontext.GetOpUserID(ctx), req.UserInfo.UserID)
|
||||
return resp, nil
|
||||
}
|
||||
|
||||
|
@ -11,7 +11,7 @@ import (
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/relation"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/unrelation"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/log"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/tracelog"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/mcontext"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/utils"
|
||||
"github.com/go-redis/redis/v8"
|
||||
"math"
|
||||
@ -59,7 +59,7 @@ func (c *MsgTool) getCronTaskOperationID() string {
|
||||
|
||||
func (c *MsgTool) AllUserClearMsgAndFixSeq() {
|
||||
operationID := "AllUserAndGroupClearMsgAndFixSeq"
|
||||
ctx := tracelog.NewCtx(utils.GetSelfFuncName())
|
||||
ctx := mcontext.NewCtx(utils.GetSelfFuncName())
|
||||
log.NewInfo(operationID, "============================ start del cron task ============================")
|
||||
var err error
|
||||
userIDList, err := c.userDatabase.GetAllUserID(ctx)
|
||||
@ -81,7 +81,7 @@ func (c *MsgTool) AllUserClearMsgAndFixSeq() {
|
||||
func (c *MsgTool) ClearUsersMsg(ctx context.Context, userIDs []string) {
|
||||
for _, userID := range userIDs {
|
||||
if err := c.msgDatabase.DeleteUserMsgsAndSetMinSeq(ctx, userID, int64(config.Config.Mongo.DBRetainChatRecords*24*60*60)); err != nil {
|
||||
log.NewError(tracelog.GetOperationID(ctx), utils.GetSelfFuncName(), err.Error(), userID)
|
||||
log.NewError(mcontext.GetOperationID(ctx), utils.GetSelfFuncName(), err.Error(), userID)
|
||||
}
|
||||
maxSeqCache, maxSeqMongo, err := c.GetAndFixUserSeqs(ctx, userID)
|
||||
if err != nil {
|
||||
@ -95,14 +95,14 @@ func (c *MsgTool) ClearSuperGroupMsg(ctx context.Context, superGroupIDs []string
|
||||
for _, groupID := range superGroupIDs {
|
||||
userIDs, err := c.groupDatabase.FindGroupMemberUserID(ctx, groupID)
|
||||
if err != nil {
|
||||
log.NewError(tracelog.GetOperationID(ctx), utils.GetSelfFuncName(), "FindGroupMemberUserID", err.Error(), groupID)
|
||||
log.NewError(mcontext.GetOperationID(ctx), utils.GetSelfFuncName(), "FindGroupMemberUserID", err.Error(), groupID)
|
||||
continue
|
||||
}
|
||||
if err := c.msgDatabase.DeleteUserSuperGroupMsgsAndSetMinSeq(ctx, groupID, userIDs, int64(config.Config.Mongo.DBRetainChatRecords*24*60*60)); err != nil {
|
||||
log.NewError(tracelog.GetOperationID(ctx), utils.GetSelfFuncName(), err.Error(), "DeleteUserSuperGroupMsgsAndSetMinSeq failed", groupID, userIDs, config.Config.Mongo.DBRetainChatRecords)
|
||||
log.NewError(mcontext.GetOperationID(ctx), utils.GetSelfFuncName(), err.Error(), "DeleteUserSuperGroupMsgsAndSetMinSeq failed", groupID, userIDs, config.Config.Mongo.DBRetainChatRecords)
|
||||
}
|
||||
if err := c.fixGroupSeq(ctx, groupID, userIDs); err != nil {
|
||||
log.NewError(tracelog.GetOperationID(ctx), utils.GetSelfFuncName(), err.Error(), groupID, userIDs)
|
||||
log.NewError(mcontext.GetOperationID(ctx), utils.GetSelfFuncName(), err.Error(), groupID, userIDs)
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -129,7 +129,7 @@ func (c *MsgTool) fixGroupSeq(ctx context.Context, groupID string, userIDs []str
|
||||
}
|
||||
}
|
||||
if err := c.CheckMaxSeqWithMongo(ctx, groupID, maxSeqCache, maxSeqMongo, constant.WriteDiffusion); err != nil {
|
||||
log.NewWarn(tracelog.GetOperationID(ctx), "cache max seq and mongo max seq is diff > 10", groupID, maxSeqCache, maxSeqMongo, constant.WriteDiffusion)
|
||||
log.NewWarn(mcontext.GetOperationID(ctx), "cache max seq and mongo max seq is diff > 10", groupID, maxSeqCache, maxSeqMongo, constant.WriteDiffusion)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
@ -138,16 +138,16 @@ func (c *MsgTool) GetAndFixUserSeqs(ctx context.Context, userID string) (maxSeqC
|
||||
minSeqMongo, maxSeqMongo, minSeqCache, maxSeqCache, err := c.msgDatabase.GetUserMinMaxSeqInMongoAndCache(ctx, userID)
|
||||
if err != nil {
|
||||
if err != unrelation.ErrMsgNotFound {
|
||||
log.NewError(tracelog.GetOperationID(ctx), utils.GetSelfFuncName(), err.Error(), "GetUserMinMaxSeqInMongoAndCache failed", userID)
|
||||
log.NewError(mcontext.GetOperationID(ctx), utils.GetSelfFuncName(), err.Error(), "GetUserMinMaxSeqInMongoAndCache failed", userID)
|
||||
}
|
||||
return 0, 0, err
|
||||
}
|
||||
log.NewDebug(tracelog.GetOperationID(ctx), userID, minSeqMongo, maxSeqMongo, minSeqCache, maxSeqCache)
|
||||
log.NewDebug(mcontext.GetOperationID(ctx), userID, minSeqMongo, maxSeqMongo, minSeqCache, maxSeqCache)
|
||||
if minSeqCache > maxSeqCache {
|
||||
if err := c.msgDatabase.SetUserMinSeq(ctx, userID, maxSeqCache); err != nil {
|
||||
log.NewError(tracelog.GetOperationID(ctx), "SetUserMinSeq failed", userID, minSeqCache, maxSeqCache)
|
||||
log.NewError(mcontext.GetOperationID(ctx), "SetUserMinSeq failed", userID, minSeqCache, maxSeqCache)
|
||||
} else {
|
||||
log.NewWarn(tracelog.GetOperationID(ctx), "SetUserMinSeq success", userID, minSeqCache, maxSeqCache)
|
||||
log.NewWarn(mcontext.GetOperationID(ctx), "SetUserMinSeq success", userID, minSeqCache, maxSeqCache)
|
||||
}
|
||||
}
|
||||
return maxSeqCache, maxSeqMongo, nil
|
||||
@ -156,14 +156,14 @@ func (c *MsgTool) GetAndFixUserSeqs(ctx context.Context, userID string) (maxSeqC
|
||||
func (c *MsgTool) GetAndFixGroupUserSeq(ctx context.Context, userID string, groupID string, maxSeqCache int64) (minSeqCache int64, err error) {
|
||||
minSeqCache, err = c.msgDatabase.GetGroupUserMinSeq(ctx, groupID, userID)
|
||||
if err != nil {
|
||||
log.NewError(tracelog.GetOperationID(ctx), "GetGroupUserMinSeq failed", groupID, userID)
|
||||
log.NewError(mcontext.GetOperationID(ctx), "GetGroupUserMinSeq failed", groupID, userID)
|
||||
return 0, err
|
||||
}
|
||||
if minSeqCache > maxSeqCache {
|
||||
if err := c.msgDatabase.SetGroupUserMinSeq(ctx, groupID, userID, maxSeqCache); err != nil {
|
||||
log.NewError(tracelog.GetOperationID(ctx), "SetGroupUserMinSeq failed", userID, minSeqCache, maxSeqCache)
|
||||
log.NewError(mcontext.GetOperationID(ctx), "SetGroupUserMinSeq failed", userID, minSeqCache, maxSeqCache)
|
||||
} else {
|
||||
log.NewWarn(tracelog.GetOperationID(ctx), "SetGroupUserMinSeq success", userID, minSeqCache, maxSeqCache)
|
||||
log.NewWarn(mcontext.GetOperationID(ctx), "SetGroupUserMinSeq success", userID, minSeqCache, maxSeqCache)
|
||||
}
|
||||
}
|
||||
return minSeqCache, nil
|
||||
|
@ -4,7 +4,7 @@ import (
|
||||
"context"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/constant"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/cache"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/tracelog"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/mcontext"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/proto/sdkws"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/utils"
|
||||
"github.com/golang/protobuf/proto"
|
||||
@ -66,7 +66,7 @@ func TestDeleteMongoMsgAndResetRedisSeq(t *testing.T) {
|
||||
mongoClient := mgo.GetDatabase().Collection(unRelationTb.MsgDocModel{}.TableName())
|
||||
|
||||
ctx := context.Background()
|
||||
tracelog.SetOperationID(ctx, operationID)
|
||||
ctx = mcontext.SetOperationID(ctx, operationID)
|
||||
testUID1 := "test_del_id1"
|
||||
_, err = mongoClient.DeleteOne(ctx, bson.M{"uid": testUID1 + ":" + strconv.Itoa(0)})
|
||||
if err != nil {
|
||||
|
@ -42,7 +42,11 @@ func apiSuccess(data any) *apiResponse {
func apiError(err error) *apiResponse {
unwrap := errs.Unwrap(err)
if codeErr, ok := unwrap.(errs.CodeError); ok {
return &apiResponse{ErrCode: codeErr.Code(), ErrMsg: codeErr.Msg(), ErrDlt: codeErr.Detail()}
resp := apiResponse{ErrCode: codeErr.Code(), ErrMsg: codeErr.Msg(), ErrDlt: codeErr.Detail()}
if resp.ErrDlt == "" {
resp.ErrDlt = err.Error()
}
return &resp
}
return &apiResponse{ErrCode: errs.ServerInternalError, ErrMsg: err.Error()}
}
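Side note, not in the commit: the change above guarantees ErrDlt is non-empty for CodeError values, falling back to the full error string when no detail was attached. A small sketch of the resulting decision order, using only the errs.CodeError methods already shown:

// Sketch only: mirrors the fallback apiError now applies for CodeError values.
func errDlt(codeErr errs.CodeError, raw error) string {
	if d := codeErr.Detail(); d != "" {
		return d // detail supplied by the CodeError
	}
	return raw.Error() // new fallback: surface the wrapped error text
}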
@ -34,7 +34,7 @@ type AccountCheckResp struct {

type ManagementSendMsg struct {
SendID string `json:"sendID" binding:"required"`
GroupID string `json:"groupID" `
GroupID string `json:"groupID" binding:"required_if=SessionType 2|required_if=SessionType 3"`
SenderNickname string `json:"senderNickname" `
SenderFaceURL string `json:"senderFaceURL" `
SenderPlatformID int32 `json:"senderPlatformID"`
@ -47,8 +47,18 @@ type ManagementSendMsg struct {
}

type ManagementSendMsgReq struct {
ManagementSendMsg
RecvID string `json:"recvID" `
SendID string `json:"sendID" binding:"required"`
RecvID string `json:"recvID" binding:"required_if" message:"recvID is required if sessionType is SingleChatType or NotificationChatType"`
GroupID string `json:"groupID" binding:"required_if" message:"groupID is required if sessionType is GroupChatType or SuperGroupChatType"`
SenderNickname string `json:"senderNickname" `
SenderFaceURL string `json:"senderFaceURL" `
SenderPlatformID int32 `json:"senderPlatformID"`
Content map[string]interface{} `json:"content" binding:"required" swaggerignore:"true"`
ContentType int32 `json:"contentType" binding:"required"`
SessionType int32 `json:"sessionType" binding:"required"`
IsOnlineOnly bool `json:"isOnlineOnly"`
NotOfflinePush bool `json:"notOfflinePush"`
OfflinePushInfo *sdkws.OfflinePushInfo `json:"offlinePushInfo"`
}

type ManagementSendMsgResp struct {
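For reference, not part of the commit: with go-playground/validator, required_if takes field/value pairs and alternatives are joined with "|", exactly as the GroupID tag in ManagementSendMsg above does. The RecvID and GroupID tags in ManagementSendMsgReq leave required_if without parameters and rely on the message tag for the error text; a hedged sketch of the fully parameterized form, assuming session type values 1 (single chat) and 4 (notification) to mirror the 2/3 already used for group and super group:

// Sketch only; the numeric session types for single/notification chat are assumptions.
type sendMsgBinding struct {
	RecvID      string `json:"recvID" binding:"required_if=SessionType 1|required_if=SessionType 4"`
	GroupID     string `json:"groupID" binding:"required_if=SessionType 2|required_if=SessionType 3"`
	SessionType int32  `json:"sessionType" binding:"required"`
}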
@ -273,8 +273,12 @@ const (
const OperationID = "operationID"
const OpUserID = "opUserID"
const ConnID = "connID"
const OpUserIDPlatformID = "platformID"
const OpUserPlatform = "platform"
const Token = "token"
const RpcCustomHeader = "customHeader" // custom ctx parameter for RPC middleware
const CheckKey = "CheckKey"
const TriggerID = "triggerID"
const RemoteAddr = "remoteAddr"

const (
UnreliableNotification = 1
@ -325,3 +329,5 @@ const (
FlagPrometheusPort = "prometheus_port"
FlagConf = "config_folder_path"
)

const OpenIMCommonConfigKey = "OpenIMServerConfig"
1
pkg/common/db/cache/redis.go
vendored
@ -10,7 +10,6 @@ import (
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/config"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/constant"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/log"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/tracelog"
pbMsg "github.com/OpenIMSDK/Open-IM-Server/pkg/proto/msg"
"github.com/OpenIMSDK/Open-IM-Server/pkg/proto/sdkws"
"github.com/OpenIMSDK/Open-IM-Server/pkg/utils"
@ -8,7 +8,7 @@ import (
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/cache"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/table/relation"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/tx"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/tracelog"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/mcontext"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/errs"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/utils"
|
||||
"gorm.io/gorm"
|
||||
@ -106,7 +106,7 @@ func (f *friendDatabase) BecomeFriends(ctx context.Context, ownerUserID string,
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
opUserID := tracelog.GetOperationID(ctx)
|
||||
opUserID := mcontext.GetOperationID(ctx)
|
||||
for _, v := range friendUserIDs {
|
||||
fs1 = append(fs1, &relation.FriendModel{OwnerUserID: ownerUserID, FriendUserID: v, AddSource: addSource, OperatorUserID: opUserID})
|
||||
}
|
||||
|
@ -9,8 +9,8 @@ import (
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/unrelation"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/kafka"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/log"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/mcontext"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/prome"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/tracelog"
|
||||
"github.com/gogo/protobuf/sortkeys"
|
||||
"sync"
|
||||
"time"
|
||||
@ -77,7 +77,7 @@ type MsgDatabase interface {
|
||||
|
||||
MsgToMQ(ctx context.Context, key string, msg2mq *pbMsg.MsgDataToMQ) error
|
||||
MsgToModifyMQ(ctx context.Context, aggregationID string, triggerID string, messages []*pbMsg.MsgDataToMQ) error
|
||||
MsgToPushMQ(ctx context.Context, sourceID string, msg2mq *pbMsg.MsgDataToMQ) error
|
||||
MsgToPushMQ(ctx context.Context, sourceID string, msg2mq *pbMsg.MsgDataToMQ) (int32, int64, error)
|
||||
MsgToMongoMQ(ctx context.Context, aggregationID string, triggerID string, messages []*pbMsg.MsgDataToMQ, lastSeq int64) error
|
||||
}
|
||||
|
||||
@ -193,10 +193,9 @@ func (db *msgDatabase) MsgToModifyMQ(ctx context.Context, aggregationID string,
|
||||
return nil
|
||||
}
|
||||
|
||||
func (db *msgDatabase) MsgToPushMQ(ctx context.Context, key string, msg2mq *pbMsg.MsgDataToMQ) error {
|
||||
func (db *msgDatabase) MsgToPushMQ(ctx context.Context, key string, msg2mq *pbMsg.MsgDataToMQ) (int32, int64, error) {
|
||||
mqPushMsg := pbMsg.PushMsgDataToMQ{MsgData: msg2mq.MsgData, SourceID: key}
|
||||
_, _, err := db.producerToPush.SendMessage(ctx, key, &mqPushMsg)
|
||||
return err
|
||||
return db.producerToPush.SendMessage(ctx, key, &mqPushMsg)
|
||||
}
|
||||
|
||||
func (db *msgDatabase) MsgToMongoMQ(ctx context.Context, aggregationID string, triggerID string, messages []*pbMsg.MsgDataToMQ, lastSeq int64) error {
|
||||
@ -498,7 +497,7 @@ func (db *msgDatabase) GetMsgBySeqs(ctx context.Context, userID string, seqs []i
|
||||
if err != nil {
|
||||
if err != redis.Nil {
|
||||
prome.Add(prome.MsgPullFromRedisFailedCounter, len(failedSeqs))
|
||||
log.Error(tracelog.GetOperationID(ctx), "get message from redis exception", err.Error(), failedSeqs)
|
||||
log.Error(mcontext.GetOperationID(ctx), "get message from redis exception", err.Error(), failedSeqs)
|
||||
}
|
||||
}
|
||||
prome.Add(prome.MsgPullFromRedisSuccessCounter, len(successMsgs))
|
||||
@ -519,7 +518,7 @@ func (db *msgDatabase) GetSuperGroupMsgBySeqs(ctx context.Context, groupID strin
|
||||
if err != nil {
|
||||
if err != redis.Nil {
|
||||
prome.Add(prome.MsgPullFromRedisFailedCounter, len(failedSeqs))
|
||||
log.Error(tracelog.GetOperationID(ctx), "get message from redis exception", err.Error(), failedSeqs)
|
||||
log.Error(mcontext.GetOperationID(ctx), "get message from redis exception", err.Error(), failedSeqs)
|
||||
}
|
||||
}
|
||||
prome.Add(prome.MsgPullFromRedisSuccessCounter, len(successMsgs))
|
||||
@ -604,7 +603,7 @@ func (db *msgDatabase) deleteMsgRecursion(ctx context.Context, sourceID string,
|
||||
if err != nil || msgs.DocID == "" {
|
||||
if err != nil {
|
||||
if err == unrelation.ErrMsgListNotExist {
|
||||
log.NewDebug(tracelog.GetOperationID(ctx), utils.GetSelfFuncName(), "ID:", sourceID, "index:", index, err.Error())
|
||||
log.NewDebug(mcontext.GetOperationID(ctx), utils.GetSelfFuncName(), "ID:", sourceID, "index:", index, err.Error())
|
||||
} else {
|
||||
//log.NewError(operationID, utils.GetSelfFuncName(), "GetUserMsgListByIndex failed", err.Error(), index, ID)
|
||||
}
|
||||
@ -618,7 +617,7 @@ func (db *msgDatabase) deleteMsgRecursion(ctx context.Context, sourceID string,
|
||||
}
|
||||
//log.NewDebug(operationID, "ID:", sourceID, "index:", index, "uid:", msgs.UID, "len:", len(msgs.Msg))
|
||||
if int64(len(msgs.Msg)) > db.msg.GetSingleGocMsgNum() {
|
||||
log.NewWarn(tracelog.GetOperationID(ctx), utils.GetSelfFuncName(), "msgs too large:", len(msgs.Msg), "docID:", msgs.DocID)
|
||||
log.NewWarn(mcontext.GetOperationID(ctx), utils.GetSelfFuncName(), "msgs too large:", len(msgs.Msg), "docID:", msgs.DocID)
|
||||
}
|
||||
if msgs.Msg[len(msgs.Msg)-1].SendTime+(remainTime*1000) < utils.GetCurrentTimestampByMill() && msgs.IsFull() {
|
||||
delStruct.delDocIDs = append(delStruct.delDocIDs, msgs.DocID)
|
||||
|
@ -1,23 +1,24 @@
package relation
package ormutil

import (
"github.com/OpenIMSDK/Open-IM-Server/pkg/utils"
"github.com/OpenIMSDK/Open-IM-Server/pkg/errs"
"gorm.io/gorm"
)

func gormPage[E any](db *gorm.DB, pageNumber, showNumber int32) (uint32, []*E, error) {
func GormPage[E any](db *gorm.DB, pageNumber, showNumber int32) (uint32, []*E, error) {
var count int64
if err := db.Count(&count).Error; err != nil {
return 0, nil, utils.Wrap(err, "")
var model E
if err := db.Model(&model).Count(&count).Error; err != nil {
return 0, nil, errs.Wrap(err)
}
var es []*E
if err := db.Limit(int(showNumber)).Offset(int(pageNumber * showNumber)).Find(&es).Error; err != nil {
return 0, nil, utils.Wrap(err, "")
return 0, nil, errs.Wrap(err)
}
return uint32(count), es, nil
}

func gormSearch[E any](db *gorm.DB, fields []string, value string, pageNumber, showNumber int32) (uint32, []*E, error) {
func GormSearch[E any](db *gorm.DB, fields []string, value string, pageNumber, showNumber int32) (uint32, []*E, error) {
if len(fields) > 0 && value != "" {
value = "%" + value + "%"
if len(fields) == 1 {
@ -30,23 +31,23 @@ func gormSearch[E any](db *gorm.DB, fields []string, value string, pageNumber, s
db = db.Where(t)
}
}
return gormPage[E](db, pageNumber, showNumber)
return GormPage[E](db, pageNumber, showNumber)
}

func gormIn[E any](db **gorm.DB, field string, es []E) {
func GormIn[E any](db **gorm.DB, field string, es []E) {
if len(es) == 0 {
return
}
*db = (*db).Where(field+" in (?)", es)
}

func mapCount(db *gorm.DB, field string) (map[string]uint32, error) {
func MapCount(db *gorm.DB, field string) (map[string]uint32, error) {
var items []struct {
ID string `gorm:"column:id"`
Count uint32 `gorm:"column:count"`
}
if err := db.Select(field + " as id, count(1) as count").Group(field).Find(&items).Error; err != nil {
return nil, err
return nil, errs.Wrap(err)
}
m := make(map[string]uint32)
for _, item := range items {
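Usage note, not in the commit: these helpers now live in pkg/common/db/ormutil and are exported so every gorm store can share them; GormPage offsets by pageNumber*showNumber, so the first page is pageNumber 0. A minimal sketch mirroring the BlackGorm call further below; the wrapper name and column name are illustrative assumptions.

// Sketch only: pages one owner's blacklist through the exported helper.
// "owner_user_id" assumes gorm's default snake_case column mapping for OwnerUserID.
func pageOwnerBlacks(db *gorm.DB, ownerUserID string, pageNumber, showNumber int32) (uint32, []*relation.BlackModel, error) {
	return ormutil.GormPage[relation.BlackModel](db.Where("owner_user_id = ?", ownerUserID), pageNumber, showNumber)
}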
@ -2,6 +2,7 @@ package relation
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/ormutil"
|
||||
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/table/relation"
|
||||
"github.com/OpenIMSDK/Open-IM-Server/pkg/utils"
|
||||
@ -50,7 +51,7 @@ func (b *BlackGorm) FindOwnerBlacks(ctx context.Context, ownerUserID string, pag
|
||||
if err != nil {
|
||||
return nil, 0, utils.Wrap(err, "")
|
||||
}
|
||||
totalUint32, blacks, err := gormPage[relation.BlackModel](b.db(ctx), pageNumber, showNumber)
|
||||
totalUint32, blacks, err := ormutil.GormPage[relation.BlackModel](b.db(ctx), pageNumber, showNumber)
|
||||
total = int64(totalUint32)
|
||||
return
|
||||
}
|
||||
|
@ -4,6 +4,7 @@ import (
"context"

"github.com/OpenIMSDK/Open-IM-Server/pkg/common/constant"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/ormutil"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/table/relation"
"github.com/OpenIMSDK/Open-IM-Server/pkg/utils"
"gorm.io/gorm"
@ -72,14 +73,14 @@ func (g *GroupMemberGorm) TakeOwner(ctx context.Context, groupID string) (groupM

func (g *GroupMemberGorm) SearchMember(ctx context.Context, keyword string, groupIDs []string, userIDs []string, roleLevels []int32, pageNumber, showNumber int32) (total uint32, groupList []*relation.GroupMemberModel, err error) {
db := g.DB
gormIn(&db, "group_id", groupIDs)
gormIn(&db, "user_id", userIDs)
gormIn(&db, "role_level", roleLevels)
return gormSearch[relation.GroupMemberModel](db, []string{"nickname"}, keyword, pageNumber, showNumber)
ormutil.GormIn(&db, "group_id", groupIDs)
ormutil.GormIn(&db, "user_id", userIDs)
ormutil.GormIn(&db, "role_level", roleLevels)
return ormutil.GormSearch[relation.GroupMemberModel](db, []string{"nickname"}, keyword, pageNumber, showNumber)
}

func (g *GroupMemberGorm) MapGroupMemberNum(ctx context.Context, groupIDs []string) (count map[string]uint32, err error) {
return mapCount(g.DB.Where("group_id in (?)", groupIDs), "group_id")
return ormutil.MapCount(g.DB.Where("group_id in (?)", groupIDs), "group_id")
}

func (g *GroupMemberGorm) FindJoinUserID(ctx context.Context, groupIDs []string) (groupUsers map[string][]string, err error) {
@ -2,6 +2,7 @@ package relation

import (
"context"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/ormutil"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/table/relation"
"github.com/OpenIMSDK/Open-IM-Server/pkg/utils"
"gorm.io/gorm"
@ -43,7 +44,7 @@ func (g *GroupGorm) Take(ctx context.Context, groupID string) (group *relation.G
}

func (g *GroupGorm) Search(ctx context.Context, keyword string, pageNumber, showNumber int32) (total uint32, groups []*relation.GroupModel, err error) {
return gormSearch[relation.GroupModel](g.DB, []string{"name"}, keyword, pageNumber, showNumber)
return ormutil.GormSearch[relation.GroupModel](g.DB, []string{"name"}, keyword, pageNumber, showNumber)
}

func (g *GroupGorm) GetGroupIDsByGroupType(ctx context.Context, groupType int) (groupIDs []string, err error) {
@ -2,6 +2,7 @@ package relation

import (
"context"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/ormutil"

"github.com/OpenIMSDK/Open-IM-Server/pkg/common/db/table/relation"
"github.com/OpenIMSDK/Open-IM-Server/pkg/utils"
@ -39,5 +40,5 @@ func (g *GroupRequestGorm) Take(ctx context.Context, groupID string, userID stri
}

func (g *GroupRequestGorm) Page(ctx context.Context, userID string, pageNumber, showNumber int32) (total uint32, groups []*relation.GroupRequestModel, err error) {
return gormSearch[relation.GroupRequestModel](g.DB.Where("user_id = ?", userID), nil, "", pageNumber, showNumber)
return ormutil.GormSearch[relation.GroupRequestModel](g.DB.Where("user_id = ?", userID), nil, "", pageNumber, showNumber)
}
@ -8,8 +8,6 @@ package kafka

import (
"context"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/constant"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/tracelog"
"github.com/Shopify/sarama"
)

@ -42,16 +40,9 @@ func NewMConsumerGroup(consumerConfig *MConsumerGroupConfig, topics, addrs []str
}
}

func (mc *MConsumerGroup) GetContextFromMsg(cMsg *sarama.ConsumerMessage, rootFuncName string) context.Context {
ctx := tracelog.NewCtx(rootFuncName)
var operationID string
for _, v := range cMsg.Headers {
if string(v.Key) == constant.OperationID {
operationID = string(v.Value)
}
}
tracelog.SetOperationID(ctx, operationID)
return ctx
func (mc *MConsumerGroup) GetContextFromMsg(cMsg *sarama.ConsumerMessage) context.Context {
return GetContextWithMQHeader(cMsg.Headers)
}

func (mc *MConsumerGroup) RegisterHandleAndConsumer(handler sarama.ConsumerGroupHandler) {
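Reviewer note: a minimal consumer-side sketch (not part of this commit; the kafka import path and the way the handler holds the consumer group are assumed) showing how the simplified GetContextFromMsg is meant to be used:

package example

import (
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/kafka"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/log"
    "github.com/Shopify/sarama"
)

// handle rebuilds the request context from the record headers the producer
// attached, so the recovered operationID/opUserID travel into the logs.
func handle(mc *kafka.MConsumerGroup, cMsg *sarama.ConsumerMessage) {
    ctx := mc.GetContextFromMsg(cMsg)
    log.ZInfo(ctx, "consume message", "topic", cMsg.Topic, "partition", cMsg.Partition)
}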
@ -6,7 +6,7 @@ import (
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/config"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/constant"
log "github.com/OpenIMSDK/Open-IM-Server/pkg/common/log"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/tracelog"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/mcontext"
"github.com/OpenIMSDK/Open-IM-Server/pkg/utils"

"github.com/Shopify/sarama"
@ -15,6 +15,8 @@ import (
prome "github.com/OpenIMSDK/Open-IM-Server/pkg/common/prome"
)

var emptyMsg = errors.New("binary msg is empty")

type Producer struct {
topic string
addr []string
@ -45,32 +47,48 @@ func NewKafkaProducer(addr []string, topic string) *Producer {
p.producer = producer
return &p
}

func GetMQHeaderWithContext(ctx context.Context) ([]sarama.RecordHeader, error) {
operationID, opUserID, platform, connID, err := mcontext.GetMustCtxInfo(ctx)
if err != nil {
return nil, err
}
return []sarama.RecordHeader{
{Key: []byte(constant.OperationID), Value: []byte(operationID)},
{Key: []byte(constant.OpUserID), Value: []byte(opUserID)},
{Key: []byte(constant.OpUserPlatform), Value: []byte(platform)},
{Key: []byte(constant.ConnID), Value: []byte(connID)}}, err
}
func GetContextWithMQHeader(header []*sarama.RecordHeader) context.Context {
var values []string
for _, recordHeader := range header {
values = append(values, string(recordHeader.Value))
}
return mcontext.WithMustInfoCtx(values)
}
func (p *Producer) SendMessage(ctx context.Context, key string, m proto.Message) (int32, int64, error) {
operationID := tracelog.GetOperationID(ctx)
log.Info(operationID, "SendMessage", "key ", key, m.String(), p.producer)
log.ZDebug(ctx, "SendMessage", "key ", key, "msg", m.String())
kMsg := &sarama.ProducerMessage{}
kMsg.Topic = p.topic
kMsg.Key = sarama.StringEncoder(key)
bMsg, err := proto.Marshal(m)
if err != nil {
log.Error(operationID, "", "proto marshal err = %s", err.Error())
return -1, -1, err
return 0, 0, utils.Wrap(err, "kafka proto Marshal err")
}
if len(bMsg) == 0 {
log.Error(operationID, "len(bMsg) == 0 ")
return 0, 0, errors.New("len(bMsg) == 0 ")
return 0, 0, utils.Wrap(emptyMsg, "")
}
kMsg.Value = sarama.ByteEncoder(bMsg)
log.Info(operationID, "ByteEncoder SendMessage begin", "key ", kMsg, p.producer, "len: ", kMsg.Key.Length(), kMsg.Value.Length())
if kMsg.Key.Length() == 0 || kMsg.Value.Length() == 0 {
log.Error(operationID, "kMsg.Key.Length() == 0 || kMsg.Value.Length() == 0 ", kMsg)
return -1, -1, errors.New("key or value == 0")
return 0, 0, utils.Wrap(emptyMsg, "")
}
kMsg.Metadata = ctx
kMsg.Headers = []sarama.RecordHeader{{Key: []byte(constant.OperationID), Value: []byte(operationID)}}
header, err := GetMQHeaderWithContext(ctx)
if err != nil {
return 0, 0, utils.Wrap(err, "")
}
kMsg.Headers = header
partition, offset, err := p.producer.SendMessage(kMsg)
log.Info(operationID, "ByteEncoder SendMessage end", "key ", kMsg.Key.Length(), kMsg.Value.Length(), p.producer)
log.ZDebug(ctx, "ByteEncoder SendMessage end", "key ", kMsg.Key, "key length", kMsg.Value.Length())
if err == nil {
prome.Inc(prome.SendMsgCounter)
}
@ -4,7 +4,7 @@ import (
"bufio"
"context"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/config"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/tracelog"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/mcontext"

//"bufio"
"fmt"
@ -99,7 +99,7 @@ func loggerInit(moduleName string) *LogrusLogger {
}

func InfoKv(ctx context.Context, msg string, keysAndValues ...interface{}) {
operationID := tracelog.GetOperationID(ctx)
operationID := mcontext.GetOperationID(ctx)
logger.WithFields(logrus.Fields{
"OperationID": operationID,
"PID": logger.Pid,
@ -4,7 +4,7 @@ import (
"context"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/config"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/constant"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/tracelog"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/mcontext"
rotatelogs "github.com/lestrrat-go/file-rotatelogs"
"os"
"path/filepath"
@ -139,22 +139,34 @@ func (l *ZapLogger) Error(ctx context.Context, msg string, err error, keysAndVal
if err != nil {
keysAndValues = append(keysAndValues, "error", err.Error())
}
keysAndValues = append([]interface{}{constant.OperationID, tracelog.GetOperationID(ctx)}, keysAndValues...)
keysAndValues = append([]interface{}{constant.OperationID, mcontext.GetOperationID(ctx)}, keysAndValues...)
l.zap.Errorw(msg, keysAndValues...)
}

func (l *ZapLogger) kvAppend(ctx context.Context, keysAndValues []interface{}) []interface{} {
operationID := tracelog.GetOperationID(ctx)
opUserID := tracelog.GetOpUserID(ctx)
connID := tracelog.GetConnID(ctx)
operationID := mcontext.GetOperationID(ctx)
opUserID := mcontext.GetOpUserID(ctx)
connID := mcontext.GetConnID(ctx)
triggerID := mcontext.GetTriggerID(ctx)
opUserPlatform := mcontext.GetOpUserPlatform(ctx)
remoteAddr := mcontext.GetRemoteAddr(ctx)
if opUserID != "" {
keysAndValues = append([]interface{}{constant.OpUserID, tracelog.GetOpUserID(ctx)}, keysAndValues...)
keysAndValues = append([]interface{}{constant.OpUserID, opUserID}, keysAndValues...)
}
if operationID != "" {
keysAndValues = append([]interface{}{constant.OperationID, tracelog.GetOperationID(ctx)}, keysAndValues...)
keysAndValues = append([]interface{}{constant.OperationID, operationID}, keysAndValues...)
}
if connID != "" {
keysAndValues = append([]interface{}{constant.ConnID, tracelog.GetConnID(ctx)}, keysAndValues...)
keysAndValues = append([]interface{}{constant.ConnID, connID}, keysAndValues...)
}
if triggerID != "" {
keysAndValues = append([]interface{}{constant.TriggerID, triggerID}, keysAndValues...)
}
if opUserPlatform != "" {
keysAndValues = append([]interface{}{constant.OpUserPlatform, opUserPlatform}, keysAndValues...)
}
if remoteAddr != "" {
keysAndValues = append([]interface{}{constant.RemoteAddr, remoteAddr}, keysAndValues...)
}
return keysAndValues
}
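Reviewer note: a sketch of what the extended kvAppend buys (values are made up, and it assumes the Debug/Info paths route through kvAppend as the helper suggests): one structured call picks up whichever identifiers are present on the context and skips the rest.

package example

import (
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/log"
    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/mcontext"
)

func logSend(msgID string) {
    ctx := mcontext.NewCtx("op-123")
    ctx = mcontext.SetOpUserID(ctx, "u100")
    // expected fields: operationID=op-123, opUserID=u100, msgID=<msgID>
    log.ZDebug(ctx, "send message", "msgID", msgID)
}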
122 pkg/common/mcontext/ctx.go Normal file
@ -0,0 +1,122 @@
package mcontext

import (
"context"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/constant"
"github.com/OpenIMSDK/Open-IM-Server/pkg/errs"
)

var mapper = []string{constant.OperationID, constant.OpUserID, constant.OpUserPlatform, constant.ConnID}

func WithOpUserIDContext(ctx context.Context, opUserID string) context.Context {
return context.WithValue(ctx, constant.OpUserID, opUserID)
}
func WithOpUserPlatformContext(ctx context.Context, platform string) context.Context {
return context.WithValue(ctx, constant.OpUserPlatform, platform)
}
func WithTriggerIDContext(ctx context.Context, triggerID string) context.Context {
return context.WithValue(ctx, constant.TriggerID, triggerID)
}
func NewCtx(operationID string) context.Context {
c := context.Background()
ctx := context.WithValue(c, constant.OperationID, operationID)
return SetOperationID(ctx, operationID)
}

func SetOperationID(ctx context.Context, operationID string) context.Context {
return context.WithValue(ctx, constant.OperationID, operationID)
}

func SetOpUserID(ctx context.Context, opUserID string) context.Context {
return context.WithValue(ctx, constant.OpUserID, opUserID)
}

func SetConnID(ctx context.Context, connID string) context.Context {
return context.WithValue(ctx, constant.ConnID, connID)
}

func GetOperationID(ctx context.Context) string {
if ctx.Value(constant.OperationID) != nil {
s, ok := ctx.Value(constant.OperationID).(string)
if ok {
return s
}
}
return ""
}
func GetOpUserID(ctx context.Context) string {
if ctx.Value(constant.OpUserID) != "" {
s, ok := ctx.Value(constant.OpUserID).(string)
if ok {
return s
}
}
return ""
}
func GetConnID(ctx context.Context) string {
if ctx.Value(constant.ConnID) != "" {
s, ok := ctx.Value(constant.ConnID).(string)
if ok {
return s
}
}
return ""
}

func GetTriggerID(ctx context.Context) string {
if ctx.Value(constant.TriggerID) != "" {
s, ok := ctx.Value(constant.TriggerID).(string)
if ok {
return s
}
}
return ""
}
func GetOpUserPlatform(ctx context.Context) string {
if ctx.Value(constant.OpUserPlatform) != "" {
s, ok := ctx.Value(constant.OpUserPlatform).(string)
if ok {
return s
}
}
return ""
}
func GetRemoteAddr(ctx context.Context) string {
if ctx.Value(constant.RemoteAddr) != "" {
s, ok := ctx.Value(constant.RemoteAddr).(string)
if ok {
return s
}
}
return ""
}

func GetMustCtxInfo(ctx context.Context) (operationID, opUserID, platform, connID string, err error) {
operationID, ok := ctx.Value(constant.OperationID).(string)
if !ok {
err = errs.ErrArgs.Wrap("ctx missing operationID")
return
}
opUserID, ok1 := ctx.Value(constant.OpUserID).(string)
if !ok1 {
err = errs.ErrArgs.Wrap("ctx missing opUserID")
return
}
platform, ok2 := ctx.Value(constant.OpUserPlatform).(string)
if !ok2 {
err = errs.ErrArgs.Wrap("ctx missing platform")
return
}
connID, _ = ctx.Value(constant.ConnID).(string)
return
}
func WithMustInfoCtx(values []string) context.Context {
ctx := context.Background()
for i, v := range values {
ctx = context.WithValue(ctx, mapper[i], v)
}
return ctx
}
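Reviewer note: a round-trip sketch for the new package (values are illustrative): build a context by hand, then read the mandatory triple back; connID stays optional.

package example

import "github.com/OpenIMSDK/Open-IM-Server/pkg/common/mcontext"

func buildAndRead() (operationID, opUserID, platform, connID string, err error) {
    ctx := mcontext.NewCtx("op-123")
    ctx = mcontext.SetOpUserID(ctx, "u100")
    ctx = mcontext.WithOpUserPlatformContext(ctx, "Android")
    // fails with errs.ErrArgs if operationID, opUserID or platform is missing
    return mcontext.GetMustCtxInfo(ctx)
}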
65 pkg/common/mw/check.go Normal file
@ -0,0 +1,65 @@
package mw

import (
"crypto/aes"
"crypto/cipher"
"crypto/md5"
"encoding/base64"
"errors"
"fmt"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/config"
"math/rand"
"strings"
"sync"
"time"
)

var (
once sync.Once
block cipher.Block
)

func init() {
rand.Seed(time.Now().UnixNano())
}

func initAesKey() {
once.Do(func() {
key := md5.Sum([]byte("openim:" + config.Config.Secret))
var err error
block, err = aes.NewCipher(key[:])
if err != nil {
panic(err)
}
})
}

func genReqKey(args []string) string {
initAesKey()
plaintext := md5.Sum([]byte(strings.Join(args, ":")))
iv := make([]byte, aes.BlockSize, aes.BlockSize+md5.Size)
if _, err := rand.Read(iv); err != nil {
panic(err)
}
ciphertext := make([]byte, md5.Size)
cipher.NewCBCEncrypter(block, iv).CryptBlocks(ciphertext, plaintext[:])
return base64.StdEncoding.EncodeToString(append(iv, ciphertext...))
}

func verifyReqKey(args []string, key string) error {
initAesKey()
k, err := base64.StdEncoding.DecodeString(key)
if err != nil {
return fmt.Errorf("invalid key %v", err)
}
if len(k) != aes.BlockSize+md5.Size {
return errors.New("invalid key")
}
plaintext := make([]byte, md5.Size)
cipher.NewCBCDecrypter(block, k[:aes.BlockSize]).CryptBlocks(plaintext, k[aes.BlockSize:])
sum := md5.Sum([]byte(strings.Join(args, ":")))
if string(plaintext) != string(sum[:]) {
return errors.New("mismatch key")
}
return nil
}
28 pkg/common/mw/check_test.go Normal file
@ -0,0 +1,28 @@
package mw

import (
"fmt"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/config"
"testing"
)

func TestCheck(t *testing.T) {
config.Config.Secret = "123456"

args := []string{"1", "2", "3"}

key := genReqKey(args)
fmt.Println("key:", key)
err := verifyReqKey(args, key)

fmt.Println(err)

args = []string{"4", "5", "6"}

key = genReqKey(args)
fmt.Println("key:", key)
err = verifyReqKey(args, key)

fmt.Println(err)

}
@ -3,6 +3,7 @@ package mw
import (
"bytes"
"encoding/json"
"errors"
"github.com/OpenIMSDK/Open-IM-Server/pkg/apiresp"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/config"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/constant"
@ -90,8 +91,9 @@ func GinParseOperationID() gin.HandlerFunc {
return
}
if req.OperationID == "" {
log.ZWarn(c, "header must have operationID", errs.ErrArgs.Wrap(err.Error()))
apiresp.GinError(c, errs.ErrArgs.Wrap("header must have operationID"+err.Error()))
err := errors.New("header must have operationID")
log.ZWarn(c, "header must have operationID", err)
apiresp.GinError(c, errs.ErrArgs.Wrap(err.Error()))
c.Abort()
return
}
@ -153,7 +155,7 @@ func GinParseToken(rdb redis.UniversalClient) gin.HandlerFunc {
return
}
}
c.Set(constant.OpUserIDPlatformID, constant.PlatformNameToID(claims.Platform))
c.Set(constant.OpUserPlatform, claims.Platform)
c.Set(constant.OpUserID, claims.UID)
c.Next()
}
@ -3,6 +3,7 @@ package mw
import (
"context"
"errors"
"fmt"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/constant"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/log"
"github.com/OpenIMSDK/Open-IM-Server/pkg/errs"
@ -22,17 +23,11 @@ func rpcClientInterceptor(ctx context.Context, method string, req, resp interfac
return errs.ErrInternalServer.Wrap("call rpc request context is nil")
}
log.ZInfo(ctx, "rpc client req", "funcName", method, "req", rpcString(req))
operationID, ok := ctx.Value(constant.OperationID).(string)
if !ok {
log.ZWarn(ctx, "ctx missing operationID", errors.New("ctx missing operationID"), "funcName", method)
return errs.ErrArgs.Wrap("ctx missing operationID")
ctx, err = getRpcContext(ctx, method)
if err != nil {
return err
}
md := metadata.Pairs(constant.OperationID, operationID)
opUserID, ok := ctx.Value(constant.OpUserID).(string)
if ok {
md.Append(constant.OpUserID, opUserID)
}
err = invoker(metadata.NewOutgoingContext(ctx, md), method, req, resp, cc, opts...)
err = invoker(ctx, method, req, resp, cc, opts...)
if err == nil {
log.ZInfo(ctx, "rpc client resp", "funcName", method, "resp", rpcString(resp))
return nil
@ -55,3 +50,43 @@ func rpcClientInterceptor(ctx context.Context, method string, req, resp interfac
}
return errs.NewCodeError(int(sta.Code()), sta.Message()).Wrap()
}

func getRpcContext(ctx context.Context, method string) (context.Context, error) {
md := metadata.Pairs()
if keys, _ := ctx.Value(constant.RpcCustomHeader).([]string); len(keys) > 0 {
for _, key := range keys {
val, ok := ctx.Value(key).([]string)
if !ok {
return nil, errs.ErrInternalServer.Wrap(fmt.Sprintf("ctx missing key %s", key))
}
if len(val) == 0 {
return nil, errs.ErrInternalServer.Wrap(fmt.Sprintf("ctx key %s value is empty", key))
}
md.Set(key, val...)
}
md.Set(constant.RpcCustomHeader, keys...)
}
operationID, ok := ctx.Value(constant.OperationID).(string)
if !ok {
log.ZWarn(ctx, "ctx missing operationID", errors.New("ctx missing operationID"), "funcName", method)
return nil, errs.ErrArgs.Wrap("ctx missing operationID")
}
md.Set(constant.OperationID, operationID)
var checkArgs []string
checkArgs = append(checkArgs, constant.OperationID, operationID)
opUserID, ok := ctx.Value(constant.OpUserID).(string)
if ok {
md.Set(constant.OpUserID, opUserID)
checkArgs = append(checkArgs, constant.OpUserID, opUserID)
}
opUserIDPlatformID, ok := ctx.Value(constant.OpUserPlatform).(string)
if ok {
md.Set(constant.OpUserPlatform, opUserIDPlatformID)
}
connID, ok := ctx.Value(constant.ConnID).(string)
if ok {
md.Set(constant.ConnID, connID)
}
md.Set(constant.CheckKey, genReqKey(checkArgs))
return metadata.NewOutgoingContext(ctx, md), nil
}
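Reviewer note: a caller-side sketch of the new RpcCustomHeader hook (the key and value are made up). getRpcContext copies every key listed under constant.RpcCustomHeader into the outgoing gRPC metadata, next to operationID, opUserID and the generated check key.

package example

import (
    "context"

    "github.com/OpenIMSDK/Open-IM-Server/pkg/common/constant"
)

// withTraceColor forwards an extra "traceColor" header on the next RPC call.
func withTraceColor(ctx context.Context) context.Context {
    ctx = context.WithValue(ctx, constant.RpcCustomHeader, []string{"traceColor"})
    return context.WithValue(ctx, "traceColor", []string{"blue"})
}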
@ -3,6 +3,7 @@ package mw
import (
"context"
"fmt"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/constant"
"math"
"runtime"
"runtime/debug"
@ -20,9 +21,6 @@ import (
"google.golang.org/grpc/status"
)

const OperationID = "operationID"
const OpUserID = "opUserID"

func rpcString(v interface{}) string {
if s, ok := v.(interface{ String() string }); ok {
return s.String()
@ -31,7 +29,6 @@ func rpcString(v interface{}) string {
}

func rpcServerInterceptor(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (resp interface{}, err error) {
var operationID string
defer func() {
if r := recover(); r != nil {
log.ZError(ctx, "rpc panic", nil, "FullMethod", info.FullMethod, "type:", fmt.Sprintf("%T", r), "panic:", r)
@ -59,17 +56,39 @@ func rpcServerInterceptor(ctx context.Context, req interface{}, info *grpc.Unary
if !ok {
return nil, status.New(codes.InvalidArgument, "missing metadata").Err()
}
if opts := md.Get(OperationID); len(opts) != 1 || opts[0] == "" {
if keys := md.Get(constant.RpcCustomHeader); len(keys) > 0 {
for _, key := range keys {
values := md.Get(key)
if len(values) == 0 {
return nil, status.New(codes.InvalidArgument, fmt.Sprintf("missing metadata key %s", key)).Err()
}
ctx = context.WithValue(ctx, key, values)
}
}
args := make([]string, 0, 4)
if opts := md.Get(constant.OperationID); len(opts) != 1 || opts[0] == "" {
return nil, status.New(codes.InvalidArgument, "operationID error").Err()
} else {
operationID = opts[0]
args = append(args, constant.OperationID, opts[0])
ctx = context.WithValue(ctx, constant.OperationID, opts[0])
}
var opUserID string
if opts := md.Get(OpUserID); len(opts) == 1 {
opUserID = opts[0]
if opts := md.Get(constant.OpUserID); len(opts) == 1 {
args = append(args, constant.OpUserID, opts[0])
ctx = context.WithValue(ctx, constant.OpUserID, opts[0])
}
if opts := md.Get(constant.OpUserPlatform); len(opts) == 1 {
ctx = context.WithValue(ctx, constant.OpUserPlatform, opts[0])
}
if opts := md.Get(constant.ConnID); len(opts) == 1 {
ctx = context.WithValue(ctx, constant.ConnID, opts[0])
}
if opts := md.Get(constant.CheckKey); len(opts) != 1 || opts[0] == "" {
return nil, status.New(codes.InvalidArgument, "check key empty").Err()
} else {
if err := verifyReqKey(args, opts[0]); err != nil {
return nil, status.New(codes.InvalidArgument, err.Error()).Err()
}
}
ctx = context.WithValue(ctx, OperationID, operationID)
ctx = context.WithValue(ctx, OpUserID, opUserID)
log.ZInfo(ctx, "rpc server req", "funcName", funcName, "req", rpcString(req))
resp, err = handler(ctx, req)
if err == nil {
@ -119,7 +138,7 @@ func rpcServerInterceptor(ctx context.Context, req interface{}, info *grpc.Unary
if err != nil {
panic(err)
}
log.ZError(ctx, "rpc server resp", err, "funcName", funcName)
log.ZWarn(ctx, "rpc server resp", err, "funcName", funcName)
return nil, details.Err()
}
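Reviewer note: the matching server-side sketch ("traceColor" is the same made-up key as in the client note above). After rpcServerInterceptor has copied the metadata back into the context, a handler reads it with a plain type assertion.

package example

import "context"

func traceColor(ctx context.Context) string {
    if vals, ok := ctx.Value("traceColor").([]string); ok && len(vals) > 0 {
        return vals[0]
    }
    return ""
}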
@ -4,7 +4,7 @@ import (
"context"
"fmt"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/config"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/tracelog"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/mcontext"
"github.com/OpenIMSDK/Open-IM-Server/pkg/errs"
"github.com/OpenIMSDK/Open-IM-Server/pkg/utils"
"github.com/golang-jwt/jwt/v4"
@ -61,7 +61,7 @@ func GetClaimFromToken(tokensString string) (*Claims, error) {
}

func CheckAccessV3(ctx context.Context, ownerUserID string) (err error) {
opUserID := tracelog.GetOpUserID(ctx)
opUserID := mcontext.GetOpUserID(ctx)
if utils.IsContain(opUserID, config.Config.Manager.AppManagerUid) {
return nil
}
@ -72,14 +72,14 @@ func CheckAccessV3(ctx context.Context, ownerUserID string) (err error) {
}

func IsAppManagerUid(ctx context.Context) bool {
return utils.IsContain(tracelog.GetOpUserID(ctx), config.Config.Manager.AppManagerUid)
return utils.IsContain(mcontext.GetOpUserID(ctx), config.Config.Manager.AppManagerUid)
}

func CheckAdmin(ctx context.Context) error {
if utils.IsContain(tracelog.GetOpUserID(ctx), config.Config.Manager.AppManagerUid) {
if utils.IsContain(mcontext.GetOpUserID(ctx), config.Config.Manager.AppManagerUid) {
return nil
}
return errs.ErrIdentity.Wrap(fmt.Sprintf("user %s is not admin userID", tracelog.GetOpUserID(ctx)))
return errs.ErrIdentity.Wrap(fmt.Sprintf("user %s is not admin userID", mcontext.GetOpUserID(ctx)))
}

func ParseRedisInterfaceToken(redisToken interface{}) (*Claims, error) {
@ -1,55 +0,0 @@
package tracelog

import (
"context"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/constant"
)

func NewCtx(operationID string) context.Context {
c := context.Background()
ctx := context.WithValue(c, constant.OperationID, operationID)
SetOperationID(ctx, operationID)
return ctx
}

func SetOperationID(ctx context.Context, operationID string) {
ctx = context.WithValue(ctx, constant.OperationID, operationID)
}

func SetOpUserID(ctx context.Context, opUserID string) {
ctx = context.WithValue(ctx, constant.OpUserID, opUserID)
}

func SetConnID(ctx context.Context, connID string) {
ctx = context.WithValue(ctx, constant.ConnID, connID)
}

func GetOperationID(ctx context.Context) string {
if ctx.Value(constant.OperationID) != nil {
s, ok := ctx.Value(constant.OperationID).(string)
if ok {
return s
}
}
return ""
}

func GetOpUserID(ctx context.Context) string {
if ctx.Value(constant.OpUserID) != "" {
s, ok := ctx.Value(constant.OpUserID).(string)
if ok {
return s
}
}
return ""
}

func GetConnID(ctx context.Context) string {
if ctx.Value(constant.ConnID) != "" {
s, ok := ctx.Value(constant.ConnID).(string)
if ok {
return s
}
}
return ""
}
@ -48,7 +48,7 @@ const (

// group error codes
GroupIDNotFoundError = 1201 //GroupID does not exist
GroupIDIDExisted = 1202 //GroupID already exists
GroupIDExisted = 1202 //GroupID already exists
OnlyOneOwnerError = 1203 //a group can only have one owner
InGroupAlreadyError = 1204 //already in the group
NotInGroupYetError = 1205 //not in the group
@ -11,7 +11,8 @@ type CodeError interface {
Msg() string
Detail() string
WithDetail(detail string) CodeError
Is(err error) bool
// Is reports whether err is this error; when loose is false, only an identical error code counts as the same error. loose defaults to true.
Is(err error, loose ...bool) bool
Wrap(msg ...string) error
error
}
@ -59,13 +60,23 @@ func (e *codeError) Wrap(w ...string) error {
return errors.Wrap(e, strings.Join(w, ", "))
}

func (e *codeError) Is(err error) bool {
func (e *codeError) Is(err error, loose ...bool) bool {
if err == nil {
return false
}
var allowSubclasses bool
if len(loose) == 0 {
allowSubclasses = true
} else {
allowSubclasses = loose[0]
}
codeErr, ok := Unwrap(err).(CodeError)
if ok {
return codeErr.Code() == e.code
if allowSubclasses {
return Relation.Is(e.code, codeErr.Code())
} else {
return codeErr.Code() == e.code
}
}
return false
}
@ -12,7 +12,7 @@ var (

ErrUserIDNotFound = NewCodeError(UserIDNotFoundError, "UserIDNotFoundError")
ErrGroupIDNotFound = NewCodeError(GroupIDNotFoundError, "GroupIDNotFoundError")
ErrGroupIDExisted = NewCodeError(GroupIDIDExisted, "GroupIDExisted")
ErrGroupIDExisted = NewCodeError(GroupIDExisted, "GroupIDExisted")
ErrUserIDExisted = NewCodeError(UserIDExisted, "UserIDExisted")

ErrRecordNotFound = NewCodeError(RecordNotFoundError, "RecordNotFoundError")
43 pkg/errs/relation.go Normal file
@ -0,0 +1,43 @@
package errs

var Relation = &relation{m: make(map[int]map[int]struct{})}

func init() {
Relation.Add(RecordNotFoundError, UserIDNotFoundError)
Relation.Add(RecordNotFoundError, GroupIDNotFoundError)
Relation.Add(DuplicateKeyError, UserIDExisted)
Relation.Add(DuplicateKeyError, GroupIDExisted)
}

type relation struct {
m map[int]map[int]struct{}
}

func (r *relation) Add(codes ...int) {
if len(codes) < 2 {
panic("codes length must be greater than 2")
}
for i := 1; i < len(codes); i++ {
parent := codes[i-1]
s, ok := r.m[parent]
if !ok {
s = make(map[int]struct{})
r.m[parent] = s
}
for _, code := range codes[i:] {
s[code] = struct{}{}
}
}
}

func (r *relation) Is(parent, child int) bool {
if parent == child {
return true
}
s, ok := r.m[parent]
if !ok {
return false
}
_, ok = s[child]
return ok
}
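Reviewer note: a small sketch of how the relation table changes Is. The concrete "not found" and "existed" codes are registered as children of the generic codes, so the generic errors now match them in the default loose mode; passing loose=false keeps the old exact-code behaviour.

package example

import "github.com/OpenIMSDK/Open-IM-Server/pkg/errs"

func classify(err error) (looseMatch, strictMatch bool) {
    // e.g. err = errs.ErrUserIDNotFound.Wrap("user u100")
    looseMatch = errs.ErrRecordNotFound.Is(err)         // true for ErrUserIDNotFound and ErrGroupIDNotFound too
    strictMatch = errs.ErrRecordNotFound.Is(err, false) // true only for RecordNotFoundError itself
    return
}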
63 pkg/rpcclient/check/gateway.go Normal file
@ -0,0 +1,63 @@
package check

import (
"context"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/config"
discoveryRegistry "github.com/OpenIMSDK/Open-IM-Server/pkg/discoveryregistry"
"github.com/OpenIMSDK/Open-IM-Server/pkg/proto/friend"
"github.com/OpenIMSDK/Open-IM-Server/pkg/proto/sdkws"
"google.golang.org/grpc"
)

type MessageGateWayRpcClient struct {
zk discoveryRegistry.SvcDiscoveryRegistry
}

func NewMessageGateWayRpcClient(zk discoveryRegistry.SvcDiscoveryRegistry) *MessageGateWayRpcClient {
return &MessageGateWayRpcClient{
zk: zk,
}
}

func (m *MessageGateWayRpcClient) GetFriendsInfo(ctx context.Context, ownerUserID, friendUserID string) (resp *sdkws.FriendInfo, err error) {
cc, err := m.getConn()
if err != nil {
return nil, err
}
r, err := friend.NewFriendClient(cc).GetDesignatedFriends(ctx, &friend.GetDesignatedFriendsReq{OwnerUserID: ownerUserID, FriendUserIDs: []string{friendUserID}})
if err != nil {
return nil, err
}
resp = r.FriendsInfo[0]
return
}
func (m *MessageGateWayRpcClient) getConn() (*grpc.ClientConn, error) {
return m.zk.GetConn(config.Config.RpcRegisterName.OpenImMessageGatewayName)
}

// IsFriend reports whether possibleFriendUserID is in userID's friend list
func (m *MessageGateWayRpcClient) IsFriend(ctx context.Context, possibleFriendUserID, userID string) (bool, error) {
cc, err := m.getConn()
if err != nil {
return false, err
}
resp, err := friend.NewFriendClient(cc).IsFriend(ctx, &friend.IsFriendReq{UserID1: userID, UserID2: possibleFriendUserID})
if err != nil {
return false, err
}
return resp.InUser1Friends, nil
}

func (m *MessageGateWayRpcClient) GetFriendIDs(ctx context.Context, ownerUserID string) (friendIDs []string, err error) {
cc, err := m.getConn()
if err != nil {
return nil, err
}
req := friend.GetFriendIDsReq{UserID: ownerUserID}
resp, err := friend.NewFriendClient(cc).GetFriendIDs(ctx, &req)
if err != nil {
return nil, err
}
return resp.FriendIDs, err
}
@ -4,7 +4,7 @@ import (
"context"
"github.com/OpenIMSDK/Open-IM-Server/pkg/apistruct"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/constant"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/tracelog"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/mcontext"
"github.com/OpenIMSDK/Open-IM-Server/pkg/proto/msg"
sdkws "github.com/OpenIMSDK/Open-IM-Server/pkg/proto/sdkws"
"github.com/OpenIMSDK/Open-IM-Server/pkg/utils"
@ -14,7 +14,7 @@ func (c *Check) ExtendMessageUpdatedNotification(ctx context.Context, sendID str
req *msg.SetMessageReactionExtensionsReq, resp *msg.SetMessageReactionExtensionsResp, isHistory bool, isReactionFromCache bool) {
var m apistruct.ReactionMessageModifierNotification
m.SourceID = req.SourceID
m.OpUserID = tracelog.GetOpUserID(ctx)
m.OpUserID = mcontext.GetOpUserID(ctx)
m.SessionType = req.SessionType
keyMap := make(map[string]*sdkws.KeyValue)
for _, valueResp := range resp.Result {
@ -5,8 +5,8 @@ import (
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/config"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/constant"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/log"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/mcontext"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/tokenverify"
"github.com/OpenIMSDK/Open-IM-Server/pkg/common/tracelog"
pbGroup "github.com/OpenIMSDK/Open-IM-Server/pkg/proto/group"
"github.com/OpenIMSDK/Open-IM-Server/pkg/proto/sdkws"
"github.com/OpenIMSDK/Open-IM-Server/pkg/proto/wrapperspb"
@ -16,7 +16,7 @@ import (
)

func (c *Check) setOpUserInfo(ctx context.Context, groupID string, groupMemberInfo *sdkws.GroupMemberFullInfo) error {
opUserID := tracelog.GetOpUserID(ctx)
opUserID := mcontext.GetOpUserID(ctx)
if tokenverify.IsManagerUserID(opUserID) {
user, err := c.user.GetUsersInfos(ctx, []string{opUserID}, true)
if err != nil {
@ -221,7 +221,7 @@ func (c *Check) GroupCreatedNotification(ctx context.Context, groupID string, in
}
}

c.groupNotification(ctx, constant.GroupCreatedNotification, &GroupCreatedTips, tracelog.GetOpUserID(ctx), groupID, "")
c.groupNotification(ctx, constant.GroupCreatedNotification, &GroupCreatedTips, mcontext.GetOpUserID(ctx), groupID, "")
}

// called after the group info has been changed
@ -246,7 +246,7 @@ func (c *Check) GroupInfoSetNotification(ctx context.Context, groupID string, gr
if err := c.setOpUserInfo(ctx, groupID, GroupInfoChangedTips.OpUser); err != nil {
return
}
c.groupNotification(ctx, constant.GroupInfoSetNotification, &GroupInfoChangedTips, tracelog.GetOpUserID(ctx), groupID, "")
c.groupNotification(ctx, constant.GroupInfoSetNotification, &GroupInfoChangedTips, mcontext.GetOpUserID(ctx), groupID, "")
}

func (c *Check) GroupMutedNotification(ctx context.Context, groupID string) {
@ -258,7 +258,7 @@ func (c *Check) GroupMutedNotification(ctx context.Context, groupID string) {
if err := c.setOpUserInfo(ctx, groupID, tips.OpUser); err != nil {
return
}
c.groupNotification(ctx, constant.GroupMutedNotification, &tips, tracelog.GetOpUserID(ctx), groupID, "")
c.groupNotification(ctx, constant.GroupMutedNotification, &tips, mcontext.GetOpUserID(ctx), groupID, "")
}

func (c *Check) GroupCancelMutedNotification(ctx context.Context, groupID string) {
@ -270,7 +270,7 @@ func (c *Check) GroupCancelMutedNotification(ctx context.Context, groupID string
if err := c.setOpUserInfo(ctx, groupID, tips.OpUser); err != nil {
return
}
c.groupNotification(ctx, constant.GroupCancelMutedNotification, &tips, tracelog.GetOpUserID(ctx), groupID, "")
c.groupNotification(ctx, constant.GroupCancelMutedNotification, &tips, mcontext.GetOpUserID(ctx), groupID, "")
}

func (c *Check) GroupMemberMutedNotification(ctx context.Context, groupID, groupMemberUserID string, mutedSeconds uint32) {
@ -286,7 +286,7 @@ func (c *Check) GroupMemberMutedNotification(ctx context.Context, groupID, group
if err := c.setGroupMemberInfo(ctx, groupID, groupMemberUserID, tips.MutedUser); err != nil {
return
}
c.groupNotification(ctx, constant.GroupMemberMutedNotification, &tips, tracelog.GetOpUserID(ctx), groupID, "")
c.groupNotification(ctx, constant.GroupMemberMutedNotification, &tips, mcontext.GetOpUserID(ctx), groupID, "")
}

func (c *Check) GroupMemberInfoSetNotification(ctx context.Context, groupID, groupMemberUserID string) {
@ -301,7 +301,7 @@ func (c *Check) GroupMemberInfoSetNotification(ctx context.Context, groupID, gro
if err := c.setGroupMemberInfo(ctx, groupID, groupMemberUserID, tips.ChangedUser); err != nil {
return
}
c.groupNotification(ctx, constant.GroupMemberInfoSetNotification, &tips, tracelog.GetOpUserID(ctx), groupID, "")
c.groupNotification(ctx, constant.GroupMemberInfoSetNotification, &tips, mcontext.GetOpUserID(ctx), groupID, "")
}

func (c *Check) GroupMemberRoleLevelChangeNotification(ctx context.Context, operationID, opUserID, groupID, groupMemberUserID string, notificationType int32) {
@ -323,7 +323,7 @@ func (c *Check) GroupMemberRoleLevelChangeNotification(ctx context.Context, oper
log.Error(operationID, "setGroupMemberInfo failed ", err.Error(), groupID, groupMemberUserID)
return
}
c.groupNotification(ctx, notificationType, &tips, tracelog.GetOpUserID(ctx), groupID, "")
c.groupNotification(ctx, notificationType, &tips, mcontext.GetOpUserID(ctx), groupID, "")
}

func (c *Check) GroupMemberCancelMutedNotification(ctx context.Context, groupID, groupMemberUserID string) {
@ -338,7 +338,7 @@ func (c *Check) GroupMemberCancelMutedNotification(ctx context.Context, groupID,
if err := c.setGroupMemberInfo(ctx, groupID, groupMemberUserID, tips.MutedUser); err != nil {
return
}
c.groupNotification(ctx, constant.GroupMemberCancelMutedNotification, &tips, tracelog.GetOpUserID(ctx), groupID, "")
c.groupNotification(ctx, constant.GroupMemberCancelMutedNotification, &tips, mcontext.GetOpUserID(ctx), groupID, "")
}

// message ReceiveJoinApplicationTips{
@ -359,7 +359,7 @@ func (c *Check) JoinGroupApplicationNotification(ctx context.Context, req *pbGro

return
}
if err = c.setPublicUserInfo(ctx, tracelog.GetOpUserID(ctx), JoinGroupApplicationTips.Applicant); err != nil {
if err = c.setPublicUserInfo(ctx, mcontext.GetOpUserID(ctx), JoinGroupApplicationTips.Applicant); err != nil {

return
}
@ -369,7 +369,7 @@ func (c *Check) JoinGroupApplicationNotification(ctx context.Context, req *pbGro
return
}
for _, v := range managerList {
c.groupNotification(ctx, constant.JoinGroupApplicationNotification, &JoinGroupApplicationTips, tracelog.GetOpUserID(ctx), "", v.UserID)
c.groupNotification(ctx, constant.JoinGroupApplicationNotification, &JoinGroupApplicationTips, mcontext.GetOpUserID(ctx), "", v.UserID)
}
}

@ -382,7 +382,7 @@ func (c *Check) MemberQuitNotification(ctx context.Context, req *pbGroup.QuitGro
return
}

c.groupNotification(ctx, constant.MemberQuitNotification, &MemberQuitTips, tracelog.GetOpUserID(ctx), req.GroupID, "")
c.groupNotification(ctx, constant.MemberQuitNotification, &MemberQuitTips, mcontext.GetOpUserID(ctx), req.GroupID, "")
}

// message ApplicationProcessedTips{
@ -402,17 +402,17 @@ func (c *Check) GroupApplicationAcceptedNotification(ctx context.Context, req *p
return
}

c.groupNotification(ctx, constant.GroupApplicationAcceptedNotification, &GroupApplicationAcceptedTips, tracelog.GetOpUserID(ctx), "", req.FromUserID)
c.groupNotification(ctx, constant.GroupApplicationAcceptedNotification, &GroupApplicationAcceptedTips, mcontext.GetOpUserID(ctx), "", req.FromUserID)
adminList, err := c.group.GetOwnerAndAdminInfos(ctx, req.GroupID)
if err != nil {
return
}
for _, v := range adminList {
if v.UserID == tracelog.GetOpUserID(ctx) {
if v.UserID == mcontext.GetOpUserID(ctx) {
continue
}
GroupApplicationAcceptedTips.ReceiverAs = 1
c.groupNotification(ctx, constant.GroupApplicationAcceptedNotification, &GroupApplicationAcceptedTips, tracelog.GetOpUserID(ctx), "", v.UserID)
c.groupNotification(ctx, constant.GroupApplicationAcceptedNotification, &GroupApplicationAcceptedTips, mcontext.GetOpUserID(ctx), "", v.UserID)
}
}

@ -424,17 +424,17 @@ func (c *Check) GroupApplicationRejectedNotification(ctx context.Context, req *p
if err := c.setOpUserInfo(ctx, req.GroupID, GroupApplicationRejectedTips.OpUser); err != nil {
return
}
c.groupNotification(ctx, constant.GroupApplicationRejectedNotification, &GroupApplicationRejectedTips, tracelog.GetOpUserID(ctx), "", req.FromUserID)
c.groupNotification(ctx, constant.GroupApplicationRejectedNotification, &GroupApplicationRejectedTips, mcontext.GetOpUserID(ctx), "", req.FromUserID)
adminList, err := c.group.GetOwnerAndAdminInfos(ctx, req.GroupID)
if err != nil {
return
}
for _, v := range adminList {
if v.UserID == tracelog.GetOpUserID(ctx) {
if v.UserID == mcontext.GetOpUserID(ctx) {
continue
}
GroupApplicationRejectedTips.ReceiverAs = 1
c.groupNotification(ctx, constant.GroupApplicationRejectedNotification, &GroupApplicationRejectedTips, tracelog.GetOpUserID(ctx), "", v.UserID)
c.groupNotification(ctx, constant.GroupApplicationRejectedNotification, &GroupApplicationRejectedTips, mcontext.GetOpUserID(ctx), "", v.UserID)
}
}

@ -449,7 +449,7 @@ func (c *Check) GroupOwnerTransferredNotification(ctx context.Context, req *pbGr
if err := c.setGroupMemberInfo(ctx, req.GroupID, req.NewOwnerUserID, GroupOwnerTransferredTips.NewGroupOwner); err != nil {
return
}
c.groupNotification(ctx, constant.GroupOwnerTransferredNotification, &GroupOwnerTransferredTips, tracelog.GetOpUserID(ctx), req.GroupID, "")
c.groupNotification(ctx, constant.GroupOwnerTransferredNotification, &GroupOwnerTransferredTips, mcontext.GetOpUserID(ctx), req.GroupID, "")
}

func (c *Check) GroupDismissedNotification(ctx context.Context, req *pbGroup.DismissGroupReq) {
@ -462,7 +462,7 @@ func (c *Check) GroupDismissedNotification(ctx context.Context, req *pbGroup.Dis

return
}
c.groupNotification(ctx, constant.GroupDismissedNotification, &tips, tracelog.GetOpUserID(ctx), req.GroupID, "")
c.groupNotification(ctx, constant.GroupDismissedNotification, &tips, mcontext.GetOpUserID(ctx), req.GroupID, "")
}

// message MemberKickedTips{
@ -488,7 +488,7 @@ func (c *Check) MemberKickedNotification(ctx context.Context, req *pbGroup.KickG
}
MemberKickedTips.KickedUserList = append(MemberKickedTips.KickedUserList, &groupMemberInfo)
}
c.groupNotification(ctx, constant.MemberKickedNotification, &MemberKickedTips, tracelog.GetOpUserID(ctx), req.GroupID, "")
c.groupNotification(ctx, constant.MemberKickedNotification, &MemberKickedTips, mcontext.GetOpUserID(ctx), req.GroupID, "")
//
//for _, v := range kickedUserIDList {
// groupNotification(constant.MemberKickedNotification, &MemberKickedTips, req.OpUserID, "", v, req.OperationID)
@ -519,7 +519,7 @@ func (c *Check) MemberInvitedNotification(ctx context.Context, groupID, reason s
}
MemberInvitedTips.InvitedUserList = append(MemberInvitedTips.InvitedUserList, &groupMemberInfo)
}
c.groupNotification(ctx, constant.MemberInvitedNotification, &MemberInvitedTips, tracelog.GetOpUserID(ctx), groupID, "")
c.groupNotification(ctx, constant.MemberInvitedNotification, &MemberInvitedTips, mcontext.GetOpUserID(ctx), groupID, "")
}

// called after an admin approves a user's own application to join the group
@ -531,7 +531,7 @@ func (c *Check) MemberEnterNotification(ctx context.Context, req *pbGroup.GroupA
if err := c.setGroupMemberInfo(ctx, req.GroupID, req.FromUserID, MemberEnterTips.EntrantUser); err != nil {
return
}
c.groupNotification(ctx, constant.MemberEnterNotification, &MemberEnterTips, tracelog.GetOpUserID(ctx), req.GroupID, "")
c.groupNotification(ctx, constant.MemberEnterNotification, &MemberEnterTips, mcontext.GetOpUserID(ctx), req.GroupID, "")
}

func (c *Check) MemberEnterDirectlyNotification(ctx context.Context, groupID string, entrantUserID string, operationID string) {
@ -472,7 +472,7 @@ func Unwrap(err error) error {

// NotNilReplace sets old to new_ when new_ is not nil
func NotNilReplace[T any](old, new_ *T) {
if old == nil || new_ == nil {
if new_ == nil {
return
}
*old = *new_
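Reviewer note: a sketch of the generic helper in its typical "patch only the provided fields" role (values are illustrative).

package example

import "github.com/OpenIMSDK/Open-IM-Server/pkg/utils"

func patch() string {
    name := "old"
    newName := "new"
    var none *string
    utils.NotNilReplace(&name, &newName) // name becomes "new"
    utils.NotNilReplace(&name, none)     // nil new_ leaves name untouched
    return name                          // "new"
}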