Create & Init Project...

This commit is contained in:
2019-04-22 18:49:16 +08:00
commit fc4fa37393
25440 changed files with 4054998 additions and 0 deletions

View File

@@ -0,0 +1,67 @@
load(
"@io_bazel_rules_go//go:def.bzl",
"go_library",
"go_test",
)
# Library target for the passport job's service package.
# srcs/deps are maintained by the BUILD file generator (tags = ["automanaged"]);
# do not hand-edit the lists — regenerate instead.
go_library(
    name = "go_default_library",
    srcs = [
        "aes.go",
        "auth.go",
        "clean_token.go",
        "contact_bind_log.go",
        "login_log.go",
        "pwd_log.go",
        "service.go",
        "set_token.go",
        "sync_pwd_log.go",
    ],
    importpath = "go-common/app/job/main/passport/service",
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
    deps = [
        "//app/job/main/passport/conf:go_default_library",
        "//app/job/main/passport/dao:go_default_library",
        "//app/job/main/passport/model:go_default_library",
        "//app/service/main/identify-game/model:go_default_library",
        "//app/service/main/identify-game/rpc/client:go_default_library",
        "//library/log:go_default_library",
        "//library/queue/databus:go_default_library",
    ],
)
# Unit tests for the service package; embedded in the library target so the
# tests share its sources. Also auto-managed — regenerate rather than edit.
go_test(
    name = "go_default_test",
    srcs = [
        "aes_test.go",
        "contact_bind_log_test.go",
        "service_test.go",
    ],
    embed = [":go_default_library"],
    rundir = ".",
    tags = ["automanaged"],
    deps = [
        "//app/job/main/passport/conf:go_default_library",
        "//app/job/main/passport/model:go_default_library",
        "//app/service/main/identify-game/model:go_default_library",
        "//library/log:go_default_library",
        "//library/queue/databus:go_default_library",
        "//library/time:go_default_library",
        "//vendor/github.com/smartystreets/goconvey/convey:go_default_library",
    ],
)
# package-srcs: every file under this package, used as an input to the
# recursive all-srcs aggregation below.
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)

# all-srcs: conventional aggregation target consumed by parent packages.
filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,66 @@
package service
import (
"bytes"
"crypto/aes"
"crypto/cipher"
"crypto/rand"
"encoding/base64"
"errors"
"io"
)
// pad applies PKCS#7-style padding so the result length is a whole multiple
// of aes.BlockSize. An already-aligned input gains a full extra block, so the
// padding is always removable.
func pad(src []byte) []byte {
	n := aes.BlockSize - len(src)%aes.BlockSize
	out := make([]byte, len(src)+n)
	copy(out, src)
	for i := len(src); i < len(out); i++ {
		out[i] = byte(n)
	}
	return out
}
// unpad strips the PKCS#7-style padding added by pad.
// It returns an error when the padding is inconsistent, which typically means
// the ciphertext was decrypted with the wrong key.
func unpad(src []byte) ([]byte, error) {
	length := len(src)
	if length == 0 {
		// BUGFIX: indexing src[length-1] below panicked on empty input.
		return nil, errors.New("unpad error. This could happen when incorrect encryption key is used")
	}
	unpadding := int(src[length-1])
	// A padding byte of 0 is never produced by pad and would remove nothing;
	// treat it, like an oversized count, as corrupt padding.
	if unpadding == 0 || unpadding > length {
		return nil, errors.New("unpad error. This could happen when incorrect encryption key is used")
	}
	return src[:(length - unpadding)], nil
}
// encrypt AES-CFB encrypts text with s.AESBlock under a fresh random IV and
// returns base64 (URL alphabet) of IV||ciphertext. decrypt reverses it.
func (s *Service) encrypt(text string) (string, error) {
	msg := pad([]byte(text))
	cipherText := make([]byte, aes.BlockSize+len(msg))
	// The IV occupies the first block of the output so decrypt can recover it.
	iv := cipherText[:aes.BlockSize]
	if _, err := io.ReadFull(rand.Reader, iv); err != nil {
		return "", err
	}
	cfb := cipher.NewCFBEncrypter(s.AESBlock, iv)
	// msg is already a []byte; the original re-converted it needlessly.
	cfb.XORKeyStream(cipherText[aes.BlockSize:], msg)
	finalMsg := base64.URLEncoding.EncodeToString(cipherText)
	return finalMsg, nil
}
// decrypt reverses encrypt: base64-decodes the input, splits off the IV from
// the first block, AES-CFB decrypts the remainder in place, and removes the
// padding. Wrong-key decryptions normally surface as an unpad error.
func (s *Service) decrypt(text string) (string, error) {
	decodedMsg, err := base64.URLEncoding.DecodeString(text)
	if err != nil {
		return "", err
	}
	// IV (one block) plus ciphertext must align to the block size.
	if (len(decodedMsg) % aes.BlockSize) != 0 {
		return "", errors.New("blocksize must be multipe of decoded message length")
	}
	iv := decodedMsg[:aes.BlockSize]
	msg := decodedMsg[aes.BlockSize:]
	cfb := cipher.NewCFBDecrypter(s.AESBlock, iv)
	// In-place decryption: src and dst may alias for XORKeyStream.
	cfb.XORKeyStream(msg, msg)
	unpadMsg, err := unpad(msg)
	if err != nil {
		return "", err
	}
	return string(unpadMsg), nil
}

View File

@@ -0,0 +1,19 @@
package service
import (
"testing"
"github.com/smartystreets/goconvey/convey"
)
// TestService_encrypt round-trips a plaintext through encrypt/decrypt and
// checks the original value comes back. Uses the shared test service from
// service_test.go (once/startService/s).
func TestService_encrypt(t *testing.T) {
	once.Do(startService)
	convey.Convey("", t, func() {
		text := "123456"
		et, err := s.encrypt(text)
		convey.So(err, convey.ShouldBeNil)
		dt, err := s.decrypt(et)
		convey.So(err, convey.ShouldBeNil)
		convey.So(dt, convey.ShouldEqual, text)
	})
}

View File

@@ -0,0 +1,137 @@
package service
import (
"encoding/base64"
"encoding/hex"
"encoding/json"
"strconv"
"strings"
"time"
"go-common/app/job/main/passport/model"
"go-common/library/log"
"go-common/library/queue/databus"
)
// authBinLogconsumeproc consumes auth binlog messages from databus, links each
// into the commit-tracking list (guarded by authBinLogMu), and dispatches it
// to a merge goroutine selected by the numeric message key so messages for the
// same key are always handled by the same routine, preserving per-key order.
func (s *Service) authBinLogconsumeproc() {
	mergeNum := s.c.Group.AuthBinLog.Num
	var (
		err  error
		n    int
		msgs = s.authBinLog.Messages()
	)
	for {
		msg, ok := <-msgs
		if !ok {
			log.Error("s.authBinLogconsumeproc closed")
			return
		}
		// marked head to first commit
		m := &message{data: msg}
		if n, err = strconv.Atoi(msg.Key); err != nil {
			// Non-numeric key: cannot route; message is skipped entirely.
			log.Error("strconv.Atoi(%s) error(%v)", msg.Key, err)
			continue
		}
		s.authBinLogMu.Lock()
		if s.authBinLogHead == nil {
			s.authBinLogHead = m
			s.authBinLogLast = m
		} else {
			s.authBinLogLast.next = m
			s.authBinLogLast = m
		}
		s.authBinLogMu.Unlock()
		// use specify goroutine to merge messages
		s.authBinLogMergeChans[n%mergeNum] <- m
		log.Info("authBinLogconsumeproc key:%s partition:%d offset:%d", msg.Key, msg.Partition, msg.Offset)
	}
}
// authBinLogcommitproc marks processed batches done and commits, per
// partition, the offset of the newest message in the contiguous done-prefix
// of the auth binlog list, so offsets are never committed past an unprocessed
// message.
func (s *Service) authBinLogcommitproc() {
	commits := make(map[int32]*databus.Message, s.c.Group.AuthBinLog.Size)
	for {
		done := <-s.authBinLogDoneChan
		// merge partitions to commit offset
		for _, d := range done {
			d.done = true
		}
		// BUGFIX: this list is guarded by authBinLogMu in
		// authBinLogconsumeproc; the original locked s.mu here, which
		// protected nothing and raced with the consumer's list updates.
		s.authBinLogMu.Lock()
		for ; s.authBinLogHead != nil && s.authBinLogHead.done; s.authBinLogHead = s.authBinLogHead.next {
			commits[s.authBinLogHead.data.Partition] = s.authBinLogHead.data
		}
		s.authBinLogMu.Unlock()
		for k, m := range commits {
			log.Info("authBinLogcommitproc committed, key:%s partition:%d offset:%d", m.Key, m.Partition, m.Offset)
			m.Commit()
			delete(commits, k)
		}
	}
}
// authBinLogmergeproc batches auth binlog messages from one merge channel.
// Deletions from user_token_* tables are collected and flushed to
// cleanAuthTokens either when a batch fills (Size) or when the ticker fires;
// every message, relevant or not, is then reported done for offset commit.
func (s *Service) authBinLogmergeproc(c chan *message) {
	var (
		err    error
		max    = s.c.Group.AuthBinLog.Size
		merges = make([]*model.AuthToken, 0, max)
		marked = make([]*message, 0, max)
		ticker = time.NewTicker(time.Duration(s.c.Group.AuthBinLog.Ticker))
	)
	for {
		select {
		case msg, ok := <-c:
			if !ok {
				log.Error("s.authBinLogmergeproc closed")
				return
			}
			bmsg := &model.BMsg{}
			if err = json.Unmarshal(msg.data.Value, bmsg); err != nil {
				log.Error("json.Unmarshal(%s) error(%v)", string(msg.data.Value), err)
				continue
			}
			// Only token-row deletions need cache cleanup.
			if bmsg.Action == "delete" && strings.HasPrefix(bmsg.Table, "user_token_") {
				t := &model.AuthToken{}
				if err = json.Unmarshal(bmsg.New, t); err != nil {
					log.Error("json.Unmarshal(%s) error(%v)", string(bmsg.New), err)
					continue
				}
				merges = append(merges, t)
			}
			marked = append(marked, msg)
			// Keep accumulating until either buffer reaches the batch size.
			if len(marked) < max && len(merges) < max {
				continue
			}
		case <-ticker.C:
			// Periodic flush so a quiet stream still commits offsets.
		}
		if len(merges) > 0 {
			s.cleanAuthTokens(merges)
			merges = make([]*model.AuthToken, 0, max)
		}
		if len(marked) > 0 {
			s.authBinLogDoneChan <- marked
			marked = make([]*message, 0, max)
		}
	}
}
// cleanAuthTokens converts each deleted auth token (base64 in the binlog)
// to its hex form and hands it to cleanToken for cache invalidation.
func (s *Service) cleanAuthTokens(authTokens []*model.AuthToken) {
	for _, at := range authTokens {
		raw, err := base64.StdEncoding.DecodeString(at.Token)
		if err != nil {
			log.Error("cleanAuthTokens base64 decode err %v", err)
			continue
		}
		log.Info("auth binlog clear cleanAuthTokens,msg is (%+v)", at)
		s.cleanToken(&model.AccessInfo{
			Mid:     at.Mid,
			AppID:   int32(at.AppID),
			Token:   hex.EncodeToString(raw),
			Expires: at.Expires,
		})
	}
}

View File

@@ -0,0 +1,170 @@
package service
import (
"context"
"encoding/json"
"strconv"
"strings"
"time"
"go-common/app/job/main/passport/model"
igmdl "go-common/app/service/main/identify-game/model"
"go-common/library/log"
"go-common/library/queue/databus"
)
const (
_changePwd = "changePwd"
_retryCount = 3
_retryDuration = time.Second
)
// tokenconsumeproc consumes aso binlog messages, links each into the
// commit-tracking list guarded by s.mu (tokencommitproc uses the same lock),
// and routes it by numeric key to a fixed merge goroutine for per-key order.
func (s *Service) tokenconsumeproc() {
	mergeNum := s.c.Group.AsoBinLog.Num
	var (
		err  error
		n    int
		msgs = s.dsToken.Messages()
	)
	for {
		msg, ok := <-msgs
		if !ok {
			log.Error("s.tokenconsumeproc closed")
			return
		}
		// marked head to first commit
		m := &message{data: msg}
		if n, err = strconv.Atoi(msg.Key); err != nil {
			// Non-numeric key: cannot route; message is skipped entirely.
			log.Error("strconv.Atoi(%s) error(%v)", msg.Key, err)
			continue
		}
		s.mu.Lock()
		if s.head == nil {
			s.head = m
			s.last = m
		} else {
			s.last.next = m
			s.last = m
		}
		s.mu.Unlock()
		// use specify goroutine to merge messages
		s.tokenMergeChans[n%mergeNum] <- m
		log.Info("tokenconsumeproc key:%s partition:%d offset:%d", msg.Key, msg.Partition, msg.Offset)
	}
}
// tokencommitproc marks processed token batches done and commits, per
// partition, the newest offset in the contiguous done-prefix of the list
// (protected by s.mu, matching tokenconsumeproc).
func (s *Service) tokencommitproc() {
	commits := make(map[int32]*databus.Message, s.c.Group.AsoBinLog.Size)
	for {
		done := <-s.tokenDoneChan
		// merge partitions to commit offset
		for _, d := range done {
			d.done = true
		}
		s.mu.Lock()
		for ; s.head != nil && s.head.done; s.head = s.head.next {
			commits[s.head.data.Partition] = s.head.data
		}
		s.mu.Unlock()
		for k, m := range commits {
			log.Info("tokencommitproc committed, key:%s partition:%d offset:%d", m.Key, m.Partition, m.Offset)
			m.Commit()
			delete(commits, k)
		}
	}
}
// tokenmergeproc batches aso binlog messages from one merge channel. Deleted
// aso_app_perm rows are collected and flushed to cleanTokens when a batch
// fills or the ticker fires; all messages are then reported for offset commit.
func (s *Service) tokenmergeproc(c chan *message) {
	var (
		err    error
		max    = s.c.Group.AsoBinLog.Size
		merges = make([]*model.AccessInfo, 0, max)
		marked = make([]*message, 0, max)
		ticker = time.NewTicker(time.Duration(s.c.Group.AsoBinLog.Ticker))
	)
	for {
		select {
		case msg, ok := <-c:
			if !ok {
				log.Error("s.tokenmergeproc closed")
				return
			}
			bmsg := &model.BMsg{}
			if err = json.Unmarshal(msg.data.Value, bmsg); err != nil {
				log.Error("json.Unmarshal(%s) error(%v)", string(msg.data.Value), err)
				continue
			}
			// Only deletions from aso_app_perm* require token cleanup.
			if bmsg.Action == "delete" && strings.HasPrefix(bmsg.Table, "aso_app_perm") {
				t := &model.AccessInfo{}
				if err = json.Unmarshal(bmsg.New, t); err != nil {
					log.Error("json.Unmarshal(%s) error(%v)", string(bmsg.New), err)
					continue
				}
				merges = append(merges, t)
			}
			marked = append(marked, msg)
			if len(marked) < max && len(merges) < max {
				continue
			}
		case <-ticker.C:
			// Periodic flush so offsets commit even on a quiet stream.
		}
		if len(merges) > 0 {
			s.cleanTokens(merges)
			merges = make([]*model.AccessInfo, 0, max)
		}
		if len(marked) > 0 {
			s.tokenDoneChan <- marked
			marked = make([]*message, 0, max)
		}
	}
}
// cleanTokens invalidates each access token in order via cleanToken.
func (s *Service) cleanTokens(tokens []*model.AccessInfo) {
	for i := range tokens {
		s.cleanToken(tokens[i])
	}
}
// cleanToken invalidates a still-valid game access token everywhere: local
// cache (retried until success), the identify-game RPC cache, and a game-side
// notification (both bounded retries). Non-game app ids and expired tokens
// are ignored. Returns the last error from a step that exhausted its retries.
func (s *Service) cleanToken(token *model.AccessInfo) (err error) {
	if token == nil || token.Expires < time.Now().Unix() {
		// Expired or missing tokens need no cleanup.
		return
	}
	isGame := false
	for _, id := range s.gameAppIDs {
		if id == token.AppID {
			isGame = true
			break
		}
	}
	if !isGame {
		return
	}
	// Local cache must be cleared; retry indefinitely. BUGFIX: this loop used
	// to spin silently on persistent failure — log each attempt like the
	// bounded loops below do.
	for {
		if err = s.d.DelCache(context.TODO(), token.Token); err == nil {
			break
		}
		log.Error("service.dao.DelCache(%+v) error(%v)", token, err)
		time.Sleep(_retryDuration)
	}
	for i := 0; i < _retryCount; i++ {
		arg := &igmdl.CleanCacheArgs{
			Token: token.Token,
			Mid:   token.Mid,
		}
		if err = s.igRPC.DelCache(context.TODO(), arg); err == nil {
			break
		}
		log.Error("service.identifyGameRPC.DelCache(%+v) error(%v)", arg, err)
		time.Sleep(_retryDuration)
	}
	for i := 0; i < _retryCount; i++ {
		if err = s.d.NotifyGame(token, _changePwd); err == nil {
			return
		}
		time.Sleep(_retryDuration)
	}
	log.Error("notify err, token(%+v)", token)
	return
}

View File

@@ -0,0 +1,283 @@
package service
import (
"context"
"crypto/sha1"
"encoding/base64"
"encoding/json"
"strings"
"time"
"go-common/app/job/main/passport/model"
"go-common/library/log"
"go-common/library/queue/databus"
)
// contactBindLogconsumeproc consumes tel/email bind-log binlog messages,
// links each into the commit-tracking list guarded by contactBindLogMu, and
// routes it by mid so one user's events are handled by a single merge routine.
// Messages for unknown tables or with mid==0 are skipped entirely.
func (s *Service) contactBindLogconsumeproc() {
	mergeRoutineNum := int64(s.c.Group.ContactBindLog.Num)
	for {
		msg, ok := <-s.dsContactBindLog.Messages()
		if !ok {
			log.Error("s.telBindlogconsumeproc closed")
			return
		}
		m := &message{data: msg}
		p := &model.BMsg{}
		if err := json.Unmarshal(msg.Value, p); err != nil {
			log.Error("json.Unmarshal(%s) error(%v)", string(msg.Value), err)
			continue
		}
		//m.object = p
		mid := int64(0)
		// Determine the routing mid from whichever bind-log table this row
		// belongs to; the decoded envelope is stashed on the message for the
		// merge routine.
		switch {
		case strings.HasPrefix(p.Table, _telBindTable):
			t := new(model.TelBindLog)
			if err := json.Unmarshal(p.New, t); err != nil {
				log.Error("json.Unmarshal(%s) error(%v)", string(p.New), err)
				continue
			}
			mid = t.Mid
			m.object = p
			log.Info("contactBindLogconsumeproc table:%s key:%s partition:%d offset:%d", p.Table, msg.Key, msg.Partition, msg.Offset)
		case strings.HasPrefix(p.Table, _emailBindTable):
			t := new(model.EmailBindLog)
			if err := json.Unmarshal(p.New, t); err != nil {
				log.Error("json.Unmarshal(%s) error(%v)", string(p.New), err)
				continue
			}
			mid = t.Mid
			m.object = p
			log.Info("contactBindLogconsumeproc table:%s key:%s partition:%d offset:%d", p.Table, msg.Key, msg.Partition, msg.Offset)
		default:
			log.Warn("unrecognized message: %+v", p)
			continue
		}
		if mid == 0 {
			log.Warn("invalid message: %+v", p)
			continue
		}
		s.contactBindLogMu.Lock()
		if s.contactBindLogHead == nil {
			s.contactBindLogHead = m
			s.contactBindLogLast = m
		} else {
			s.contactBindLogLast.next = m
			s.contactBindLogLast = m
		}
		s.contactBindLogMu.Unlock()
		// use specify goroutine to merge messages
		s.contactBindLogMergeChans[mid%mergeRoutineNum] <- m
		log.Info("contactBindLogconsumeproc key:%s partition:%d offset:%d", msg.Key, msg.Partition, msg.Offset)
	}
}
// contactBindLogcommitproc marks processed contact-bind batches done and
// commits, per partition, the newest offset in the contiguous done-prefix of
// the contact-bind list (guarded by contactBindLogMu, same as the consumer).
func (s *Service) contactBindLogcommitproc() {
	// BUGFIX: sized from Group.ContactBindLog — the original read
	// Group.Log.Size, a copy-paste from the login-log commit proc.
	commits := make(map[int32]*databus.Message, s.c.Group.ContactBindLog.Size)
	for {
		done := <-s.contactBindLogDoneChan
		// merge partitions to commit offset
		for _, d := range done {
			d.done = true
		}
		s.contactBindLogMu.Lock()
		for ; s.contactBindLogHead != nil && s.contactBindLogHead.done; s.contactBindLogHead = s.contactBindLogHead.next {
			commits[s.contactBindLogHead.data.Partition] = s.contactBindLogHead.data
		}
		s.contactBindLogMu.Unlock()
		for k, m := range commits {
			// BUGFIX: tag the log line with this proc's name (was
			// "logcommitproc", which made these commits indistinguishable
			// from the login-log proc's in the logs).
			log.Info("contactBindLogcommitproc committed, key:%s partition:%d offset:%d", m.Key, m.Partition, m.Offset)
			m.Commit()
			delete(commits, k)
		}
	}
}
// contactBindLogMergeproc batches contact-bind messages from one merge
// channel. Non-delete events on the tel/email bind-log tables are collected
// and flushed to contactBindLogProcessMerges when a batch fills or the ticker
// fires; every message is then reported done for offset commit.
func (s *Service) contactBindLogMergeproc(c chan *message) {
	var (
		max    = s.c.Group.ContactBindLog.Size
		merges = make([]*model.BMsg, 0, max)
		marked = make([]*message, 0, max)
		// BUGFIX: tick from the ContactBindLog group — the original read
		// Group.Log.Ticker, a copy-paste from the login-log merge proc, so
		// this proc's flush interval tracked the wrong config knob.
		ticker = time.NewTicker(time.Duration(s.c.Group.ContactBindLog.Ticker))
	)
	for {
		select {
		case msg, ok := <-c:
			if !ok {
				log.Error("s.contactBindLogMergeproc closed")
				return
			}
			p, assertOk := msg.object.(*model.BMsg)
			if !assertOk {
				log.Warn("s.contactBindLogMergeproc cannot convert BMsg")
				continue
			}
			// Deletions carry no new binding to record.
			if p.Action == "delete" {
				continue
			}
			log.Info("s.contactBindLogMergeproc: %+v", msg)
			switch {
			case strings.HasPrefix(p.Table, _telBindTable) || strings.HasPrefix(p.Table, _emailBindTable):
				merges = append(merges, p)
			default:
				log.Warn("unrecognized the message: %+v", p)
			}
			marked = append(marked, msg)
			if len(marked) < max && len(merges) < max {
				continue
			}
		case <-ticker.C:
			// Periodic flush so offsets commit even on a quiet stream.
		}
		if len(merges) > 0 {
			s.contactBindLogProcessMerges(merges)
			merges = make([]*model.BMsg, 0, max)
		}
		if len(marked) > 0 {
			s.contactBindLogDoneChan <- marked
			marked = make([]*message, 0, max)
		}
	}
}
// contactBindLogProcessMerges dispatches each buffered bind-log envelope to
// the tel or email handler after decoding the row payload. Messages whose
// table matches neither prefix are silently dropped (the merge proc already
// filtered, so that should not occur).
func (s *Service) contactBindLogProcessMerges(bmsgs []*model.BMsg) {
	for _, msg := range bmsgs {
		log.Info("contactBindLogProcessMerges: %+v", msg.Table)
		switch {
		case strings.HasPrefix(msg.Table, _telBindTable):
			t := new(model.TelBindLog)
			if err := json.Unmarshal(msg.New, t); err != nil {
				log.Error("json.Unmarshal(%s) error(%v)", string(msg.New), err)
				continue
			}
			s.handleTelBindLog(t)
		case strings.HasPrefix(msg.Table, _emailBindTable):
			t := new(model.EmailBindLog)
			if err := json.Unmarshal(msg.New, t); err != nil {
				log.Error("json.Unmarshal(%s) error(%v)", string(msg.New), err)
				continue
			}
			s.handleEmailBindLog(t)
		}
	}
}
// userLogExtra is the extra_data payload published with a user log: the
// AES-encrypted contact value (tel or email, whichever event applies).
type userLogExtra struct {
	EncryptTel   string `json:"tel"`
	EncryptEmail string `json:"email"`
}
// userLog is the record published to the user-log databus topic for contact
// bind events. Str0 carries a salted hash of the contact; ExtraData is a
// JSON-encoded userLogExtra.
type userLog struct {
	Action    string `json:"action"`
	Mid       int64  `json:"mid"`
	Str0      string `json:"str_0"`
	ExtraData string `json:"extra_data"`
	// Business is a fixed code (54) for this log stream — presumably the
	// passport contact-bind business id; confirm against the log consumer.
	Business int    `json:"business"`
	CTime    string `json:"ctime"`
}
// handleTelBindLog re-reads the tel bind-log row by id (retrying until the DB
// answers), encrypts and hashes the phone number, and publishes a userLog to
// the user-log databus (also retried until accepted).
func (s *Service) handleTelBindLog(telLog *model.TelBindLog) (err error) {
	var bindLog *model.TelBindLog
	// Re-fetch the full row: the binlog payload may lag the committed row.
	// NOTE(review): retries forever on DB errors — blocks this merge batch.
	for {
		bindLog, err = s.d.QueryTelBindLog(telLog.ID)
		if err != nil {
			log.Error("QueryTelBindLog (%v) err(%v)", telLog, err)
			time.Sleep(100 * time.Millisecond)
			continue
		}
		break
	}
	if bindLog == nil || bindLog.ID == 0 {
		log.Warn("telephone log (%v) nil", bindLog)
		return
	}
	rt, err := s.encrypt(bindLog.Tel)
	if err != nil {
		log.Error("aesEncrypt(%v) error(%v)", bindLog, err)
		return
	}
	extraData := userLogExtra{
		EncryptTel: rt,
	}
	hash := sha1.New()
	hash.Write([]byte(bindLog.Tel))
	extraDataBytes, err := json.Marshal(extraData)
	if err != nil {
		log.Error("extraData (%v) json marshal err(%v)", extraData, err)
		return
	}
	uLog := userLog{
		Action: "telBindLog",
		Mid:    bindLog.Mid,
		// NOTE(review): hash.Sum appends the digest to its argument, so this
		// is base64(hashSalt || sha1(tel)) — confirm the salt-prefix layout
		// is what downstream consumers expect.
		Str0:      base64.StdEncoding.EncodeToString(hash.Sum(s.hashSalt)),
		ExtraData: string(extraDataBytes),
		Business:  54,
		CTime:     time.Unix(bindLog.Timestamp, 0).Format("2006-01-02 15:04:05"),
	}
	// Publish keyed by the raw tel so events for one number stay ordered.
	for {
		if err = s.userLogPub.Send(context.Background(), bindLog.Tel, uLog); err != nil {
			log.Error("databus send(%v) error(%v)", uLog, err)
			time.Sleep(100 * time.Millisecond)
			continue
		}
		log.Info("uselog pub uLog: %+v", uLog)
		break
	}
	return
}
// handleEmailBindLog mirrors handleTelBindLog for email binds: re-read the
// row by id (retry forever), encrypt and hash the address, publish a userLog
// to the user-log databus (retry until accepted).
func (s *Service) handleEmailBindLog(emailLog *model.EmailBindLog) (err error) {
	var bindLog *model.EmailBindLog
	// NOTE(review): unbounded retry on DB errors — blocks this merge batch.
	for {
		bindLog, err = s.d.QueryEmailBindLog(emailLog.ID)
		if err != nil {
			log.Error("QueryEmailBindLog (%v) err(%v)", emailLog, err)
			time.Sleep(100 * time.Millisecond)
			continue
		}
		break
	}
	if bindLog == nil || bindLog.ID == 0 {
		log.Warn("email log (%v) nil", bindLog)
		return
	}
	rt, err := s.encrypt(bindLog.Email)
	if err != nil {
		log.Error("aesEncrypt(%v) error(%v)", bindLog, err)
		return
	}
	extraData := userLogExtra{
		EncryptEmail: rt,
	}
	hash := sha1.New()
	hash.Write([]byte(bindLog.Email))
	extraDataBytes, err := json.Marshal(extraData)
	if err != nil {
		log.Error("extraData (%v) json marshal err(%v)", extraData, err)
		return
	}
	uLog := userLog{
		Action: "emailBindLog",
		Mid:    bindLog.Mid,
		// NOTE(review): hash.Sum appends to its argument — this encodes
		// salt||sha1(email); confirm downstream expects that layout.
		Str0:      base64.StdEncoding.EncodeToString(hash.Sum(s.hashSalt)),
		ExtraData: string(extraDataBytes),
		Business:  54,
		CTime:     time.Unix(bindLog.Timestamp, 0).Format("2006-01-02 15:04:05"),
	}
	// Publish keyed by the email so events for one address stay ordered.
	for {
		if err = s.userLogPub.Send(context.Background(), bindLog.Email, uLog); err != nil {
			log.Error("databus send(%v) error(%v)", uLog, err)
			time.Sleep(100 * time.Millisecond)
			continue
		}
		log.Info("uselog pub uLog: %+v", uLog)
		break
	}
	return
}

View File

@@ -0,0 +1,100 @@
package service
import (
"context"
"testing"
"time"
"encoding/json"
"go-common/app/job/main/passport/model"
"go-common/library/queue/databus"
xtime "go-common/library/time"
)
var (
	// pCfg is the databus publisher config used by the integration test
	// below. NOTE(review): key/secret/addr are hard-coded; they appear to be
	// test-environment values ("Test1-..." group, topic "test1") but confirm
	// they are not live credentials before keeping them in source control.
	pCfg = &databus.Config{
		Key:          "dbe67e6a4c36f877",
		Secret:       "8c775ea242caa367ba5c876c04576571",
		Group:        "Test1-MainCommonArch-P",
		Topic:        "test1",
		Action:       "pub",
		Name:         "databus",
		Proto:        "tcp",
		Addr:         "172.18.33.50:6205",
		Active:       10,
		Idle:         5,
		DialTimeout:  xtime.Duration(time.Second),
		WriteTimeout: xtime.Duration(time.Second),
		ReadTimeout:  xtime.Duration(time.Second),
		IdleTimeout:  xtime.Duration(time.Minute),
	}
)
// testPub publishes one synthetic tel bind-log binlog envelope to the given
// databus producer, failing the test on send error.
func testPub(t *testing.T, d *databus.Databus) {
	tel := model.TelBindLog{ID: 2, Mid: 88883, Tel: "18817352650", Timestamp: 1500022511}
	da, _ := json.Marshal(&tel)
	c := &model.BMsg{Action: "insert", Table: "aso_telephone_bind_log", New: da}
	if err := d.Send(context.Background(), "test", c); err != nil {
		t.Errorf("d.Send(test) error(%v)", err)
	}
}
// TestDatabus is an integration check: it publishes three test messages
// through a real databus connection (pCfg) and closes the producer. It
// requires network access to the configured test broker.
func TestDatabus(t *testing.T) {
	d := databus.New(pCfg)
	testPub(t, d)
	testPub(t, d)
	testPub(t, d)
	d.Close()
}
//var aesBlock, _ = aes.NewCipher([]byte("1234567890abcdef"))
//func TestEncode(t *testing.T) {
// for a := 0; a < 1000; a++ {
// go enconde()
// }
// time.Sleep(10000 * time.Second)
//
//}
//
//func enconde() {
// for i := 0; i < 100; i++ {
// key := []byte("1234567890abcdef")
// origData := []byte(strconv.Itoa(rand.Intn(100)))
// blockSize := aesBlock.BlockSize()
// origData = PKCS7Padding(origData, blockSize)
// blockMode := cipher.NewCBCEncrypter(aesBlock, key[:blockSize])
// crypted := make([]byte, len(origData))
// blockMode.CryptBlocks(crypted, origData)
// fmt.Println(base64.StdEncoding.EncodeToString(crypted))
// }
//}
//func TestDeode(t *testing.T) {
// key := []byte("1234567890abcdef")
// b,_:=base64.StdEncoding.DecodeString("29YQhqBb/J2XiBAj6bP3Zg==");
// s,_:=AesDecrypt(b,key)
// fmt.Print(string(s))
//}
//
//
//func AesDecrypt(crypted, key []byte) ([]byte, error) {
// block, err := aes.NewCipher(key)
// if err != nil {
// return nil, err
// }
// blockSize := block.BlockSize()
// blockMode := cipher.NewCBCDecrypter(block, key[:blockSize])
// origData := make([]byte, len(crypted))
// blockMode.CryptBlocks(origData, crypted)
// origData = PKCS7UnPadding(origData)
// return origData, nil
//}
//
//
//func PKCS7UnPadding(origData []byte) []byte {
// length := len(origData)
// unpadding := int(origData[length-1])
// return origData[:(length - unpadding)]
//}

View File

@@ -0,0 +1,121 @@
package service
import (
"context"
"encoding/json"
"time"
"go-common/app/job/main/passport/model"
"go-common/library/log"
"go-common/library/queue/databus"
)
const (
_addHBaseRetryCount = 3
_addHBaseRetryDuration = time.Second
)
// logconsumeproc consumes login-log messages, links each into the
// commit-tracking list guarded by logMu, and routes it by mid so one user's
// logs are always merged by the same goroutine.
func (s *Service) logconsumeproc() {
	mergeRoutineNum := int64(s.c.Group.Log.Num)
	for {
		msg, ok := <-s.dsLog.Messages()
		if !ok {
			log.Error("s.logconsumeproc closed")
			return
		}
		// marked head to first commit
		m := &message{data: msg}
		p := &model.LoginLog{}
		if err := json.Unmarshal(msg.Value, p); err != nil {
			log.Error("json.Unmarshal(%s) error(%v)", string(msg.Value), err)
			continue
		}
		s.logMu.Lock()
		if s.logHead == nil {
			s.logHead = m
			s.logLast = m
		} else {
			s.logLast.next = m
			s.logLast = m
		}
		s.logMu.Unlock()
		m.object = p
		// use specify goroutine to merge messages
		s.logMergeChans[p.Mid%mergeRoutineNum] <- m
		log.Info("logconsumeproc key:%s partition:%d offset:%d", msg.Key, msg.Partition, msg.Offset)
	}
}
// logcommitproc marks processed login-log batches done and commits, per
// partition, the newest offset in the contiguous done-prefix of the list
// (guarded by logMu, matching logconsumeproc).
func (s *Service) logcommitproc() {
	commits := make(map[int32]*databus.Message, s.c.Group.Log.Size)
	for {
		done := <-s.logDoneChan
		// merge partitions to commit offset
		for _, d := range done {
			d.done = true
		}
		s.logMu.Lock()
		for ; s.logHead != nil && s.logHead.done; s.logHead = s.logHead.next {
			commits[s.logHead.data.Partition] = s.logHead.data
		}
		s.logMu.Unlock()
		for k, m := range commits {
			log.Info("logcommitproc committed, key:%s partition:%d offset:%d", m.Key, m.Partition, m.Offset)
			m.Commit()
			delete(commits, k)
		}
	}
}
// logmergeproc batches login logs from one merge channel and flushes them to
// processMerges when a batch fills (Size) or the ticker fires; all messages
// are then reported done for offset commit.
func (s *Service) logmergeproc(c chan *message) {
	var (
		max    = s.c.Group.Log.Size
		merges = make([]*model.LoginLog, 0, max)
		marked = make([]*message, 0, max)
		ticker = time.NewTicker(time.Duration(s.c.Group.Log.Ticker))
	)
	for {
		select {
		case msg, ok := <-c:
			if !ok {
				log.Error("s.logmergeproc closed")
				return
			}
			p, assertOk := msg.object.(*model.LoginLog)
			if assertOk {
				merges = append(merges, p)
			}
			marked = append(marked, msg)
			if len(marked) < max && len(merges) < max {
				continue
			}
		case <-ticker.C:
			// Periodic flush so offsets commit even on a quiet stream.
		}
		if len(merges) > 0 {
			s.processMerges(merges)
			merges = make([]*model.LoginLog, 0, max)
		}
		if len(marked) > 0 {
			s.logDoneChan <- marked
			marked = make([]*message, 0, max)
		}
	}
}
// processMerges persists a batch of login logs twice: one bulk write via the
// dao, then a per-record HBase write with bounded retries. The bulk write's
// error, if any, is handled inside the dao — nothing is checked here.
func (s *Service) processMerges(merges []*model.LoginLog) {
	s.d.AddLoginLog(merges)
	for _, v := range merges {
		s.addLoginLog(context.TODO(), v)
	}
}
// addLoginLog writes one login log to HBase, retrying a bounded number of
// times; the last error is returned if every attempt fails.
func (s *Service) addLoginLog(c context.Context, v *model.LoginLog) (err error) {
	for attempt := 0; attempt < _addHBaseRetryCount; attempt++ {
		if err = s.d.AddLoginLogHBase(c, v); err == nil {
			break
		}
		log.Error("failed to add login log to hbase, service.dao.AddLoginLogHBase(%+v) error(%v)", v, err)
		time.Sleep(_addHBaseRetryDuration)
	}
	return
}

View File

@@ -0,0 +1,154 @@
package service
import (
"context"
"encoding/json"
"strings"
"time"
"go-common/app/job/main/passport/model"
"go-common/library/log"
"go-common/library/queue/databus"
)
// pwdLogBMsg is a typed view of an aso_pwd_log binlog envelope, decoded in
// the consumer only to filter by table/action and to route by mid.
type pwdLogBMsg struct {
	Action string
	Table  string
	New    *model.PwdLog
}
// pwdlogconsumeproc consumes pwd-log binlog messages, keeps only inserts on
// aso_pwd_log, links them into the commit-tracking list guarded by pwdLogMu,
// and routes by mid to a fixed merge goroutine.
// NOTE(review): filtered messages are skipped before entering the list, so
// their offsets are only committed when a later kept message commits past
// them — confirm that is acceptable for this stream.
func (s *Service) pwdlogconsumeproc() {
	mergeRoutineNum := int64(s.c.Group.PwdLog.Num)
	for {
		msg, ok := <-s.dsPwdLog.Messages()
		if !ok {
			log.Error("s.pwdlogconsumeproc closed")
			return
		}
		// marked head to first commit
		m := &message{data: msg}
		p := &pwdLogBMsg{}
		if err := json.Unmarshal(msg.Value, p); err != nil {
			log.Error("json.Unmarshal(%s) error(%v)", string(msg.Value), err)
			continue
		}
		// Only handle aso_pwd_log insert binlog events.
		if p.Table != "aso_pwd_log" {
			continue
		}
		if p.Action != "insert" {
			continue
		}
		m.object = p
		s.pwdLogMu.Lock()
		if s.pwdLogHead == nil {
			s.pwdLogHead = m
			s.pwdLogLast = m
		} else {
			s.pwdLogLast.next = m
			s.pwdLogLast = m
		}
		s.pwdLogMu.Unlock()
		// use specify goroutine to merge messages
		s.pwdLogMergeChans[p.New.Mid%mergeRoutineNum] <- m
		log.Info("pwdlogconsumeproc key:%s partition:%d offset:%d", msg.Key, msg.Partition, msg.Offset)
	}
}
// pwdlogcommitproc marks processed pwd-log batches done and commits, per
// partition, the newest offset in the contiguous done-prefix of the list
// (guarded by pwdLogMu, matching pwdlogconsumeproc).
func (s *Service) pwdlogcommitproc() {
	commits := make(map[int32]*databus.Message, s.c.Group.PwdLog.Size)
	for {
		done := <-s.pwdLogDoneChan
		// merge partitions to commit offset
		for _, d := range done {
			d.done = true
		}
		s.pwdLogMu.Lock()
		for ; s.pwdLogHead != nil && s.pwdLogHead.done; s.pwdLogHead = s.pwdLogHead.next {
			commits[s.pwdLogHead.data.Partition] = s.pwdLogHead.data
		}
		s.pwdLogMu.Unlock()
		for k, m := range commits {
			log.Info("pwdlogcommitproc committed, key:%s partition:%d offset:%d", m.Key, m.Partition, m.Offset)
			m.Commit()
			delete(commits, k)
		}
	}
}
// pwdlogmergeproc batches pwd-log messages from one merge channel. Inserts on
// aso_pwd_log* are collected and flushed to pwdlogprocessMerges when a batch
// fills or the ticker fires; all messages are then reported for offset commit.
func (s *Service) pwdlogmergeproc(c chan *message) {
	var (
		max    = s.c.Group.PwdLog.Size
		merges = make([]*model.PwdLog, 0, max)
		marked = make([]*message, 0, max)
		ticker = time.NewTicker(time.Duration(s.c.Group.PwdLog.Ticker))
		err    error
	)
	for {
		select {
		case msg, ok := <-c:
			if !ok {
				log.Error("s.pwdlogmergeproc closed")
				return
			}
			bmsg := &model.BMsg{}
			if err = json.Unmarshal(msg.data.Value, bmsg); err != nil {
				log.Error("json.Unmarshal(%s) error(%v)", string(msg.data.Value), err)
				continue
			}
			if bmsg.Action == "insert" && strings.HasPrefix(bmsg.Table, "aso_pwd_log") {
				p := &model.PwdLog{}
				if err = json.Unmarshal(bmsg.New, p); err != nil {
					log.Error("json.Unmarshal(%s) error(%v)", string(bmsg.New), err)
					continue
				}
				merges = append(merges, p)
			}
			marked = append(marked, msg)
			if len(marked) < max && len(merges) < max {
				continue
			}
		case <-ticker.C:
			// Periodic flush so offsets commit even on a quiet stream.
		}
		if len(merges) > 0 {
			s.pwdlogprocessMerges(merges)
			merges = make([]*model.PwdLog, 0, max)
		}
		if len(marked) > 0 {
			// BUGFIX: send to pwdLogDoneChan — the original sent to
			// s.logDoneChan (the login-log channel), so pwd-log offsets were
			// never committed by pwdlogcommitproc and the login-log commit
			// proc was fed messages from a foreign list.
			s.pwdLogDoneChan <- marked
			marked = make([]*message, 0, max)
		}
	}
}
// pwdlogprocessMerges persists each pwd log: the full row is re-read from the
// DB by id (the binlog payload may be partial) and written to HBase, retrying
// both steps indefinitely until the record lands.
func (s *Service) pwdlogprocessMerges(merges []*model.PwdLog) {
	for _, v := range merges {
		for {
			res, err := s.d.GetPwdLog(context.Background(), v.ID)
			if err != nil {
				log.Error("fail to get pwd log, id(%d) err(%v)", v.ID, err)
				time.Sleep(_addHBaseRetryDuration)
				continue
			}
			// addPwdLog logs its own failures; retry the whole fetch+write.
			if err := s.addPwdLog(context.Background(), res); err != nil {
				time.Sleep(_addHBaseRetryDuration)
				continue
			}
			break
		}
	}
}
// addPwdLog writes one pwd log to HBase, retrying a bounded number of times;
// the last error is returned if every attempt fails.
func (s *Service) addPwdLog(c context.Context, v *model.PwdLog) (err error) {
	for attempt := 0; attempt < _addHBaseRetryCount; attempt++ {
		if err = s.d.AddPwdLogHBase(c, v); err == nil {
			break
		}
		log.Error("failed to add pwd log to hbase, service.dao.AddPwdLogHBase(%+v) error(%v)", v, err)
		time.Sleep(_addHBaseRetryDuration)
	}
	return
}

View File

@@ -0,0 +1,213 @@
package service
import (
"context"
"crypto/aes"
"crypto/cipher"
"sync"
"go-common/app/job/main/passport/conf"
"go-common/app/job/main/passport/dao"
igrpc "go-common/app/service/main/identify-game/rpc/client"
"go-common/library/queue/databus"
)
const (
	// _gameAppID is always included in gameAppIDs regardless of config.
	_gameAppID = int32(876)
	// Binlog table-name prefixes for contact bind logs.
	_telBindTable   = "aso_telephone_bind_log"
	_emailBindTable = "aso_email_bind_log"
)
// Service struct of service. Each databus stream gets its own consumer /
// merge / commit pipeline: a *databus.Databus source, a set of merge
// channels, a done channel, and a mutex-guarded linked list (head/last) used
// to commit offsets only up to the oldest unprocessed message.
type Service struct {
	c *conf.Config
	d *dao.Dao
	// RPC
	igRPC *igrpc.Client
	// game app ids
	gameAppIDs []int32
	// token proc (aso binlog); its list is guarded by mu below.
	dsToken         *databus.Databus
	tokenMergeChans []chan *message
	tokenDoneChan   chan []*message
	head, last      *message
	mu              sync.Mutex
	// user proc
	userDoneChan       chan []*message
	dsUser             *databus.Databus
	userMergeChans     []chan *message
	userHead, userLast *message
	userMu             sync.Mutex
	// log proc
	dsLog            *databus.Databus
	logMergeChans    []chan *message
	logDoneChan      chan []*message
	logHead, logLast *message
	logMu            sync.Mutex
	// pwd log proc
	dsPwdLog               *databus.Databus
	pwdLogMergeChans       []chan *message
	pwdLogDoneChan         chan []*message
	pwdLogHead, pwdLogLast *message
	pwdLogMu               sync.Mutex
	// auth bin log proc
	authBinLog                     *databus.Databus
	authBinLogMergeChans           []chan *message
	authBinLogDoneChan             chan []*message
	authBinLogHead, authBinLogLast *message
	authBinLogMu                   sync.Mutex
	// contact (tel/email) bind log proc
	dsContactBindLog                       *databus.Databus
	contactBindLogMergeChans               []chan *message
	contactBindLogDoneChan                 chan []*message
	contactBindLogHead, contactBindLogLast *message
	contactBindLogMu                       sync.Mutex
	// userLogPub publishes contact-bind user logs.
	userLogPub *databus.Databus
	// AESBlock encrypts contact values; hashSalt prefixes their SHA-1.
	AESBlock cipher.Block
	hashSalt []byte
}
// message wraps a databus message in a singly linked commit-tracking list:
// next links arrival order, object caches the decoded payload, and done is
// set once the message has been processed and may be committed.
type message struct {
	next   *message
	data   *databus.Message
	object interface{}
	done   bool
}
// New create service instance and return. It wires every databus stream to
// its commit/merge/consume goroutine trio and starts the pwd-log backfill.
func New(c *conf.Config) (s *Service) {
	// Build the game app id list: the built-in id first, then configured
	// ids with the built-in deduplicated.
	gameAppIDs := make([]int32, 0)
	gameAppIDs = append(gameAppIDs, _gameAppID)
	for _, id := range c.Game.AppIDs {
		if id == _gameAppID {
			continue
		}
		gameAppIDs = append(gameAppIDs, id)
	}
	s = &Service{
		c:          c,
		d:          dao.New(c),
		gameAppIDs: gameAppIDs,
		// RPC
		igRPC: igrpc.New(c.RPC.IdentifyGame),
		// token
		dsToken:         databus.New(c.DataBus.AsoBinLog),
		tokenMergeChans: make([]chan *message, c.Group.AsoBinLog.Num),
		tokenDoneChan:   make(chan []*message, c.Group.AsoBinLog.Chan),
		// user
		dsUser:         databus.New(c.DataBus.User),
		userMergeChans: make([]chan *message, c.Group.User.Num),
		userDoneChan:   make(chan []*message, c.Group.User.Chan),
		// log
		dsLog:         databus.New(c.DataBus.Log),
		logMergeChans: make([]chan *message, c.Group.Log.Num),
		logDoneChan:   make(chan []*message, c.Group.Log.Chan),
		// pwd log
		dsPwdLog:         databus.New(c.DataBus.PwdLog),
		pwdLogMergeChans: make([]chan *message, c.Group.PwdLog.Num),
		pwdLogDoneChan:   make(chan []*message, c.Group.PwdLog.Chan),
		// email and tel log
		dsContactBindLog:         databus.New(c.DataBus.ContactBindLog),
		contactBindLogMergeChans: make([]chan *message, c.Group.ContactBindLog.Num),
		contactBindLogDoneChan:   make(chan []*message, c.Group.ContactBindLog.Chan),
		userLogPub:               databus.New(c.DataBus.UserLog),
		// auth bin log
		authBinLog:           databus.New(c.DataBus.AuthBinLog),
		authBinLogMergeChans: make([]chan *message, c.Group.AuthBinLog.Num),
		authBinLogDoneChan:   make(chan []*message, c.Group.AuthBinLog.Chan),
		hashSalt:             []byte(c.Encode.Salt),
	}
	// NOTE(review): the cipher error is discarded — an invalid AesKey leaves
	// AESBlock nil and encrypt/decrypt will panic later; confirm key length
	// is validated elsewhere.
	s.AESBlock, _ = aes.NewCipher([]byte(c.Encode.AesKey))
	// start token proc
	go s.tokencommitproc()
	for i := 0; i < c.Group.AsoBinLog.Num; i++ {
		ch := make(chan *message, c.Group.AsoBinLog.Chan)
		s.tokenMergeChans[i] = ch
		go s.tokenmergeproc(ch)
	}
	go s.tokenconsumeproc()
	// start user proc
	go s.usercommitproc()
	for i := 0; i < c.Group.User.Num; i++ {
		ch := make(chan *message, c.Group.User.Chan)
		s.userMergeChans[i] = ch
		go s.usermergeproc(ch)
	}
	go s.userconsumeproc()
	// start log proc
	go s.logcommitproc()
	for i := 0; i < c.Group.Log.Num; i++ {
		ch := make(chan *message, c.Group.Log.Chan)
		s.logMergeChans[i] = ch
		go s.logmergeproc(ch)
	}
	go s.logconsumeproc()
	// start pwd log proc
	go s.pwdlogcommitproc()
	for i := 0; i < c.Group.PwdLog.Num; i++ {
		ch := make(chan *message, c.Group.PwdLog.Chan)
		s.pwdLogMergeChans[i] = ch
		go s.pwdlogmergeproc(ch)
	}
	go s.pwdlogconsumeproc()
	// start contact bind log proc
	go s.contactBindLogcommitproc()
	for i := 0; i < c.Group.ContactBindLog.Num; i++ {
		ch := make(chan *message, c.Group.ContactBindLog.Chan)
		s.contactBindLogMergeChans[i] = ch
		go s.contactBindLogMergeproc(ch)
	}
	go s.contactBindLogconsumeproc()
	// start auth bin log token proc
	go s.authBinLogcommitproc()
	for i := 0; i < c.Group.AuthBinLog.Num; i++ {
		ch := make(chan *message, c.Group.AuthBinLog.Chan)
		s.authBinLogMergeChans[i] = ch
		go s.authBinLogmergeproc(ch)
	}
	go s.authBinLogconsumeproc()
	// end auth bin log token proc
	go s.syncPwdLog()
	return
}
// Ping reports service health by pinging the underlying dao.
func (s *Service) Ping(c context.Context) error {
	return s.d.Ping(c)
}
// Close shuts down every databus consumer/producer and the dao.
func (s *Service) Close() (err error) {
	if s.dsToken != nil {
		s.dsToken.Close()
	}
	if s.dsUser != nil {
		s.dsUser.Close()
	}
	if s.dsLog != nil {
		s.dsLog.Close()
	}
	if s.d != nil {
		s.d.Close()
	}
	if s.dsContactBindLog != nil {
		s.dsContactBindLog.Close()
	}
	if s.userLogPub != nil {
		s.userLogPub.Close()
	}
	if s.dsPwdLog != nil {
		s.dsPwdLog.Close()
	}
	// BUGFIX: the auth binlog consumer was never closed, leaking its
	// connection on shutdown.
	if s.authBinLog != nil {
		s.authBinLog.Close()
	}
	return
}

View File

@@ -0,0 +1,54 @@
package service
import (
"context"
"fmt"
"sync"
"testing"
"time"
"go-common/app/job/main/passport/conf"
idfgmdl "go-common/app/service/main/identify-game/model"
"go-common/library/log"
. "github.com/smartystreets/goconvey/convey"
)
var (
	// once guards one-time construction of the shared test service s.
	once sync.Once
	s    *Service
)
// startService loads config, initializes logging, and builds the shared
// Service used by every test in this package. Call via once.Do.
func startService() {
	if err := conf.Init(); err != nil {
		panic(fmt.Sprintf("conf.Init() error(%v)", err))
	}
	// init log
	log.Init(conf.Conf.Xlog)
	s = New(conf.Conf)
}
// TestNew checks construction invariants: the built-in game appid leads the
// list and the URI config section is populated.
func TestNew(t *testing.T) {
	once.Do(startService)
	Convey("new", t, func() {
		So(s.gameAppIDs[0], ShouldEqual, _gameAppID)
		t.Logf("s.gameAppIDs: %v", s.gameAppIDs)
		So(s.c.URI, ShouldNotBeNil)
		So(s.c.URI.DelCache, ShouldNotBeEmpty)
		So(s.c.URI.SetToken, ShouldNotBeEmpty)
		t.Logf("s.c.URI: %+v", s.c.URI)
	})
}
// TestDelCache is an integration check of the identify-game RPC DelCache
// call with a dummy token; it requires a reachable RPC backend.
func TestDelCache(t *testing.T) {
	once.Do(startService)
	time.Sleep(time.Second * 1)
	Convey("del cache", t, func() {
		arg := &idfgmdl.CleanCacheArgs{
			Token: "foo",
		}
		err := s.igRPC.DelCache(context.Background(), arg)
		So(err, ShouldBeNil)
	})
}

View File

@@ -0,0 +1,123 @@
package service
import (
"context"
"encoding/json"
"strings"
"time"
"go-common/app/job/main/passport/model"
"go-common/library/log"
"go-common/library/queue/databus"
)
// userconsumeproc consumes user (aso_app_perm) binlog messages, links each
// into the commit-tracking list guarded by userMu, and routes by mid so one
// user's events are handled by a single merge routine.
func (s *Service) userconsumeproc() {
	mergeRoutineNum := int64(s.c.Group.User.Num)
	msgs := s.dsUser.Messages()
	for {
		msg, ok := <-msgs
		if !ok {
			log.Error("s.userconsumeproc closed")
			return
		}
		// marked head to first commit
		m := &message{data: msg}
		p := new(model.PMsg)
		if err := json.Unmarshal(msg.Value, p); err != nil {
			log.Error("json.Unmarshal(%s) error(%v)", string(msg.Value), err)
			continue
		}
		s.userMu.Lock()
		if s.userHead == nil {
			s.userHead = m
			s.userLast = m
		} else {
			s.userLast.next = m
			s.userLast = m
		}
		s.userMu.Unlock()
		m.object = p
		// use specify goroutine to merge messages
		s.userMergeChans[p.Data.Mid%mergeRoutineNum] <- m
		log.Info("userconsumeproc key:%s partition:%d offset:%d", msg.Key, msg.Partition, msg.Offset)
	}
}
// usercommitproc marks processed user batches done and commits, per
// partition, the newest offset in the contiguous done-prefix of the list
// (guarded by userMu, matching userconsumeproc).
func (s *Service) usercommitproc() {
	commits := make(map[int32]*databus.Message, s.c.Group.User.Size)
	for {
		done := <-s.userDoneChan
		// merge partitions to commit offset
		for _, d := range done {
			d.done = true
		}
		s.userMu.Lock()
		for ; s.userHead != nil && s.userHead.done; s.userHead = s.userHead.next {
			commits[s.userHead.data.Partition] = s.userHead.data
		}
		s.userMu.Unlock()
		for k, m := range commits {
			log.Info("usercommitproc committed, key:%s partition:%d offset:%d", m.Key, m.Partition, m.Offset)
			m.Commit()
			delete(commits, k)
		}
	}
}
// usermergeproc batches user binlog messages from one merge channel. Events
// on aso_app_perm* with a non-empty action are collected and flushed to
// setTokens when a batch fills or the ticker fires; every message is then
// reported done for offset commit.
func (s *Service) usermergeproc(c chan *message) {
	var (
		max    = s.c.Group.User.Size
		merges = make([]*model.PMsg, 0, max)
		marked = make([]*message, 0, max)
		ticker = time.NewTicker(time.Duration(s.c.Group.User.Ticker))
	)
	for {
		select {
		case msg, ok := <-c:
			if !ok {
				log.Error("s.usermergeproc closed")
				return
			}
			p, assertOk := msg.object.(*model.PMsg)
			if assertOk && strings.HasPrefix(p.Table, "aso_app_perm") && p.Action != "" {
				merges = append(merges, p)
			}
			marked = append(marked, msg)
			if len(marked) < max && len(merges) < max {
				continue
			}
		case <-ticker.C:
			// Periodic flush so offsets commit even on a quiet stream.
		}
		if len(merges) > 0 {
			s.setTokens(merges)
			merges = make([]*model.PMsg, 0, max)
		}
		if len(marked) > 0 {
			s.userDoneChan <- marked
			marked = make([]*message, 0, max)
		}
	}
}
// setTokens applies each buffered binlog event to the token cache in order.
func (s *Service) setTokens(msgs []*model.PMsg) {
	for i := range msgs {
		s.setToken(msgs[i].Action, msgs[i].Data)
	}
}
// setToken writes a single token to cache. Only "insert" events are handled;
// other actions, nil tokens, and empty token strings are ignored.
func (s *Service) setToken(action string, t *model.Token) {
	if action == "" || t == nil || t.Token == "" {
		return
	}
	switch action {
	case "insert":
		// Retry until the cache write succeeds. BUGFIX: the original loop
		// retried forever without logging, hiding persistent failures.
		for {
			err := s.d.SetToken(context.TODO(), t)
			if err == nil {
				return
			}
			log.Error("s.d.SetToken(%+v) error(%v)", t, err)
			time.Sleep(time.Second)
		}
	}
}

View File

@@ -0,0 +1,32 @@
package service
import (
"context"
"time"
"go-common/library/log"
)
// syncPwdLog backfills password logs from the DB into HBase, batch by batch
// starting at the configured id, and stops when a batch comes back empty.
func (s *Service) syncPwdLog() {
	id := s.c.Sync.SyncPwdID
	for {
		pwds, err := s.d.BatchGetPwdLog(context.Background(), id)
		if err != nil {
			log.Error("failed to batch get pwd log, s.d.BatchGetPwdLog(%d), error(%v)", id, err)
			time.Sleep(1 * time.Second)
			continue
		}
		log.Info("SyncPwdID (%d), len(pwds) (%d)", id, len(pwds))
		if len(pwds) == 0 {
			break
		}
		for _, pwd := range pwds {
			// BUGFIX: retry the SAME record until it lands. The original
			// `continue` skipped a failed record and moved on; a later
			// success then advanced id past it, silently losing the row.
			for {
				if err := s.d.AddPwdLogHBase(context.Background(), pwd); err != nil {
					log.Error("failed to add pwd log to hbase, service.dao.AddLoginLogHBase(%+v) error(%v)", pwd, err)
					time.Sleep(1 * time.Second)
					continue
				}
				break
			}
			id = pwd.ID
		}
	}
}