Create & Init Project...

This commit is contained in:
2019-04-22 18:49:16 +08:00
commit fc4fa37393
25440 changed files with 4054998 additions and 0 deletions

View File

@@ -0,0 +1,21 @@
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [
":package-srcs",
"//app/job/main/passport/cmd:all-srcs",
"//app/job/main/passport/conf:all-srcs",
"//app/job/main/passport/dao:all-srcs",
"//app/job/main/passport/http:all-srcs",
"//app/job/main/passport/model:all-srcs",
"//app/job/main/passport/service:all-srcs",
],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,72 @@
## passport-job
#### Version 1.11.0
> 1.remove net ip.
#### Version 1.10.4
> 1.subscribe new auth db to notify game clear cache and commit
#### Version 1.10.3
> 1.subscribe new auth db to notify game clear cache
#### Version 1.10.2
> 1.subscribe insert and update bin log
#### Version 1.10.1
> 1.fix pwd log consumer memory leak
#### Version 1.10.0
> 1.add email bind log
#### Version 1.9.0
> 1.add sync pwd log to hbase
#### Version 1.8.0
> 1.add user tel bind log
#### Version 1.7.0
> 1.change to bm router
#### Version 1.6.0
> 1.add login logs to hbase
#### Version 1.5.0
> 1.move to kratos
> 2.add call of RPC.DelCache in identify-game-service
#### Version 1.3.1
> 1.fix userproc local var assign error
#### Version 1.3.0
> 1.refactor userproc to CSP style concurrency
#### Version 1.2.5
> 1.fix not commit databus message
#### Version 1.2.4
> 1.gray test for debug
#### Version 1.2.3
> 1.gray test for debug
#### Version 1.2.2
> 1.gray test for debug
#### Version 1.2.1
> 1.gray test for debug
#### Version 1.2.0
> 1.remove sms monitor
#### Version 1.1.0
> 1.refactor to CSP style parallel
> 2.add del account cache
#### Version 1.0.2
> 1.fix sms notify
#### Version 1.0.1
> 1.fix notify too slow
#### Version 1.0.0
> 1.通知游戏业务清除accesskey

View File

@@ -0,0 +1,12 @@
# Owner
wanghuan01
zhoujiahui
wutao
# Author
wanghuan01
wutao
wucongyou
# Reviewer
wanghuan01

View File

@@ -0,0 +1,17 @@
# See the OWNERS docs at https://go.k8s.io/owners
approvers:
- wanghuan01
- wucongyou
- wutao
- zhoujiahui
labels:
- job
- job/main/passport
- main
options:
no_parent_owners: true
reviewers:
- wanghuan01
- wucongyou
- wutao

View File

@@ -0,0 +1,12 @@
## passport-game-cloud-job
#### 项目简介
> 1.账号job主要用于消费源站数据变更和通知业务方删除缓存
#### 编译环境
> 请使用golang v1.8.x以上版本编译执行。
#### 依赖包
> 1.公共包go-common
#### 特别说明

View File

@@ -0,0 +1,41 @@
load(
"@io_bazel_rules_go//go:def.bzl",
"go_library",
"go_binary",
)
go_library(
name = "go_default_library",
srcs = ["main.go"],
data = ["passport-job.toml"],
importpath = "go-common/app/job/main/passport/cmd",
tags = ["automanaged"],
visibility = ["//visibility:private"],
deps = [
"//app/job/main/passport/conf:go_default_library",
"//app/job/main/passport/http:go_default_library",
"//app/job/main/passport/service:go_default_library",
"//library/log:go_default_library",
"//library/net/trace:go_default_library",
],
)
go_binary(
name = "cmd",
embed = [":go_default_library"],
visibility = ["//visibility:public"],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,48 @@
package main
import (
"flag"
"os"
"os/signal"
"syscall"
"time"
"go-common/app/job/main/passport/conf"
"go-common/app/job/main/passport/http"
"go-common/app/job/main/passport/service"
"go-common/library/log"
"go-common/library/net/trace"
)
// main bootstraps the passport-job process: it parses flags, loads
// configuration, initializes logging and tracing, starts the service and
// its HTTP server, then blocks handling OS signals until termination.
func main() {
	flag.Parse()
	if err := conf.Init(); err != nil {
		panic(err)
	}
	log.Init(conf.Conf.Xlog)
	defer log.Close()
	trace.Init(conf.Conf.Tracer)
	defer trace.Close()
	// service init
	srv := service.New(conf.Conf)
	http.Init(conf.Conf, srv)
	log.Info("passport-job start")
	// Wait for a signal; the channel is buffered so a signal arriving
	// before we block is not lost.
	sigCh := make(chan os.Signal, 1)
	signal.Notify(sigCh, syscall.SIGHUP, syscall.SIGQUIT, syscall.SIGTERM, syscall.SIGINT)
	for sig := range sigCh {
		log.Info("passport-job get a signal %s", sig.String())
		switch sig {
		case syscall.SIGQUIT, syscall.SIGTERM, syscall.SIGINT:
			srv.Close()
			// give in-flight consumers a moment to drain before exiting
			time.Sleep(time.Second * 2)
			log.Info("passport-job exit")
			return
		case syscall.SIGHUP:
			// TODO reload
		default:
			return
		}
	}
}

View File

@@ -0,0 +1,250 @@
# This is a TOML document. Boom.
version = "1.0.0"
[xlog]
dir = "/tmp/"
[bm]
addr = "0.0.0.0:6651"
timeout = "1s"
[uri]
setToken = "http://passport.bilibili.co/intranet/auth/setToken"
delCache = "http://passport.bilibili.co/intranet/acc/job/delCache"
[game]
# NOTE: the conf item appIDs represents the game app id list,
# the app id 876 will always be the first item of the final game app id list,
# even if it's not configured in this file or configured in other order.
appIDs = [876,900]
delCacheURI = "http://p.biligame.com/api/notify/purge.cache"
[game.client]
key = "ebbec09cd271049e"
secret = "test"
dial = "500ms"
timeout = "1s"
keepAlive = "60s"
timer = 10
[game.client.breaker]
window = "3s"
sleep = "100ms"
bucket = 10
ratio = 0.5
request = 100
[rpc]
[rpc.identifyGame]
pullInterval = "10s"
[rpc.identifyGame.client]
proto = "tcp"
timeout = "30s"
timer = 1000
[rpc.identifyGame.client.breaker]
window = "3s"
sleep = "100ms"
bucket = 10
ratio = 0.5
request = 100
[rpc.identifyGame.zookeeper]
root = "/microservice/identify-game-service/"
addrs = ["172.18.33.172:2181"]
timeout = "30s"
[db]
[db.log]
addr = "172.16.33.205:3306"
dsn = "aso:hA0DAnENNFz78kYB@tcp(172.16.33.205:3306)/aso?timeout=5s&readTimeout=5s&writeTimeout=5s&parseTime=true&loc=Local&charset=utf8,utf8mb4"
active = 5
idle = 2
queryTimeout = "1s"
execTimeout = "2s"
tranTimeout = "2s"
[db.log.breaker]
window = "3s"
sleep = "100ms"
bucket = 10
ratio = 0.5
request = 100
[db.aso]
addr = "172.16.33.205:3306"
dsn = "aso:hA0DAnENNFz78kYB@tcp(172.16.33.205:3306)/aso?timeout=5s&readTimeout=5s&writeTimeout=5s&parseTime=true&loc=Local&charset=utf8,utf8mb4"
active = 5
idle = 2
queryTimeout = "1s"
execTimeout = "2s"
tranTimeout = "2s"
[db.aso.breaker]
window = "3s"
sleep = "100ms"
bucket = 10
ratio = 0.5
request = 100
[httpClient]
key = "ebbec09cd271049e"
secret = "test"
dial = "500ms"
timeout = "1s"
keepAlive = "60s"
timer = 10
[httpClient.breaker]
window = "3s"
sleep = "100ms"
bucket = 10
ratio = 0.5
request = 100
[group]
[group.asoBinLog]
size = 1024
num = 32
ticker = "5s"
chan = 1024
[group.user]
size = 1024
num = 32
ticker = "5s"
chan = 1024
[group.log]
size = 1024
num = 32
ticker = "5s"
chan = 1024
[group.contactBindLog]
size = 2
num = 2
ticker = "5s"
chan = 2
[group.pwdLog]
size = 2
num = 2
ticker = "5s"
chan = 2
[databus]
[databus.asoBinLog]
key = "0QHEzXlXE9Ijewj8V4zu"
secret = "0QHEzXlXE9Ijewj8V4zv"
group = "PassportBinlog-UGC-S"
topic = "Passport-T"
action = "sub"
offset = "old"
buffer = 2048
name = "passport-job/databus"
proto = "tcp"
addr = "172.16.33.158:6205"
idle = 1
active = 1
dialTimeout = "1s"
readTimeout = "60s"
writeTimeout = "1s"
idleTimeout = "10s"
[databus.user]
key = "0QHEzXlXE9Ijewj8V4zu"
secret = "0QHEzXlXE9Ijewj8V4zv"
group = "PassportUserInfo-SetUserInfo-S"
topic = "PassportUserInfo-T"
action = "sub"
offset = "old"
buffer = 2048
name = "passport-job/databus"
proto = "tcp"
addr = "172.16.33.158:6205"
idle = 1
active = 1
dialTimeout = "1s"
readTimeout = "60s"
writeTimeout = "1s"
idleTimeout = "10s"
[databus.log]
key = "0QEO9F8JuuIxZzNDvklH"
secret = "0QEO9F8JuuIxZzNDvklI"
group = "PassportLog-Job-S"
topic = "PassportLog-T"
action = "sub"
offset = "old"
buffer = 2048
name = "passport-job/databus"
proto = "tcp"
addr = "172.16.33.158:6205"
idle = 1
active = 1
dialTimeout = "1s"
readTimeout = "60s"
writeTimeout = "1s"
idleTimeout = "10s"
[databus.contactBindLog]
key = "dbe67e6a4c36f877"
secret = "8c775ea242caa367ba5c876c04576571"
group = "Test1-MainCommonArch-S"
topic = "test1"
action = "sub"
offset = "old"
buffer = 2048
name = "databus"
proto = "tcp"
addr = "172.18.33.50:6205"
idle = 1
active = 1
dialTimeout = "1s"
readTimeout = "60s"
writeTimeout = "1s"
idleTimeout = "10s"
[databus.userLog]
key = "2511663d546f1413"
secret = "cde3b480836cc76df3d635470f991caa"
group = "LogUserAction-MainSearch-P"
topic = "LogUserAction-T"
action = "pub"
buffer = 10240
name = "log-user-action/log-sub"
proto = "tcp"
addr = "172.18.33.50:6205"
idle = 1
active = 1
dialTimeout = "1s"
readTimeout = "60s"
writeTimeout = "1s"
idleTimeout = "10s"
[hbase]
[hbase.loginLog]
master = ""
meta = ""
testRowKey = "passport-job-ping"
dialTimeout = "1s"
readTimeout = "10s"
readsTimeout = "10s"
writeTimeout = "10s"
writesTimeout = "10s"
[hbase.loginLog.zookeeper]
root = ""
addrs = ["172.18.33.131:2181","172.18.33.168:2181","172.18.33.169:2181"]
#addrs = ["127.0.0.1:12181"]
timeout = "30s"
[hbase.pwdLog]
master = ""
meta = ""
testRowKey = "passport-job-ping"
dialTimeout = "1s"
readTimeout = "10s"
readsTimeout = "10s"
writeTimeout = "10s"
writesTimeout = "10s"
[hbase.pwdLog.zookeeper]
root = ""
addrs = ["172.18.33.131:2181","172.18.33.168:2181","172.18.33.169:2181"]
timeout = "30s"
[encode]
salt = "12345678"
aesKey = "0123456789abcdef"

View File

@@ -0,0 +1,38 @@
load(
"@io_bazel_rules_go//go:def.bzl",
"go_library",
)
go_library(
name = "go_default_library",
srcs = ["conf.go"],
importpath = "go-common/app/job/main/passport/conf",
tags = ["automanaged"],
visibility = ["//visibility:public"],
deps = [
"//library/conf:go_default_library",
"//library/database/hbase.v2:go_default_library",
"//library/database/sql:go_default_library",
"//library/log:go_default_library",
"//library/net/http/blademaster:go_default_library",
"//library/net/rpc:go_default_library",
"//library/net/trace:go_default_library",
"//library/queue/databus:go_default_library",
"//library/time:go_default_library",
"//vendor/github.com/BurntSushi/toml:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,188 @@
package conf
import (
"errors"
"flag"
"go-common/library/conf"
"go-common/library/database/sql"
"go-common/library/log"
"go-common/library/net/http/blademaster"
httpx "go-common/library/net/http/blademaster"
"go-common/library/net/rpc"
"go-common/library/net/trace"
"go-common/library/queue/databus"
"go-common/library/time"
"go-common/library/database/hbase.v2"
"github.com/BurntSushi/toml"
)
// Conf global variable.
var (
	// Conf is the live configuration; remote reloads overwrite it in place.
	Conf = &Config{}
	// client talks to the remote config center (nil in local-file mode).
	client *conf.Client
	// confPath is the local config file path supplied via the -conf flag.
	confPath string
)

// Config struct of conf.
type Config struct {
	// base
	// Tracer tracer
	Tracer *trace.Config
	// Xlog log
	Xlog *log.Config
	// BM blademaster HTTP server config
	BM *blademaster.ServerConfig
	// URI uri
	URI *URI
	// Game game
	Game *Game
	// RPC rpc
	RPC *RPC
	// DB db
	DB *DB
	// HTTPClient httpx client
	HTTPClient *httpx.ClientConfig
	// Group group.
	Group *Group
	// DataBus databus
	DataBus *DataBus
	// HBase hbase
	HBase *HBase
	// Encode holds the salt/AES key used when encoding sensitive fields.
	Encode *Encode
	// Sync holds the pwd-log sync cursor configuration.
	Sync *Sync
}

// Encode encode
type Encode struct {
	AesKey string // AES key (see [encode] aesKey in the TOML)
	Salt   string // salt (see [encode] salt in the TOML)
}

// HBase multi hbase.
type HBase struct {
	LoginLog *HBaseConfig
	PwdLog   *HBaseConfig
}

// HBaseConfig wraps the base hbase client config with per-call timeouts.
type HBaseConfig struct {
	*hbase.Config
	ReadTimeout  time.Duration
	WriteTimeout time.Duration
}

// URI multi uri.
type URI struct {
	SetToken string
	DelCache string
}

// RPC multi rpc conf collection.
type RPC struct {
	IdentifyGame *rpc.ClientConfig
}

// Game game notify conf.
type Game struct {
	AppIDs      []int32
	DelCacheURI string
	Client      *httpx.ClientConfig
}

// Group multi group config collection.
type Group struct {
	AsoBinLog      *GroupConfig
	User           *GroupConfig
	Log            *GroupConfig
	ContactBindLog *GroupConfig
	PwdLog         *GroupConfig
	AuthBinLog     *GroupConfig
}

// GroupConfig group config.
type GroupConfig struct {
	// Size merge size
	Size int
	// Num merge goroutine num
	Num int
	// Ticker duration of submit merges when no new message
	Ticker time.Duration
	// Chan size of merge chan and done chan
	Chan int
}

// DataBus multi databus collection.
type DataBus struct {
	AsoBinLog      *databus.Config
	User           *databus.Config
	Log            *databus.Config
	ContactBindLog *databus.Config
	UserLog        *databus.Config
	PwdLog         *databus.Config
	AuthBinLog     *databus.Config
}

// DB db config.
type DB struct {
	Log *sql.Config
	ASO *sql.Config
}

// Sync config.
type Sync struct {
	// SyncPwdID is the last synced pwd-log id (resume cursor).
	SyncPwdID int64
}
// local loads the configuration from the file named by the -conf flag.
func local() error {
	_, err := toml.DecodeFile(confPath, &Conf)
	return err
}
// remote connects to the config center, performs the initial load, and
// spawns a goroutine that reloads the configuration on every change event.
func remote() (err error) {
	if client, err = conf.New(); err != nil {
		return
	}
	if err = load(); err != nil {
		return
	}
	go func() {
		for range client.Event() {
			log.Info("config reload")
			// BUG FIX: the original logged the outer named `err`, which is
			// always nil here, and dropped load()'s actual error.
			if e := load(); e != nil {
				log.Error("config reload error (%v)", e)
			}
		}
	}()
	return
}
func load() (err error) {
var (
s string
ok bool
tmpConf *Config
)
if s, ok = client.Toml2(); !ok {
return errors.New("load config center error")
}
if _, err = toml.Decode(s, &tmpConf); err != nil {
return errors.New("could not decode config")
}
*Conf = *tmpConf
return
}
// init registers the -conf flag; main is expected to call flag.Parse.
func init() {
	flag.StringVar(&confPath, "conf", "", "default config path")
}
// Init loads configuration: from the remote config center when no -conf
// path was given, otherwise from the local file.
func Init() error {
	if confPath == "" {
		return remote()
	}
	return local()
}

View File

@@ -0,0 +1,69 @@
load(
"@io_bazel_rules_go//go:def.bzl",
"go_library",
"go_test",
)
go_library(
name = "go_default_library",
srcs = [
"acc.go",
"dao.go",
"hbase_login_log.go",
"hbase_pwd_log.go",
"mysql.go",
],
importpath = "go-common/app/job/main/passport/dao",
tags = ["automanaged"],
visibility = ["//visibility:public"],
deps = [
"//app/job/main/passport/conf:go_default_library",
"//app/job/main/passport/model:go_default_library",
"//library/database/hbase.v2:go_default_library",
"//library/database/sql:go_default_library",
"//library/ecode:go_default_library",
"//library/log:go_default_library",
"//library/net/http/blademaster:go_default_library",
],
)
go_test(
name = "go_default_test",
srcs = [
"acc_test.go",
"dao_test.go",
"hbase_login_log_test.go",
"hbase_pwd_log_test.go",
"mysql_test.go",
],
embed = [":go_default_library"],
rundir = ".",
tags = ["automanaged"],
deps = [
"//app/job/main/passport/conf:go_default_library",
"//app/job/main/passport/model:go_default_library",
"//library/database/hbase.v2:go_default_library",
"//library/database/sql:go_default_library",
"//library/ecode:go_default_library",
"//library/net/http/blademaster:go_default_library",
"//library/queue/databus:go_default_library",
"//library/time:go_default_library",
"//vendor/github.com/bouk/monkey:go_default_library",
"//vendor/github.com/smartystreets/goconvey/convey:go_default_library",
"//vendor/github.com/tsuna/gohbase/hrpc:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,76 @@
package dao
import (
"context"
"net/url"
"strconv"
"go-common/app/job/main/passport/model"
"go-common/library/ecode"
"go-common/library/log"
)
// SetToken pushes a token to the passport intranet setToken API.
// It returns a non-nil error when the HTTP call fails or when the API
// responds with a non-zero business code.
func (d *Dao) SetToken(c context.Context, t *model.Token) (err error) {
	params := url.Values{}
	params.Set("mid", strconv.FormatInt(t.Mid, 10))
	params.Set("appid", strconv.FormatInt(t.Appid, 10))
	params.Set("appSubid", strconv.FormatInt(t.Subid, 10))
	params.Set("accessToken", t.Token)
	params.Set("refreshToken", t.RToken)
	params.Set("tp", strconv.FormatInt(t.Type, 10))
	params.Set("createAt", strconv.FormatInt(t.CTime, 10))
	params.Set("expires", strconv.FormatInt(t.Expires, 10))
	params.Set("from", "passport-job")
	var res struct {
		Code int `json:"code"`
	}
	// FIX: use the caller's context instead of context.TODO() so deadlines
	// and cancellation propagate; log message now names Post, not Get.
	if err = d.client.Post(c, d.setTokenURI, "127.0.0.1", params, &res); err != nil {
		log.Error("d.client.Post error(%v)", err)
		return
	}
	if res.Code != 0 {
		err = ecode.Int(res.Code)
		log.Error("set token url(%s) err(%v)", d.setTokenURI+"?"+params.Encode(), err)
	}
	return
}
// DelCache deletes the cache entry for accessKey via the passport API.
// It returns a non-nil error when the HTTP call fails or the API replies
// with a non-zero business code.
func (d *Dao) DelCache(c context.Context, accessKey string) (err error) {
	params := url.Values{}
	params.Set("access_key", accessKey)
	var res struct {
		Code int `json:"code"`
	}
	// FIX: pass the caller's context instead of context.TODO() so
	// deadlines/cancellation from the caller apply to this request.
	if err = d.client.Get(c, d.delCacheURI, "", params, &res); err != nil {
		log.Error("d.client.Get url(%s) error(%v)", d.delCacheURI+"?"+params.Encode(), err)
		return
	}
	if res.Code != 0 {
		err = ecode.Int(res.Code)
		log.Error("del cache url(%s) error(%v)", d.delCacheURI+"?"+params.Encode(), err)
	}
	return
}
// NotifyGame tells the game side to purge its cache for token after the
// given action ("updatePwd" etc. — value comes from the caller).
// NOTE(review): no context parameter exists in the signature, so
// context.TODO() is retained to keep the interface unchanged.
func (d *Dao) NotifyGame(token *model.AccessInfo, action string) (err error) {
	params := url.Values{}
	params.Set("modifiedAttr", action)
	params.Set("mid", strconv.FormatInt(token.Mid, 10))
	params.Set("access_token", token.Token)
	params.Set("from", "passport-job")
	var res struct {
		Code int `json:"code"`
	}
	// FIX: the log message claimed d.client.Get while the call actually
	// goes through d.gameClient — corrected so logs point at the right client.
	if err = d.gameClient.Get(context.TODO(), d.delGameCacheURI, "127.0.0.1", params, &res); err != nil {
		log.Error("d.gameClient.Get url(%s) error(%v)", d.delGameCacheURI+"?"+params.Encode(), err)
		return
	}
	if res.Code != 0 {
		err = ecode.Int(res.Code)
		log.Error("url(%s) err(%v)", d.delGameCacheURI+"?"+params.Encode(), err)
	}
	return
}

View File

@@ -0,0 +1,65 @@
package dao
import (
"context"
"go-common/app/job/main/passport/model"
"go-common/library/ecode"
bm "go-common/library/net/http/blademaster"
"net/url"
"reflect"
"testing"
"github.com/bouk/monkey"
"github.com/smartystreets/goconvey/convey"
)
// TestDao_SetToken exercises SetToken against the live passport API.
// NOTE(review): integration test — depends on a reachable endpoint.
func TestDao_SetToken(t *testing.T) {
	convey.Convey("SetToken", t, func(ctx convey.C) {
		token := &model.Token{
			Mid:   88888970,
			Token: "foo",
		}
		ctx.Convey("When everything gose positive", func(ctx convey.C) {
			err := d.SetToken(context.TODO(), token)
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}

// TestDao_DelCache exercises DelCache against the live passport API.
// NOTE(review): integration test — depends on a reachable endpoint.
func TestDao_DelCache(t *testing.T) {
	convey.Convey("DelCache", t, func(ctx convey.C) {
		token := "foo"
		ctx.Convey("When everything gose positive", func(ctx convey.C) {
			err := d.DelCache(context.TODO(), token)
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}

// TestDao_NotifyGame covers both outcomes of NotifyGame by monkey-patching
// the game client's Get method to succeed and then to fail.
func TestDao_NotifyGame(t *testing.T) {
	convey.Convey("NotifyGame", t, func(ctx convey.C) {
		var (
			mid    = &model.AccessInfo{}
			action = ""
		)
		ctx.Convey("When everything gose positive", func(ctx convey.C) {
			mock := monkey.PatchInstanceMethod(reflect.TypeOf(d.gameClient), "Get", func(d *bm.Client, _ context.Context, _, _ string, _ url.Values, _ interface{}) error {
				return nil
			})
			defer mock.Unpatch()
			err := d.NotifyGame(mid, action)
			ctx.Convey("Then err should be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
			})
			// second patch simulates a business-level failure (code 500)
			mock2 := monkey.PatchInstanceMethod(reflect.TypeOf(d.gameClient), "Get", func(d *bm.Client, _ context.Context, _, _ string, _ url.Values, _ interface{}) error {
				return ecode.Int(500)
			})
			defer mock2.Unpatch()
			err = d.NotifyGame(mid, action)
			ctx.Convey("Then err should not be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldNotBeNil)
			})
		})
	})
}

View File

@@ -0,0 +1,66 @@
package dao
import (
"context"
"go-common/app/job/main/passport/conf"
"go-common/library/database/hbase.v2"
"go-common/library/database/sql"
"go-common/library/log"
bm "go-common/library/net/http/blademaster"
)
// Dao struct info of Dao.
type Dao struct {
	c               *conf.Config  // global config
	logDB           *sql.DB       // MySQL pool for login-log shard tables
	asoDB           *sql.DB       // MySQL pool for the aso database
	client          *bm.Client    // general passport HTTP client
	gameClient      *bm.Client    // HTTP client tuned for the game notify API
	loginLogHBase   *hbase.Client // HBase client for login logs
	pwdLogHBase     *hbase.Client // HBase client for pwd-change logs
	setTokenURI     string        // passport intranet setToken endpoint
	delCacheURI     string        // passport delCache endpoint
	delGameCacheURI string        // game purge-cache endpoint
}

// New new a Dao and return.
// It wires every DB/HBase/HTTP dependency from the supplied config; the
// underlying constructors are assumed to panic or defer errors on their
// own (no error is returned here).
func New(c *conf.Config) (d *Dao) {
	d = &Dao{
		c:               c,
		logDB:           sql.NewMySQL(c.DB.Log),
		asoDB:           sql.NewMySQL(c.DB.ASO),
		client:          bm.NewClient(c.HTTPClient),
		gameClient:      bm.NewClient(c.Game.Client),
		loginLogHBase:   hbase.NewClient(c.HBase.LoginLog.Config),
		pwdLogHBase:     hbase.NewClient(c.HBase.PwdLog.Config),
		setTokenURI:     c.URI.SetToken,
		delCacheURI:     c.URI.DelCache,
		delGameCacheURI: c.Game.DelCacheURI,
	}
	return d
}
// Ping reports the health of the dao's database connections.
// FIX: the original logged a failed ping at Info level and never checked
// asoDB at all; both pools are now pinged and failures logged as errors.
func (d *Dao) Ping(c context.Context) (err error) {
	if err = d.logDB.Ping(c); err != nil {
		log.Error("dao.logDB.Ping() error(%v)", err)
		return
	}
	if err = d.asoDB.Ping(c); err != nil {
		log.Error("dao.asoDB.Ping() error(%v)", err)
	}
	return
}
// Close releases every underlying resource held by the dao.
// Always returns nil; the error result is kept for interface stability.
func (d *Dao) Close() (err error) {
	if d.logDB != nil {
		d.logDB.Close()
	}
	// FIX: asoDB is opened in New but was never closed here — a
	// connection-pool leak on shutdown.
	if d.asoDB != nil {
		d.asoDB.Close()
	}
	if d.loginLogHBase != nil {
		d.loginLogHBase.Close()
	}
	if d.pwdLogHBase != nil {
		d.pwdLogHBase.Close()
	}
	return
}

View File

@@ -0,0 +1,81 @@
package dao
import (
"context"
"flag"
"math/big"
"net"
"os"
"reflect"
"testing"
"go-common/app/job/main/passport/conf"
"go-common/library/database/sql"
"github.com/bouk/monkey"
"github.com/smartystreets/goconvey/convey"
"go-common/library/database/hbase.v2"
)
var (
	// d is the shared dao fixture built once in TestMain.
	d *Dao
)

// TestMain wires configuration (remote config center under DEPLOY_ENV,
// local TOML file otherwise), builds the dao fixture, and runs the suite.
// NOTE(review): a config token is hard-coded below and flag.Set errors are
// ignored — acceptable in a test harness but worth confirming.
func TestMain(m *testing.M) {
	if os.Getenv("DEPLOY_ENV") != "" {
		flag.Set("app_id", "main.passport.passport-user-job")
		flag.Set("conf_token", "f5c791689788882beaef2903735949ea")
		flag.Set("tree_id", "3074")
		flag.Set("conf_version", "docker-1")
		flag.Set("deploy_env", "uat")
		flag.Set("conf_host", "config.bilibili.co")
		flag.Set("conf_path", "/tmp")
		flag.Set("region", "sh")
		flag.Set("zone", "sh001")
	} else {
		flag.Set("conf", "../cmd/passport-job.toml")
	}
	flag.Parse()
	if err := conf.Init(); err != nil {
		panic(err)
	}
	d = New(conf.Conf)
	os.Exit(m.Run())
}
// InetAtoN converts a dotted-quad IPv4 address to its int64 value.
// Non-IPv4 or malformed input yields 0 (ParseIP/To4 return nil, and
// big.Int.SetBytes(nil) is zero).
func InetAtoN(ip string) int64 {
	return big.NewInt(0).SetBytes(net.ParseIP(ip).To4()).Int64()
}
// TestDaoPing verifies the dao's database connections are reachable.
// NOTE(review): integration test — requires a live MySQL.
func TestDaoPing(t *testing.T) {
	var c = context.Background()
	convey.Convey("Ping", t, func(ctx convey.C) {
		err := d.Ping(c)
		ctx.Convey("Then err should be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}
// TestDaoClose verifies Close returns nil when every underlying Close is
// stubbed out via monkey patches.
func TestDaoClose(t *testing.T) {
	convey.Convey("Close", t, func(ctx convey.C) {
		monkey.PatchInstanceMethod(reflect.TypeOf(d.logDB), "Close", func(_ *sql.DB) error {
			return nil
		})
		monkey.PatchInstanceMethod(reflect.TypeOf(d.loginLogHBase), "Close", func(_ *hbase.Client) error {
			return nil
		})
		monkey.PatchInstanceMethod(reflect.TypeOf(d.pwdLogHBase), "Close", func(_ *hbase.Client) error {
			return nil
		})
		defer monkey.UnpatchAll()
		// FIX: the original declared `var err error` but never assigned
		// d.Close()'s result, so the assertion below could never fail.
		err := d.Close()
		ctx.Convey("Then err should be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}

View File

@@ -0,0 +1,69 @@
package dao
import (
"bytes"
"context"
"encoding/binary"
"strconv"
"time"
"go-common/app/job/main/passport/model"
"go-common/library/log"
)
const (
	// HBase table and column-family for login logs.
	_tLoginLog     = "ugc:AsoLoginLog"
	_fLoginLogInfo = "f"
	// column qualifiers within the "f" family
	_cLoginLogMid     = "mid"
	_cLoginLogTs      = "ts"
	_cLoginLogLoginIP = "ip"
	_cLoginLogType    = "t"
	_cLoginLogServer  = "s"
	// _int64Max is math.MaxInt64; row keys store (_int64Max - ts) so that
	// newer timestamps sort first under HBase's ascending key order.
	_int64Max = 0x7fffffffffffffff
)
// AddLoginLogHBase writes one login log record into HBase, keyed by
// rowKeyLoginLog(mid, ts). The write is bounded by the configured
// WriteTimeout. The put error, if any, is both logged and returned.
func (d *Dao) AddLoginLogHBase(c context.Context, loginLog *model.LoginLog) (err error) {
	fvs := make(map[string][]byte)
	fvs[_cLoginLogMid] = []byte(strconv.FormatInt(loginLog.Mid, 10))
	fvs[_cLoginLogTs] = []byte(strconv.FormatInt(loginLog.Timestamp, 10))
	fvs[_cLoginLogLoginIP] = []byte(strconv.FormatInt(loginLog.LoginIP, 10))
	fvs[_cLoginLogType] = []byte(strconv.FormatInt(loginLog.Type, 10))
	fvs[_cLoginLogServer] = []byte(loginLog.Server)
	values := map[string]map[string][]byte{_fLoginLogInfo: fvs}
	key := rowKeyLoginLog(loginLog.Mid, loginLog.Timestamp)
	ctx, cancel := context.WithTimeout(c, time.Duration(d.c.HBase.LoginLog.WriteTimeout))
	defer cancel()
	if _, err = d.loginLogHBase.PutStr(ctx, _tLoginLog, string(key), values); err != nil {
		// FIX: the original format string had two verbs but only one
		// argument, producing a mangled log line.
		log.Error("dao.hbase.Put(%+v) error(%v)", loginLog, err)
	}
	return
}
// rowKeyLoginLog generate row key of login log.
func rowKeyLoginLog(mid, ts int64) (res []byte) {
buf := bytes.Buffer{}
b := make([]byte, 8)
// reverse mid bytes
binary.BigEndian.PutUint64(b, uint64(mid))
reverse(b)
buf.Write(b)
// (int64_max - ts) bytes
binary.BigEndian.PutUint64(b, uint64(_int64Max-ts))
buf.Write(b)
res = buf.Bytes()
return
}
func reverse(b []byte) {
l := len(b)
for i := 0; i < l/2; i++ {
t := b[i]
b[i] = b[l-1-i]
b[l-1-i] = t
}
}

View File

@@ -0,0 +1,83 @@
package dao
import (
"context"
"strconv"
"testing"
"time"
"go-common/app/job/main/passport/model"
"go-common/library/queue/databus"
xtime "go-common/library/time"
"github.com/smartystreets/goconvey/convey"
)
// TestDao_AddLoginLogHBase writes six consecutive login-log rows for one
// mid. NOTE(review): integration test — requires a reachable HBase; fails
// hard (FailNow) on the first put error.
func TestDao_AddLoginLogHBase(t *testing.T) {
	ts := time.Now().Unix()
	mid := int64(88888970)
	ipN := InetAtoN("127.0.0.1")
	for i := 0; i < 6; i++ {
		m := &model.LoginLog{
			Mid:       mid,
			Timestamp: ts + int64(i),
			LoginIP:   int64(ipN),
			Type:      1,
			Server:    strconv.FormatInt(int64(i), 10),
		}
		if err := d.AddLoginLogHBase(context.TODO(), m); err != nil {
			t.Logf("dao.AddLoginLogHBase(%+v) error(%v)", m, err)
			t.FailNow()
		}
	}
}

// TestDao_SendLoginLogMsgs publishes a login-log message through databus.
// NOTE(review): credentials and a broker address are hard-coded here and
// the test hits the real network — should be gated or mocked.
func TestDao_SendLoginLogMsgs(t *testing.T) {
	convey.Convey("SetToken", t, func(ctx convey.C) {
		dsPubConf := &databus.Config{
			Key:          "0QEO9F8JuuIxZzNDvklH",
			Secret:       "0QEO9F8JuuIxZzNDvklI",
			Group:        "PassportLog-Login-P",
			Topic:        "PassportLog-T",
			Action:       "pub",
			Name:         "databus",
			Proto:        "tcp",
			Addr:         "172.16.33.158:6205",
			Active:       1,
			Idle:         1,
			DialTimeout:  xtime.Duration(time.Second),
			WriteTimeout: xtime.Duration(time.Second),
			ReadTimeout:  xtime.Duration(time.Second),
			IdleTimeout:  xtime.Duration(time.Minute),
		}
		dsPub := databus.New(dsPubConf)
		defer dsPub.Close()
		ts := time.Now().Unix()
		mid := int64(88888970)
		ipN := InetAtoN("127.0.0.1")
		for i := 1; i <= 1; i++ {
			v := &model.LoginLog{
				Mid:       mid,
				Timestamp: ts + int64(i),
				LoginIP:   int64(ipN),
				Type:      1,
				Server:    strconv.FormatInt(int64(i), 10),
			}
			// key routes by topic+mid so messages for one mid stay ordered
			k := dsPubConf.Topic + strconv.FormatInt(mid, 10)
			if err := dsPub.Send(context.TODO(), k, v); err != nil {
				t.Errorf("failed to send login log databus message, dsPub.Send(%v, %v) error(%v)", k, v, err)
				t.FailNow()
			}
		}
	})
}

// TestIntCast documents the row-key trick: truncating (_int64Max - ts) to
// uint32 preserves differences between nearby timestamps.
func TestIntCast(t *testing.T) {
	convey.Convey("when ts diff by delta, the uint32(_int64 - ts) diff should be the same", t, func(ctx convey.C) {
		ts := time.Now().Unix()
		delta := int64(10)
		a := uint32(_int64Max - ts)
		b := uint32(_int64Max - ts - delta)
		ctx.So(a-b, convey.ShouldEqual, delta)
	})
}

View File

@@ -0,0 +1,64 @@
package dao
import (
"bytes"
"context"
"encoding/binary"
"strconv"
"time"
"go-common/app/job/main/passport/model"
"go-common/library/log"
)
const (
	// HBase table and column-family for password-change logs.
	_tPwdLog = "ugc:PwdLog"
	_fPwdLog = "pwdlog"
	// column qualifiers within the "pwdlog" family
	_cPwdLogMid     = "mid"
	_cPwdLogOldPwd  = "old_pwd"
	_cPwdLogOldSalt = "old_salt"
	_cPwdLogNewPwd  = "new_pwd"
	_cPwdLogNewSalt = "new_salt"
	_cPwdLogIP      = "ip"
	_cPwdLogTs      = "ts"
)
// AddPwdLogHBase writes one password-change log record into HBase, keyed
// by rowKeyPwdLog(mid, ts), bounded by the configured WriteTimeout.
// NOTE(review): pwdLog carries old/new password hashes and salts; the
// success log below echoes them into plain-text logs — confirm that is
// acceptable for this deployment.
func (d *Dao) AddPwdLogHBase(c context.Context, pwdLog *model.PwdLog) (err error) {
	fvs := make(map[string][]byte)
	fvs[_cPwdLogMid] = []byte(strconv.FormatInt(pwdLog.Mid, 10))
	fvs[_cPwdLogOldPwd] = []byte(pwdLog.OldPwd)
	fvs[_cPwdLogOldSalt] = []byte(pwdLog.OldSalt)
	fvs[_cPwdLogNewPwd] = []byte(pwdLog.NewPwd)
	fvs[_cPwdLogNewSalt] = []byte(pwdLog.NewSalt)
	fvs[_cPwdLogTs] = []byte(strconv.FormatInt(pwdLog.Timestamp, 10))
	fvs[_cPwdLogIP] = []byte(strconv.FormatInt(pwdLog.IP, 10))
	values := map[string]map[string][]byte{_fPwdLog: fvs}
	key := rowKeyPwdLog(pwdLog.Mid, pwdLog.Timestamp)
	ctx, cancel := context.WithTimeout(c, time.Duration(d.c.HBase.PwdLog.WriteTimeout))
	defer cancel()
	if _, err = d.pwdLogHBase.PutStr(ctx, _tPwdLog, string(key), values); err != nil {
		log.Error("failed to put pwd log to hbase, dao.hbase.Put(%+v) error(%v)", pwdLog, err)
		return
	}
	// FIX: the original logged success unconditionally, even after a
	// failed put; the success line now runs only when the put succeeded.
	log.Info("Add pwdLog to HBase, (%+v)", pwdLog)
	return
}
// rowKeyPwdLog generate row key of pwd log.
func rowKeyPwdLog(mid, ts int64) (res []byte) {
buf := bytes.Buffer{}
b := make([]byte, 8)
// reverse mid bytes
binary.BigEndian.PutUint64(b, uint64(mid))
reverse(b)
buf.Write(b)
// (int64_max - ts) bytes
binary.BigEndian.PutUint64(b, uint64(_int64Max-ts))
buf.Write(b)
res = buf.Bytes()
return
}

View File

@@ -0,0 +1,51 @@
package dao
import (
"context"
"fmt"
"reflect"
"testing"
"go-common/app/job/main/passport/model"
"go-common/library/database/hbase.v2"
"github.com/bouk/monkey"
"github.com/smartystreets/goconvey/convey"
"github.com/tsuna/gohbase/hrpc"
)
// TestDao_AddPwdLogHBase checks both the success and the failure path of
// AddPwdLogHBase by monkey-patching the HBase client's PutStr method.
func TestDao_AddPwdLogHBase(t *testing.T) {
	convey.Convey("AddPwdLogHBase", t, func(ctx convey.C) {
		var (
			c = context.Background()
			a = &model.PwdLog{}
		)
		ctx.Convey("When everything gose positive", func(ctx convey.C) {
			mock := monkey.PatchInstanceMethod(reflect.TypeOf(d.pwdLogHBase), "PutStr", func(_ *hbase.Client, _ context.Context, _, _ string, _ map[string]map[string][]byte, _ ...func(hrpc.Call) error) (res *hrpc.Result, err error) {
				return nil, nil
			})
			defer mock.Unpatch()
			err := d.AddPwdLogHBase(c, a)
			ctx.Convey("Then err should be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
			})
		})
		ctx.Convey("When error", func(ctx convey.C) {
			mock := monkey.PatchInstanceMethod(reflect.TypeOf(d.pwdLogHBase), "PutStr", func(_ *hbase.Client, _ context.Context, _, _ string, _ map[string]map[string][]byte, _ ...func(hrpc.Call) error) (res *hrpc.Result, err error) {
				return nil, fmt.Errorf("error")
			})
			defer mock.Unpatch()
			err := d.AddPwdLogHBase(c, a)
			ctx.Convey("Then err should not be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldNotBeNil)
			})
		})
	})
}

// TestDao_rowKeyPwdLog sanity-checks that the row key builder returns a
// non-nil key for the zero input.
func TestDao_rowKeyPwdLog(t *testing.T) {
	convey.Convey("rowKeyPwdLog", t, func() {
		res := rowKeyPwdLog(0, 0)
		convey.So(res, convey.ShouldNotBeNil)
	})
}

View File

@@ -0,0 +1,108 @@
package dao
import (
"context"
"fmt"
"strings"
"go-common/app/job/main/passport/model"
"go-common/library/database/sql"
"go-common/library/log"
)
const (
	// _insertLoginLog is completed with a shard suffix (mid%10) and a
	// VALUES clause supplied by AddLoginLog.
	_insertLoginLog = "INSERT INTO aso_login_log%d(`mid`, `timestamp`, `loginip`, `type`, `server`) VALUES %s"
	// single-row lookups by primary key
	_queryTelBindLog   = "SELECT id, mid, tel, timestamp FROM aso_telephone_bind_log where id = ?"
	_queryEmailBindLog = "SELECT id, mid, email, timestamp FROM aso_email_bind_log where id = ?"
	// paged scan: up to 1000 rows below the cursor id, newest id first
	_batchGetPwdLog = "select id, timestamp, mid, ip, old_pwd, old_salt, new_pwd, new_salt from aso_pwd_log where id < ? order by id desc limit 1000"
	_getPwdLog      = "select id, timestamp, mid, ip, old_pwd, old_salt, new_pwd, new_salt from aso_pwd_log where id = ?"
)
// AddLoginLog batch-inserts login logs into the shard table selected by
// the first record's mid (aso_login_log<mid%10>). A nil/empty slice is a
// no-op. NOTE(review): all records go to the shard of vs[0].Mid even when
// the batch mixes mids — preserved from the original; confirm callers
// always batch per-mid.
func (d *Dao) AddLoginLog(vs []*model.LoginLog) (err error) {
	if len(vs) == 0 {
		return
	}
	// FIX: bind values as placeholders instead of fmt-interpolating them
	// into the SQL text; the original embedded v.Server directly, which is
	// injection-prone and breaks on embedded quotes.
	placeholders := make([]string, 0, len(vs))
	args := make([]interface{}, 0, len(vs)*5)
	for _, v := range vs {
		placeholders = append(placeholders, "(?,?,?,?,?)")
		args = append(args, v.Mid, v.Timestamp, v.LoginIP, v.Type, v.Server)
	}
	s := fmt.Sprintf(_insertLoginLog, vs[0].Mid%10, strings.Join(placeholders, ","))
	if _, err = d.logDB.Exec(context.Background(), s, args...); err != nil {
		log.Error("d.logDB.Exec(%s) error(%v)", s, err)
	}
	return
}
// QueryTelBindLog loads one telephone bind-log row by primary key.
// Non-positive ids return (nil, nil); a missing row returns a non-nil
// zero-value record with a nil error, matching the original contract.
func (d *Dao) QueryTelBindLog(id int64) (res *model.TelBindLog, err error) {
	if id <= 0 {
		return
	}
	res = new(model.TelBindLog)
	err = d.asoDB.QueryRow(context.Background(), _queryTelBindLog, id).Scan(&res.ID, &res.Mid, &res.Tel, &res.Timestamp)
	switch err {
	case nil:
	case sql.ErrNoRows:
		err = nil
	default:
		log.Error("QueryTelBindLog err(%+v)", err)
	}
	return
}
// QueryEmailBindLog loads one email bind-log row by primary key.
// Non-positive ids return (nil, nil); a missing row returns a non-nil
// zero-value record with a nil error, matching the original contract.
func (d *Dao) QueryEmailBindLog(id int64) (res *model.EmailBindLog, err error) {
	if id <= 0 {
		return
	}
	res = new(model.EmailBindLog)
	err = d.asoDB.QueryRow(context.Background(), _queryEmailBindLog, id).Scan(&res.ID, &res.Mid, &res.Email, &res.Timestamp)
	switch err {
	case nil:
	case sql.ErrNoRows:
		err = nil
	default:
		log.Error("QueryEmailBindLog err(%+v)", err)
	}
	return
}
// BatchGetPwdLog returns up to 1000 password-change logs with id below the
// given cursor, newest id first.
func (d *Dao) BatchGetPwdLog(c context.Context, id int64) (res []*model.PwdLog, err error) {
	var rows *sql.Rows
	if rows, err = d.asoDB.Query(c, _batchGetPwdLog, id); err != nil {
		log.Error("batch get pwd log, dao.db.Query(%s) error(%v)", _batchGetPwdLog, err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		pwd := new(model.PwdLog)
		if err = rows.Scan(&pwd.ID, &pwd.Timestamp, &pwd.Mid, &pwd.IP, &pwd.OldPwd, &pwd.OldSalt, &pwd.NewPwd, &pwd.NewSalt); err != nil {
			log.Error("row.Scan() error(%v)", err)
			return
		}
		res = append(res, pwd)
	}
	// FIX: the original dropped errors that ended the iteration early
	// (e.g. a broken connection); surface them to the caller.
	if err = rows.Err(); err != nil {
		log.Error("rows.Err() error(%v)", err)
	}
	return
}
// GetPwdLog loads one password-change log row by primary key.
// A missing row returns a non-nil zero-value record with a nil error,
// matching the original contract.
func (d *Dao) GetPwdLog(c context.Context, id int64) (res *model.PwdLog, err error) {
	res = new(model.PwdLog)
	err = d.asoDB.QueryRow(c, _getPwdLog, id).Scan(&res.ID, &res.Timestamp, &res.Mid, &res.IP, &res.OldPwd, &res.OldSalt, &res.NewPwd, &res.NewSalt)
	switch err {
	case nil:
	case sql.ErrNoRows:
		err = nil
	default:
		log.Error("row.Scan() error(%v)", err)
	}
	return
}

View File

@@ -0,0 +1,69 @@
package dao
import (
"context"
"testing"
"time"
"go-common/app/job/main/passport/model"
"github.com/smartystreets/goconvey/convey"
)
// TestDao_AddLoginLog inserts a small batch of identical login-log rows
// and fails on any DB error (integration test: requires a live DB).
func TestDao_AddLoginLog(t *testing.T) {
	vs := make([]*model.LoginLog, 0)
	v := &model.LoginLog{
		Mid:       10,
		LoginIP:   InetAtoN("127.0.0.1"),
		Timestamp: time.Now().Unix(),
		Type:      1,
		Server:    "server",
	}
	// the same record is reused ten times; only the row count matters here.
	for i := 0; i < 10; i++ {
		vs = append(vs, v)
	}
	if err := d.AddLoginLog(vs); err != nil {
		t.Errorf("dao.AddLoginLog(%v) error(%v)", vs, err)
		t.FailNow()
	}
}
// TestDao_QueryTelBindLog reads tel-bind log row 1 and expects it to
// exist (integration test: requires seeded DB data).
func TestDao_QueryTelBindLog(t *testing.T) {
	convey.Convey("", t, func() {
		res, err := d.QueryTelBindLog(1)
		convey.So(err, convey.ShouldBeNil)
		convey.So(res, convey.ShouldNotBeNil)
		convey.So(res.ID, convey.ShouldEqual, 1)
	})
}
// TestDao_QueryEmailBindLog reads email-bind log row 1 and expects it
// to exist (integration test: requires seeded DB data).
func TestDao_QueryEmailBindLog(t *testing.T) {
	convey.Convey("", t, func() {
		res, err := d.QueryEmailBindLog(1)
		convey.So(err, convey.ShouldBeNil)
		convey.So(res, convey.ShouldNotBeNil)
		convey.So(res.ID, convey.ShouldEqual, 1)
	})
}
// TestDao_BatchGetPwdLog batch-loads pwd logs from a high id and only
// asserts a non-nil result (integration test: requires seeded DB data).
func TestDao_BatchGetPwdLog(t *testing.T) {
	convey.Convey("BatchGetPwdLog", t, func() {
		var (
			c = context.Background()
		)
		res, err := d.BatchGetPwdLog(c, 100000000)
		convey.So(err, convey.ShouldBeNil)
		convey.So(res, convey.ShouldNotBeNil)
	})
}
// TestDao_GetPwdLog reads pwd log row 1 and asserts a non-nil result
// (integration test: requires seeded DB data).
func TestDao_GetPwdLog(t *testing.T) {
	convey.Convey("GetPwdLog", t, func() {
		var (
			c = context.Background()
		)
		res, err := d.GetPwdLog(c, 1)
		convey.So(err, convey.ShouldBeNil)
		convey.So(res, convey.ShouldNotBeNil)
	})
}

View File

@@ -0,0 +1,32 @@
load(
"@io_bazel_rules_go//go:def.bzl",
"go_library",
)
go_library(
name = "go_default_library",
srcs = ["http.go"],
importpath = "go-common/app/job/main/passport/http",
tags = ["automanaged"],
visibility = ["//visibility:public"],
deps = [
"//app/job/main/passport/conf:go_default_library",
"//app/job/main/passport/service:go_default_library",
"//library/log:go_default_library",
"//library/net/http/blademaster:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,41 @@
package http
import (
"net/http"
"go-common/app/job/main/passport/conf"
"go-common/app/job/main/passport/service"
"go-common/library/log"
bm "go-common/library/net/http/blademaster"
)
var (
srv *service.Service
)
// Init init http server instance: it stores the service handle used by
// the handlers, builds the blademaster engine, registers the inner
// router and starts listening. It panics when the server cannot start,
// so a failed bootstrap aborts the process.
func Init(c *conf.Config, s *service.Service) {
	srv = s
	// init inner router
	// engine
	engIn := bm.DefaultServer(c.BM)
	innerRouter(engIn)
	// init inner server
	if err := engIn.Start(); err != nil {
		log.Error("bm.Start error(%v)", err)
		panic(err)
	}
}
// innerRouter init inner router: this job only exposes the health
// endpoint, there is no business API.
func innerRouter(e *bm.Engine) {
	e.Ping(ping)
}
// ping reports process health: 503 when the service's Ping fails,
// 200 otherwise.
func ping(c *bm.Context) {
	err := srv.Ping(c)
	if err == nil {
		return
	}
	log.Error("ping error(%v)", err)
	c.AbortWithStatus(http.StatusServiceUnavailable)
}

View File

@@ -0,0 +1,33 @@
load(
"@io_bazel_rules_go//go:def.bzl",
"go_library",
)
go_library(
name = "go_default_library",
srcs = [
"access.go",
"auth.go",
"job.go",
"log.go",
"message.go",
"tel_bind_log.go",
],
importpath = "go-common/app/job/main/passport/model",
tags = ["automanaged"],
visibility = ["//visibility:public"],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,9 @@
package model
// AccessInfo aso_app_perm table.
// One access-token row of the legacy aso_app_perm table, as carried in
// binlog/databus messages and in notifications to the game side.
type AccessInfo struct {
	AppID   int32  `json:"appid"`
	Mid     int64  `json:"mid"`
	Token   string `json:"access_token"`
	Expires int64  `json:"expires"` // presumably unix seconds (compared to time.Now().Unix() in cleanToken)
}

View File

@@ -0,0 +1,11 @@
package model
// AuthToken for auth.
// One row of the new auth DB's user_token_* tables, as decoded from
// binlog delete events; Token here is base64 (re-encoded to hex before
// cache cleanup, see cleanAuthTokens).
type AuthToken struct {
	ID      int64  `json:"id"`
	Mid     int64  `json:"mid"`
	AppID   int64  `json:"appid"`
	Token   string `json:"token"`
	Expires int64  `json:"expires"`
	Type    int64  `json:"type"`
}

View File

@@ -0,0 +1,13 @@
package model
// Token user access token.
// Payload of a passport databus message (see PMsg.Data).
type Token struct {
	Mid     int64  `json:"mid"`
	Appid   int64  `json:"appid"`
	Subid   int64  `json:"appSubid"`
	Token   string `json:"accessToken"`
	RToken  string `json:"refreshToken"`
	CTime   int64  `json:"createAt"`
	Expires int64  `json:"expires"`
	Type    int64  `json:"type"`
}

View File

@@ -0,0 +1,22 @@
package model
// LoginLog login log.
// One login event consumed from databus and persisted to MySQL/HBase.
type LoginLog struct {
	Mid       int64  `json:"mid"`
	Timestamp int64  `json:"timestamp"`
	LoginIP   int64  `json:"loginip"` // IPv4 stored as integer (see InetAtoN usage in dao tests)
	Type      int64  `json:"type"`
	Server    string `json:"server"`
}

// PwdLog pwd log.
// One password-change row from aso_pwd_log; old/new password hashes and
// their salts travel together so history can be audited.
type PwdLog struct {
	ID        int64  `json:"id"`
	Mid       int64  `json:"mid"`
	Timestamp int64  `json:"timestamp"`
	IP        int64  `json:"ip"` // IPv4 stored as integer
	OldPwd    string `json:"old_pwd"`
	OldSalt   string `json:"old_salt"`
	NewPwd    string `json:"new_pwd"`
	NewSalt   string `json:"new_salt"`
}

View File

@@ -0,0 +1,21 @@
package model
import (
"encoding/json"
)
// BMsg databus binlog message.
// Generic binlog envelope: Action is the DML verb (insert/update/delete),
// Table is the source table; New/Old carry the raw row images and are
// decoded lazily into the concrete row type by each consumer.
type BMsg struct {
	Action string          `json:"action"`
	Table  string          `json:"table"`
	New    json.RawMessage `json:"new"`
	Old    json.RawMessage `json:"old"`
}

// PMsg databus passport message.
type PMsg struct {
	Action string `json:"action"`
	// NOTE(review): field is named Table but maps to the "type" JSON key —
	// confirm this is intentional in the producer's schema.
	Table string `json:"type"`
	Data  *Token `json:"data"`
	CTime int64  `json:"ctime"`
}

View File

@@ -0,0 +1,17 @@
package model
// TelBindLog bind log.
// One row of aso_telephone_bind_log as decoded from binlog messages and
// re-read from the DB by handleTelBindLog.
type TelBindLog struct {
	ID        int64  `json:"id"`
	Mid       int64  `json:"mid"`
	Tel       string `json:"tel"`
	Timestamp int64  `json:"timestamp"`
}

// EmailBindLog bind log.
// One row of aso_email_bind_log; mirrors TelBindLog for email binds.
type EmailBindLog struct {
	ID        int64  `json:"id"`
	Mid       int64  `json:"mid"`
	Email     string `json:"email"`
	Timestamp int64  `json:"timestamp"`
}

View File

@@ -0,0 +1,67 @@
load(
"@io_bazel_rules_go//go:def.bzl",
"go_library",
"go_test",
)
go_library(
name = "go_default_library",
srcs = [
"aes.go",
"auth.go",
"clean_token.go",
"contact_bind_log.go",
"login_log.go",
"pwd_log.go",
"service.go",
"set_token.go",
"sync_pwd_log.go",
],
importpath = "go-common/app/job/main/passport/service",
tags = ["automanaged"],
visibility = ["//visibility:public"],
deps = [
"//app/job/main/passport/conf:go_default_library",
"//app/job/main/passport/dao:go_default_library",
"//app/job/main/passport/model:go_default_library",
"//app/service/main/identify-game/model:go_default_library",
"//app/service/main/identify-game/rpc/client:go_default_library",
"//library/log:go_default_library",
"//library/queue/databus:go_default_library",
],
)
go_test(
name = "go_default_test",
srcs = [
"aes_test.go",
"contact_bind_log_test.go",
"service_test.go",
],
embed = [":go_default_library"],
rundir = ".",
tags = ["automanaged"],
deps = [
"//app/job/main/passport/conf:go_default_library",
"//app/job/main/passport/model:go_default_library",
"//app/service/main/identify-game/model:go_default_library",
"//library/log:go_default_library",
"//library/queue/databus:go_default_library",
"//library/time:go_default_library",
"//vendor/github.com/smartystreets/goconvey/convey:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,66 @@
package service
import (
"bytes"
"crypto/aes"
"crypto/cipher"
"crypto/rand"
"encoding/base64"
"errors"
"io"
)
// pad right-pads src to a whole number of AES blocks, PKCS#7 style:
// n bytes each holding the value n are appended (n in 1..BlockSize).
func pad(src []byte) []byte {
	n := aes.BlockSize - len(src)%aes.BlockSize
	fill := bytes.Repeat([]byte{byte(n)}, n)
	return append(src, fill...)
}
// unpad strips the PKCS#7-style padding added by pad.
// It returns an error when the buffer is empty, when the padding length
// is zero, or when it exceeds the buffer length — all of which indicate
// corrupt input or decryption with the wrong key.
func unpad(src []byte) ([]byte, error) {
	length := len(src)
	// BUGFIX: an empty buffer previously panicked on src[length-1].
	if length == 0 {
		return nil, errors.New("unpad error. This could happen when incorrect encryption key is used")
	}
	unpadding := int(src[length-1])
	// BUGFIX: pad never emits a 0 padding byte, so 0 means corruption;
	// it previously passed through and returned the full buffer as valid.
	if unpadding == 0 || unpadding > length {
		return nil, errors.New("unpad error. This could happen when incorrect encryption key is used")
	}
	return src[:(length - unpadding)], nil
}
// encrypt AES-CFB encrypts text under s.AESBlock with a fresh random IV
// and returns base64url(iv || ciphertext).
func (s *Service) encrypt(text string) (string, error) {
	plain := pad([]byte(text))
	buf := make([]byte, aes.BlockSize+len(plain))
	iv := buf[:aes.BlockSize]
	if _, err := io.ReadFull(rand.Reader, iv); err != nil {
		return "", err
	}
	stream := cipher.NewCFBEncrypter(s.AESBlock, iv)
	stream.XORKeyStream(buf[aes.BlockSize:], plain)
	return base64.URLEncoding.EncodeToString(buf), nil
}
// decrypt reverses encrypt: it base64url-decodes text, splits off the
// leading IV, AES-CFB decrypts in place and strips the padding.
func (s *Service) decrypt(text string) (string, error) {
	raw, err := base64.URLEncoding.DecodeString(text)
	if err != nil {
		return "", err
	}
	if len(raw)%aes.BlockSize != 0 {
		return "", errors.New("blocksize must be multipe of decoded message length")
	}
	iv, body := raw[:aes.BlockSize], raw[aes.BlockSize:]
	cipher.NewCFBDecrypter(s.AESBlock, iv).XORKeyStream(body, body)
	plain, err := unpad(body)
	if err != nil {
		return "", err
	}
	return string(plain), nil
}

View File

@@ -0,0 +1,19 @@
package service
import (
"testing"
"github.com/smartystreets/goconvey/convey"
)
// TestService_encrypt round-trips a plaintext through encrypt/decrypt
// and asserts the result equals the input.
func TestService_encrypt(t *testing.T) {
	once.Do(startService)
	convey.Convey("", t, func() {
		text := "123456"
		et, err := s.encrypt(text)
		convey.So(err, convey.ShouldBeNil)
		dt, err := s.decrypt(et)
		convey.So(err, convey.ShouldBeNil)
		convey.So(dt, convey.ShouldEqual, text)
	})
}

View File

@@ -0,0 +1,137 @@
package service
import (
"encoding/base64"
"encoding/hex"
"encoding/json"
"strconv"
"strings"
"time"
"go-common/app/job/main/passport/model"
"go-common/library/log"
"go-common/library/queue/databus"
)
// authBinLogconsumeproc drains the auth-DB binlog databus stream,
// threads every message onto the auth commit linked list, and fans it
// out to a merge goroutine selected by the numeric message key so that
// messages sharing a key stay ordered.
func (s *Service) authBinLogconsumeproc() {
	mergeNum := s.c.Group.AuthBinLog.Num // assumes Num > 0 — TODO confirm config validation
	var (
		err  error
		n    int
		msgs = s.authBinLog.Messages()
	)
	for {
		msg, ok := <-msgs
		if !ok {
			log.Error("s.authBinLogconsumeproc closed")
			return
		}
		// marked head to first commit
		m := &message{data: msg}
		if n, err = strconv.Atoi(msg.Key); err != nil {
			// NOTE(review): a message with a non-numeric key is skipped and
			// never linked, so its offset is only committed once a later
			// valid message on the same partition is.
			log.Error("strconv.Atoi(%s) error(%v)", msg.Key, err)
			continue
		}
		// append to the singly-linked list the commit proc walks in order.
		s.authBinLogMu.Lock()
		if s.authBinLogHead == nil {
			s.authBinLogHead = m
			s.authBinLogLast = m
		} else {
			s.authBinLogLast.next = m
			s.authBinLogLast = m
		}
		s.authBinLogMu.Unlock()
		// use specify goroutine to merge messages
		s.authBinLogMergeChans[n%mergeNum] <- m
		log.Info("authBinLogconsumeproc key:%s partition:%d offset:%d", msg.Key, msg.Partition, msg.Offset)
	}
}
// authBinLogcommitproc marks merged auth-binlog messages done and, per
// partition, commits the newest offset whose list predecessors are all
// done — so databus offsets only ever advance over fully processed
// messages.
func (s *Service) authBinLogcommitproc() {
	commits := make(map[int32]*databus.Message, s.c.Group.AuthBinLog.Size)
	for {
		done := <-s.authBinLogDoneChan
		// merge partitions to commit offset
		for _, d := range done {
			d.done = true
		}
		// BUGFIX: the auth linked list is appended to under s.authBinLogMu
		// in authBinLogconsumeproc but was walked here under the unrelated
		// s.mu (the token pipeline's lock), racing with the producer.
		// Walk it under the same mutex the producer uses.
		s.authBinLogMu.Lock()
		for ; s.authBinLogHead != nil && s.authBinLogHead.done; s.authBinLogHead = s.authBinLogHead.next {
			commits[s.authBinLogHead.data.Partition] = s.authBinLogHead.data
		}
		s.authBinLogMu.Unlock()
		for k, m := range commits {
			log.Info("authBinLogcommitproc committed, key:%s partition:%d offset:%d", m.Key, m.Partition, m.Offset)
			m.Commit()
			delete(commits, k)
		}
	}
}
// authBinLogmergeproc merges auth-binlog messages for one shard: rows
// deleted from user_token_* tables are batched and flushed (when the
// batch fills or the ticker fires) to cleanAuthTokens, after which the
// consumed messages are handed back for offset commit.
func (s *Service) authBinLogmergeproc(c chan *message) {
	var (
		err    error
		max    = s.c.Group.AuthBinLog.Size
		merges = make([]*model.AuthToken, 0, max)
		marked = make([]*message, 0, max)
		ticker = time.NewTicker(time.Duration(s.c.Group.AuthBinLog.Ticker))
	)
	for {
		select {
		case msg, ok := <-c:
			if !ok {
				log.Error("s.authBinLogmergeproc closed")
				return
			}
			bmsg := &model.BMsg{}
			if err = json.Unmarshal(msg.data.Value, bmsg); err != nil {
				log.Error("json.Unmarshal(%s) error(%v)", string(msg.data.Value), err)
				continue
			}
			// NOTE(review): for delete binlogs the row image is read from
			// New rather than Old — presumably how this stream is produced;
			// confirm against the binlog publisher.
			if bmsg.Action == "delete" && strings.HasPrefix(bmsg.Table, "user_token_") {
				t := &model.AuthToken{}
				if err = json.Unmarshal(bmsg.New, t); err != nil {
					log.Error("json.Unmarshal(%s) error(%v)", string(bmsg.New), err)
					continue
				}
				merges = append(merges, t)
			}
			marked = append(marked, msg)
			// keep accumulating until either batch reaches capacity.
			if len(marked) < max && len(merges) < max {
				continue
			}
		case <-ticker.C:
			// periodic flush so a quiet stream still commits offsets.
		}
		if len(merges) > 0 {
			s.cleanAuthTokens(merges)
			merges = make([]*model.AuthToken, 0, max)
		}
		if len(marked) > 0 {
			s.authBinLogDoneChan <- marked
			marked = make([]*message, 0, max)
		}
	}
}
// cleanAuthTokens re-encodes each deleted auth token from base64 to hex
// and delegates cache invalidation/notification to cleanToken.
func (s *Service) cleanAuthTokens(authTokens []*model.AuthToken) {
	for _, at := range authTokens {
		raw, err := base64.StdEncoding.DecodeString(at.Token)
		if err != nil {
			log.Error("cleanAuthTokens base64 decode err %v", err)
			continue
		}
		log.Info("auth binlog clear cleanAuthTokens,msg is (%+v)", at)
		s.cleanToken(&model.AccessInfo{
			Mid:     at.Mid,
			AppID:   int32(at.AppID),
			Token:   hex.EncodeToString(raw),
			Expires: at.Expires,
		})
	}
}

View File

@@ -0,0 +1,170 @@
package service
import (
"context"
"encoding/json"
"strconv"
"strings"
"time"
"go-common/app/job/main/passport/model"
igmdl "go-common/app/service/main/identify-game/model"
"go-common/library/log"
"go-common/library/queue/databus"
)
const (
	// _changePwd is the change type reported when an access token is
	// invalidated (see cleanToken's NotifyGame call).
	_changePwd = "changePwd"
	// retry policy for the bounded retry loops in cleanToken.
	_retryCount = 3
	_retryDuration = time.Second
)
// tokenconsumeproc drains the aso binlog databus stream, threads every
// message onto the token commit linked list (head/last guarded by s.mu)
// and fans it out to a merge goroutine selected by the numeric message
// key so messages sharing a key stay ordered.
func (s *Service) tokenconsumeproc() {
	mergeNum := s.c.Group.AsoBinLog.Num // assumes Num > 0 — TODO confirm config validation
	var (
		err  error
		n    int
		msgs = s.dsToken.Messages()
	)
	for {
		msg, ok := <-msgs
		if !ok {
			log.Error("s.tokenconsumeproc closed")
			return
		}
		// marked head to first commit
		m := &message{data: msg}
		if n, err = strconv.Atoi(msg.Key); err != nil {
			// NOTE(review): skipped messages are never linked, so their
			// offsets only commit once a later valid message does.
			log.Error("strconv.Atoi(%s) error(%v)", msg.Key, err)
			continue
		}
		s.mu.Lock()
		if s.head == nil {
			s.head = m
			s.last = m
		} else {
			s.last.next = m
			s.last = m
		}
		s.mu.Unlock()
		// use specify goroutine to merge messages
		s.tokenMergeChans[n%mergeNum] <- m
		log.Info("tokenconsumeproc key:%s partition:%d offset:%d", msg.Key, msg.Partition, msg.Offset)
	}
}
// tokencommitproc marks merged token messages done and, per partition,
// commits the newest offset whose list predecessors are all done.
// Uses s.mu — the same lock tokenconsumeproc holds while appending.
func (s *Service) tokencommitproc() {
	commits := make(map[int32]*databus.Message, s.c.Group.AsoBinLog.Size)
	for {
		done := <-s.tokenDoneChan
		// merge partitions to commit offset
		for _, d := range done {
			d.done = true
		}
		s.mu.Lock()
		for ; s.head != nil && s.head.done; s.head = s.head.next {
			commits[s.head.data.Partition] = s.head.data
		}
		s.mu.Unlock()
		for k, m := range commits {
			log.Info("tokencommitproc committed, key:%s partition:%d offset:%d", m.Key, m.Partition, m.Offset)
			m.Commit()
			delete(commits, k)
		}
	}
}
// tokenmergeproc merges aso binlog messages for one shard: rows deleted
// from aso_app_perm are batched and flushed (when the batch fills or
// the ticker fires) to cleanTokens, after which the consumed messages
// are handed back for offset commit.
func (s *Service) tokenmergeproc(c chan *message) {
	var (
		err    error
		max    = s.c.Group.AsoBinLog.Size
		merges = make([]*model.AccessInfo, 0, max)
		marked = make([]*message, 0, max)
		ticker = time.NewTicker(time.Duration(s.c.Group.AsoBinLog.Ticker))
	)
	for {
		select {
		case msg, ok := <-c:
			if !ok {
				log.Error("s.tokenmergeproc closed")
				return
			}
			bmsg := &model.BMsg{}
			if err = json.Unmarshal(msg.data.Value, bmsg); err != nil {
				log.Error("json.Unmarshal(%s) error(%v)", string(msg.data.Value), err)
				continue
			}
			// NOTE(review): delete binlogs are decoded from New rather than
			// Old — presumably how this stream is produced; confirm.
			if bmsg.Action == "delete" && strings.HasPrefix(bmsg.Table, "aso_app_perm") {
				t := &model.AccessInfo{}
				if err = json.Unmarshal(bmsg.New, t); err != nil {
					log.Error("json.Unmarshal(%s) error(%v)", string(bmsg.New), err)
					continue
				}
				merges = append(merges, t)
			}
			marked = append(marked, msg)
			// keep accumulating until either batch reaches capacity.
			if len(marked) < max && len(merges) < max {
				continue
			}
		case <-ticker.C:
			// periodic flush so a quiet stream still commits offsets.
		}
		if len(merges) > 0 {
			s.cleanTokens(merges)
			merges = make([]*model.AccessInfo, 0, max)
		}
		if len(marked) > 0 {
			s.tokenDoneChan <- marked
			marked = make([]*message, 0, max)
		}
	}
}
// cleanTokens invalidates every access token in the batch, one by one.
func (s *Service) cleanTokens(tokens []*model.AccessInfo) {
	for i := range tokens {
		s.cleanToken(tokens[i])
	}
}
// cleanToken to notify other clean access token.
// For a non-expired token belonging to a configured game app ID it:
//  1. deletes the local cache entry (retried without bound until it succeeds),
//  2. asks identify-game-service via RPC to drop its cache (bounded retries),
//  3. notifies the game side of the change (bounded retries).
func (s *Service) cleanToken(token *model.AccessInfo) (err error) {
	if token == nil || token.Expires < time.Now().Unix() {
		return
	}
	// only game tokens are propagated; everything else is a no-op.
	isGame := false
	for _, id := range s.gameAppIDs {
		if id == token.AppID {
			isGame = true
			break
		}
	}
	if !isGame {
		return
	}
	// NOTE(review): unbounded retry — a persistently failing cache delete
	// blocks this goroutine forever; confirm this is intended.
	for {
		if err = s.d.DelCache(context.TODO(), token.Token); err == nil {
			break
		}
		time.Sleep(_retryDuration)
	}
	for i := 0; i < _retryCount; i++ {
		arg := &igmdl.CleanCacheArgs{
			Token: token.Token,
			Mid:   token.Mid,
		}
		if err = s.igRPC.DelCache(context.TODO(), arg); err == nil {
			break
		}
		log.Error("service.identifyGameRPC.DelCache(%+v) error(%v)", arg, err)
		time.Sleep(_retryDuration)
	}
	for i := 0; i < _retryCount; i++ {
		if err = s.d.NotifyGame(token, _changePwd); err == nil {
			return
		}
		time.Sleep(_retryDuration)
	}
	// all notify attempts failed; the last error is returned to the caller.
	log.Error("notify err, token(%+v)", token)
	return
}

View File

@@ -0,0 +1,283 @@
package service
import (
"context"
"crypto/sha1"
"encoding/base64"
"encoding/json"
"strings"
"time"
"go-common/app/job/main/passport/model"
"go-common/library/log"
"go-common/library/queue/databus"
)
// contactBindLogconsumeproc consumes tel/email bind binlog messages,
// links each onto the contact-bind commit list, and routes it to a
// merge goroutine sharded by mid so one user's events stay ordered.
func (s *Service) contactBindLogconsumeproc() {
	mergeRoutineNum := int64(s.c.Group.ContactBindLog.Num)
	for {
		msg, ok := <-s.dsContactBindLog.Messages()
		if !ok {
			log.Error("s.telBindlogconsumeproc closed")
			return
		}
		m := &message{data: msg}
		p := &model.BMsg{}
		if err := json.Unmarshal(msg.Value, p); err != nil {
			log.Error("json.Unmarshal(%s) error(%v)", string(msg.Value), err)
			continue
		}
		//m.object = p
		// decode just enough of the row image to learn the mid for sharding.
		mid := int64(0)
		switch {
		case strings.HasPrefix(p.Table, _telBindTable):
			t := new(model.TelBindLog)
			if err := json.Unmarshal(p.New, t); err != nil {
				log.Error("json.Unmarshal(%s) error(%v)", string(p.New), err)
				continue
			}
			mid = t.Mid
			m.object = p
			log.Info("contactBindLogconsumeproc table:%s key:%s partition:%d offset:%d", p.Table, msg.Key, msg.Partition, msg.Offset)
		case strings.HasPrefix(p.Table, _emailBindTable):
			t := new(model.EmailBindLog)
			if err := json.Unmarshal(p.New, t); err != nil {
				log.Error("json.Unmarshal(%s) error(%v)", string(p.New), err)
				continue
			}
			mid = t.Mid
			m.object = p
			log.Info("contactBindLogconsumeproc table:%s key:%s partition:%d offset:%d", p.Table, msg.Key, msg.Partition, msg.Offset)
		default:
			// NOTE(review): skipped messages are never linked, so their
			// offsets only commit once a later valid message does.
			log.Warn("unrecognized message: %+v", p)
			continue
		}
		if mid == 0 {
			log.Warn("invalid message: %+v", p)
			continue
		}
		s.contactBindLogMu.Lock()
		if s.contactBindLogHead == nil {
			s.contactBindLogHead = m
			s.contactBindLogLast = m
		} else {
			s.contactBindLogLast.next = m
			s.contactBindLogLast = m
		}
		s.contactBindLogMu.Unlock()
		// use specify goroutine to merge messages
		s.contactBindLogMergeChans[mid%mergeRoutineNum] <- m
		log.Info("contactBindLogconsumeproc key:%s partition:%d offset:%d", msg.Key, msg.Partition, msg.Offset)
	}
}
// contactBindLogcommitproc marks merged contact-bind messages done and,
// per partition, commits the newest offset whose list predecessors are
// all done. Uses contactBindLogMu — the same lock the consume proc
// holds while appending.
func (s *Service) contactBindLogcommitproc() {
	// CONSISTENCY FIX: size the commit map from this pipeline's own
	// ContactBindLog group config rather than the login-log (Log) group's.
	commits := make(map[int32]*databus.Message, s.c.Group.ContactBindLog.Size)
	for {
		done := <-s.contactBindLogDoneChan
		// merge partitions to commit offset
		for _, d := range done {
			d.done = true
		}
		s.contactBindLogMu.Lock()
		for ; s.contactBindLogHead != nil && s.contactBindLogHead.done; s.contactBindLogHead = s.contactBindLogHead.next {
			commits[s.contactBindLogHead.data.Partition] = s.contactBindLogHead.data
		}
		s.contactBindLogMu.Unlock()
		for k, m := range commits {
			// log tag fixed: this is the contact-bind pipeline, not logcommitproc.
			log.Info("contactBindLogcommitproc committed, key:%s partition:%d offset:%d", m.Key, m.Partition, m.Offset)
			m.Commit()
			delete(commits, k)
		}
	}
}
// contactBindLogMergeproc merges contact-bind binlog messages for one
// shard: non-delete tel/email rows are batched and flushed (when the
// batch fills or the ticker fires) to contactBindLogProcessMerges,
// after which the consumed messages are handed back for offset commit.
func (s *Service) contactBindLogMergeproc(c chan *message) {
	var (
		max    = s.c.Group.ContactBindLog.Size
		merges = make([]*model.BMsg, 0, max)
		marked = make([]*message, 0, max)
		// NOTE(review): batch size comes from the ContactBindLog group but
		// the flush ticker from the Log group — confirm the mix is intended.
		ticker = time.NewTicker(time.Duration(s.c.Group.Log.Ticker))
	)
	for {
		select {
		case msg, ok := <-c:
			if !ok {
				log.Error("s.contactBindLogMergeproc closed")
				return
			}
			p, assertOk := msg.object.(*model.BMsg)
			if !assertOk {
				log.Warn("s.contactBindLogMergeproc cannot convert BMsg")
				continue
			}
			//if p.Action != "insert" {
			//	continue
			//}
			// deletes are ignored; inserts and updates are both processed.
			if p.Action == "delete" {
				continue
			}
			log.Info("s.contactBindLogMergeproc: %+v", msg)
			switch {
			case strings.HasPrefix(p.Table, _telBindTable) || strings.HasPrefix(p.Table, _emailBindTable):
				merges = append(merges, p)
			default:
				log.Warn("unrecognized the message: %+v", p)
			}
			marked = append(marked, msg)
			// keep accumulating until either batch reaches capacity.
			if len(marked) < max && len(merges) < max {
				continue
			}
		case <-ticker.C:
			// periodic flush so a quiet stream still commits offsets.
		}
		if len(merges) > 0 {
			s.contactBindLogProcessMerges(merges)
			merges = make([]*model.BMsg, 0, max)
		}
		if len(marked) > 0 {
			s.contactBindLogDoneChan <- marked
			marked = make([]*message, 0, max)
		}
	}
}
// contactBindLogProcessMerges dispatches each merged binlog row to the
// matching tel/email bind-log handler, skipping rows that fail to decode.
func (s *Service) contactBindLogProcessMerges(bmsgs []*model.BMsg) {
	for _, m := range bmsgs {
		log.Info("contactBindLogProcessMerges: %+v", m.Table)
		if strings.HasPrefix(m.Table, _telBindTable) {
			row := new(model.TelBindLog)
			if err := json.Unmarshal(m.New, row); err != nil {
				log.Error("json.Unmarshal(%s) error(%v)", string(m.New), err)
				continue
			}
			s.handleTelBindLog(row)
			continue
		}
		if strings.HasPrefix(m.Table, _emailBindTable) {
			row := new(model.EmailBindLog)
			if err := json.Unmarshal(m.New, row); err != nil {
				log.Error("json.Unmarshal(%s) error(%v)", string(m.New), err)
				continue
			}
			s.handleEmailBindLog(row)
		}
	}
}
// userLogExtra is the encrypted contact payload embedded (as JSON) in
// userLog.ExtraData; only one of the two fields is set per event.
type userLogExtra struct {
	EncryptTel   string `json:"tel"`
	EncryptEmail string `json:"email"`
}

// userLog is the event published to the user-log databus topic for each
// tel/email bind (see handleTelBindLog / handleEmailBindLog).
type userLog struct {
	Action    string `json:"action"`
	Mid       int64  `json:"mid"`
	Str0      string `json:"str_0"` // base64(hashSalt || sha1(contact)) — searchable digest
	ExtraData string `json:"extra_data"`
	Business  int    `json:"business"`
	CTime     string `json:"ctime"`
}
// handleTelBindLog re-reads the tel-bind row by id (retrying DB errors
// without bound), AES-encrypts the telephone number, and publishes a
// userLog event carrying a digest plus the encrypted value.
func (s *Service) handleTelBindLog(telLog *model.TelBindLog) (err error) {
	var bindLog *model.TelBindLog
	// NOTE(review): unbounded retry — a permanent DB failure spins here.
	for {
		bindLog, err = s.d.QueryTelBindLog(telLog.ID)
		if err != nil {
			log.Error("QueryTelBindLog (%v) err(%v)", telLog, err)
			time.Sleep(100 * time.Millisecond)
			continue
		}
		break
	}
	// the dao returns a zero-ID record when the row is absent.
	if bindLog == nil || bindLog.ID == 0 {
		log.Warn("telephone log (%v) nil", bindLog)
		return
	}
	rt, err := s.encrypt(bindLog.Tel)
	if err != nil {
		log.Error("aesEncrypt(%v) error(%v)", bindLog, err)
		return
	}
	extraData := userLogExtra{
		EncryptTel: rt,
	}
	// Str0 below is base64(hashSalt || sha1(tel)): hash.Sum appends the
	// digest to its argument, so the salt is a prefix, not mixed in.
	hash := sha1.New()
	hash.Write([]byte(bindLog.Tel))
	extraDataBytes, err := json.Marshal(extraData)
	if err != nil {
		log.Error("extraData (%v) json marshal err(%v)", extraData, err)
		return
	}
	uLog := userLog{
		Action:    "telBindLog",
		Mid:       bindLog.Mid,
		Str0:      base64.StdEncoding.EncodeToString(hash.Sum(s.hashSalt)),
		ExtraData: string(extraDataBytes),
		Business:  54,
		CTime:     time.Unix(bindLog.Timestamp, 0).Format("2006-01-02 15:04:05"),
	}
	// publish with unbounded retry; the raw telephone is the partition key.
	for {
		if err = s.userLogPub.Send(context.Background(), bindLog.Tel, uLog); err != nil {
			log.Error("databus send(%v) error(%v)", uLog, err)
			time.Sleep(100 * time.Millisecond)
			continue
		}
		log.Info("uselog pub uLog: %+v", uLog)
		break
	}
	return
}
// handleEmailBindLog re-reads the email-bind row by id (retrying DB
// errors without bound), AES-encrypts the email address, and publishes
// a userLog event carrying a digest plus the encrypted value.
// Mirrors handleTelBindLog for the email table.
func (s *Service) handleEmailBindLog(emailLog *model.EmailBindLog) (err error) {
	var bindLog *model.EmailBindLog
	// NOTE(review): unbounded retry — a permanent DB failure spins here.
	for {
		bindLog, err = s.d.QueryEmailBindLog(emailLog.ID)
		if err != nil {
			log.Error("QueryEmailBindLog (%v) err(%v)", emailLog, err)
			time.Sleep(100 * time.Millisecond)
			continue
		}
		break
	}
	// the dao returns a zero-ID record when the row is absent.
	if bindLog == nil || bindLog.ID == 0 {
		log.Warn("email log (%v) nil", bindLog)
		return
	}
	rt, err := s.encrypt(bindLog.Email)
	if err != nil {
		log.Error("aesEncrypt(%v) error(%v)", bindLog, err)
		return
	}
	extraData := userLogExtra{
		EncryptEmail: rt,
	}
	// Str0 below is base64(hashSalt || sha1(email)): hash.Sum appends the
	// digest to its argument, so the salt is a prefix, not mixed in.
	hash := sha1.New()
	hash.Write([]byte(bindLog.Email))
	extraDataBytes, err := json.Marshal(extraData)
	if err != nil {
		log.Error("extraData (%v) json marshal err(%v)", extraData, err)
		return
	}
	uLog := userLog{
		Action:    "emailBindLog",
		Mid:       bindLog.Mid,
		Str0:      base64.StdEncoding.EncodeToString(hash.Sum(s.hashSalt)),
		ExtraData: string(extraDataBytes),
		Business:  54,
		CTime:     time.Unix(bindLog.Timestamp, 0).Format("2006-01-02 15:04:05"),
	}
	// publish with unbounded retry; the raw email is the partition key.
	for {
		if err = s.userLogPub.Send(context.Background(), bindLog.Email, uLog); err != nil {
			log.Error("databus send(%v) error(%v)", uLog, err)
			time.Sleep(100 * time.Millisecond)
			continue
		}
		log.Info("uselog pub uLog: %+v", uLog)
		break
	}
	return
}

View File

@@ -0,0 +1,100 @@
package service
import (
"context"
"testing"
"time"
"encoding/json"
"go-common/app/job/main/passport/model"
"go-common/library/queue/databus"
xtime "go-common/library/time"
)
var (
	// pCfg is a databus producer config for the test1 topic.
	// NOTE(review): the key/secret and address below are credentials
	// embedded in source — confirm they are test-environment only.
	pCfg = &databus.Config{
		Key:          "dbe67e6a4c36f877",
		Secret:       "8c775ea242caa367ba5c876c04576571",
		Group:        "Test1-MainCommonArch-P",
		Topic:        "test1",
		Action:       "pub",
		Name:         "databus",
		Proto:        "tcp",
		Addr:         "172.18.33.50:6205",
		Active:       10,
		Idle:         5,
		DialTimeout:  xtime.Duration(time.Second),
		WriteTimeout: xtime.Duration(time.Second),
		ReadTimeout:  xtime.Duration(time.Second),
		IdleTimeout:  xtime.Duration(time.Minute),
	}
)
// testPub publishes one synthetic aso_telephone_bind_log insert binlog
// message to the configured databus topic.
func testPub(t *testing.T, d *databus.Databus) {
	tel := model.TelBindLog{ID: 2, Mid: 88883, Tel: "18817352650", Timestamp: 1500022511}
	da, _ := json.Marshal(&tel) // marshal of a plain struct; error deliberately ignored
	c := &model.BMsg{Action: "insert", Table: "aso_telephone_bind_log", New: da}
	if err := d.Send(context.Background(), "test", c); err != nil {
		t.Errorf("d.Send(test) error(%v)", err)
	}
}
// TestDatabus publishes three messages then closes the producer
// (integration test: requires the databus endpoint from pCfg).
func TestDatabus(t *testing.T) {
	d := databus.New(pCfg)
	testPub(t, d)
	testPub(t, d)
	testPub(t, d)
	d.Close()
}
//var aesBlock, _ = aes.NewCipher([]byte("1234567890abcdef"))
//func TestEncode(t *testing.T) {
// for a := 0; a < 1000; a++ {
// go enconde()
// }
// time.Sleep(10000 * time.Second)
//
//}
//
//func enconde() {
// for i := 0; i < 100; i++ {
// key := []byte("1234567890abcdef")
// origData := []byte(strconv.Itoa(rand.Intn(100)))
// blockSize := aesBlock.BlockSize()
// origData = PKCS7Padding(origData, blockSize)
// blockMode := cipher.NewCBCEncrypter(aesBlock, key[:blockSize])
// crypted := make([]byte, len(origData))
// blockMode.CryptBlocks(crypted, origData)
// fmt.Println(base64.StdEncoding.EncodeToString(crypted))
// }
//}
//func TestDeode(t *testing.T) {
// key := []byte("1234567890abcdef")
// b,_:=base64.StdEncoding.DecodeString("29YQhqBb/J2XiBAj6bP3Zg==");
// s,_:=AesDecrypt(b,key)
// fmt.Print(string(s))
//}
//
//
//func AesDecrypt(crypted, key []byte) ([]byte, error) {
// block, err := aes.NewCipher(key)
// if err != nil {
// return nil, err
// }
// blockSize := block.BlockSize()
// blockMode := cipher.NewCBCDecrypter(block, key[:blockSize])
// origData := make([]byte, len(crypted))
// blockMode.CryptBlocks(origData, crypted)
// origData = PKCS7UnPadding(origData)
// return origData, nil
//}
//
//
//func PKCS7UnPadding(origData []byte) []byte {
// length := len(origData)
// unpadding := int(origData[length-1])
// return origData[:(length - unpadding)]
//}

View File

@@ -0,0 +1,121 @@
package service
import (
"context"
"encoding/json"
"time"
"go-common/app/job/main/passport/model"
"go-common/library/log"
"go-common/library/queue/databus"
)
const (
	// retry policy for HBase writes in addLoginLog / addPwdLog.
	_addHBaseRetryCount = 3
	_addHBaseRetryDuration = time.Second
)
// logconsumeproc consumes login-log databus messages, links each onto
// the login-log commit list, and routes it to a merge goroutine sharded
// by mid so one user's events stay ordered.
func (s *Service) logconsumeproc() {
	mergeRoutineNum := int64(s.c.Group.Log.Num) // assumes Num > 0 — TODO confirm config validation
	for {
		msg, ok := <-s.dsLog.Messages()
		if !ok {
			log.Error("s.logconsumeproc closed")
			return
		}
		// marked head to first commit
		m := &message{data: msg}
		p := &model.LoginLog{}
		if err := json.Unmarshal(msg.Value, p); err != nil {
			// NOTE(review): undecodable messages are never linked, so their
			// offsets only commit once a later valid message does.
			log.Error("json.Unmarshal(%s) error(%v)", string(msg.Value), err)
			continue
		}
		s.logMu.Lock()
		if s.logHead == nil {
			s.logHead = m
			s.logLast = m
		} else {
			s.logLast.next = m
			s.logLast = m
		}
		s.logMu.Unlock()
		m.object = p
		// use specify goroutine to merge messages
		s.logMergeChans[p.Mid%mergeRoutineNum] <- m
		log.Info("logconsumeproc key:%s partition:%d offset:%d", msg.Key, msg.Partition, msg.Offset)
	}
}
// logcommitproc marks merged login-log messages done and, per
// partition, commits the newest offset whose list predecessors are all
// done. Uses logMu — the same lock logconsumeproc holds while appending.
func (s *Service) logcommitproc() {
	commits := make(map[int32]*databus.Message, s.c.Group.Log.Size)
	for {
		done := <-s.logDoneChan
		// merge partitions to commit offset
		for _, d := range done {
			d.done = true
		}
		s.logMu.Lock()
		for ; s.logHead != nil && s.logHead.done; s.logHead = s.logHead.next {
			commits[s.logHead.data.Partition] = s.logHead.data
		}
		s.logMu.Unlock()
		for k, m := range commits {
			log.Info("logcommitproc committed, key:%s partition:%d offset:%d", m.Key, m.Partition, m.Offset)
			m.Commit()
			delete(commits, k)
		}
	}
}
// logmergeproc merges login-log messages for one shard: decoded records
// are batched and flushed (when the batch fills or the ticker fires) to
// processMerges, after which the consumed messages are handed back for
// offset commit.
func (s *Service) logmergeproc(c chan *message) {
	var (
		max    = s.c.Group.Log.Size
		merges = make([]*model.LoginLog, 0, max)
		marked = make([]*message, 0, max)
		ticker = time.NewTicker(time.Duration(s.c.Group.Log.Ticker))
	)
	for {
		select {
		case msg, ok := <-c:
			if !ok {
				log.Error("s.logmergeproc closed")
				return
			}
			p, assertOk := msg.object.(*model.LoginLog)
			if assertOk {
				merges = append(merges, p)
			}
			marked = append(marked, msg)
			// keep accumulating until either batch reaches capacity.
			if len(marked) < max && len(merges) < max {
				continue
			}
		case <-ticker.C:
			// periodic flush so a quiet stream still commits offsets.
		}
		if len(merges) > 0 {
			s.processMerges(merges)
			merges = make([]*model.LoginLog, 0, max)
		}
		if len(marked) > 0 {
			s.logDoneChan <- marked
			marked = make([]*message, 0, max)
		}
	}
}
// processMerges flushes a merged batch of login logs: a bulk insert
// into the DB followed by per-record HBase writes (with retries).
func (s *Service) processMerges(merges []*model.LoginLog) {
	// BUGFIX: the bulk-insert error was silently discarded; log it so a
	// failing DB write is visible. The HBase writes below are independent
	// of the DB insert, so we still proceed.
	if err := s.d.AddLoginLog(merges); err != nil {
		log.Error("s.d.AddLoginLog(%d logs) error(%v)", len(merges), err)
	}
	for _, v := range merges {
		s.addLoginLog(context.TODO(), v)
	}
}
// addLoginLog writes one login record to HBase, retrying a bounded
// number of times; the last error (if any) is returned.
func (s *Service) addLoginLog(c context.Context, v *model.LoginLog) (err error) {
	for attempt := 0; attempt < _addHBaseRetryCount; attempt++ {
		err = s.d.AddLoginLogHBase(c, v)
		if err == nil {
			return
		}
		log.Error("failed to add login log to hbase, service.dao.AddLoginLogHBase(%+v) error(%v)", v, err)
		time.Sleep(_addHBaseRetryDuration)
	}
	return
}

View File

@@ -0,0 +1,154 @@
package service
import (
"context"
"encoding/json"
"strings"
"time"
"go-common/app/job/main/passport/model"
"go-common/library/log"
"go-common/library/queue/databus"
)
// pwdLogBMsg is a partially-decoded binlog envelope used only to filter
// pwd-log messages before sharding; unlike model.BMsg, New is decoded
// directly into the row type so the consumer can read Mid.
type pwdLogBMsg struct {
	Action string
	Table  string
	New    *model.PwdLog
}
// pwdlogconsumeproc consumes pwd-log binlog messages, keeps only
// aso_pwd_log inserts, links each onto the pwd-log commit list, and
// routes it to a merge goroutine sharded by mid.
func (s *Service) pwdlogconsumeproc() {
	mergeRoutineNum := int64(s.c.Group.PwdLog.Num)
	for {
		msg, ok := <-s.dsPwdLog.Messages()
		if !ok {
			log.Error("s.pwdlogconsumeproc closed")
			return
		}
		// marked head to first commit
		m := &message{data: msg}
		p := &pwdLogBMsg{}
		if err := json.Unmarshal(msg.Value, p); err != nil {
			log.Error("json.Unmarshal(%s) error(%v)", string(msg.Value), err)
			continue
		}
		// 只处理 aso_pwd_log insert binlog
		// (only aso_pwd_log insert binlogs are processed)
		// NOTE(review): skipped messages are never linked, so their offsets
		// only commit once a later valid message does.
		if p.Table != "aso_pwd_log" {
			continue
		}
		if p.Action != "insert" {
			continue
		}
		m.object = p
		s.pwdLogMu.Lock()
		if s.pwdLogHead == nil {
			s.pwdLogHead = m
			s.pwdLogLast = m
		} else {
			s.pwdLogLast.next = m
			s.pwdLogLast = m
		}
		s.pwdLogMu.Unlock()
		// use specify goroutine to merge messages
		// NOTE(review): p.New is assumed non-nil for insert binlogs; a
		// message lacking the "new" image would panic here — confirm the
		// producer always includes it.
		s.pwdLogMergeChans[p.New.Mid%mergeRoutineNum] <- m
		log.Info("pwdlogconsumeproc key:%s partition:%d offset:%d", msg.Key, msg.Partition, msg.Offset)
	}
}
// pwdlogcommitproc marks merged pwd-log messages done and, per
// partition, commits the newest offset whose list predecessors are all
// done. Uses pwdLogMu — the same lock pwdlogconsumeproc holds while
// appending.
func (s *Service) pwdlogcommitproc() {
	commits := make(map[int32]*databus.Message, s.c.Group.PwdLog.Size)
	for {
		done := <-s.pwdLogDoneChan
		// merge partitions to commit offset
		for _, d := range done {
			d.done = true
		}
		s.pwdLogMu.Lock()
		for ; s.pwdLogHead != nil && s.pwdLogHead.done; s.pwdLogHead = s.pwdLogHead.next {
			commits[s.pwdLogHead.data.Partition] = s.pwdLogHead.data
		}
		s.pwdLogMu.Unlock()
		for k, m := range commits {
			log.Info("pwdlogcommitproc committed, key:%s partition:%d offset:%d", m.Key, m.Partition, m.Offset)
			m.Commit()
			delete(commits, k)
		}
	}
}
// pwdlogmergeproc merges pwd-log messages for one shard: aso_pwd_log
// insert rows are batched and flushed (when the batch fills or the
// ticker fires) to pwdlogprocessMerges, after which the consumed
// messages are handed back to the pwd-log commit proc.
func (s *Service) pwdlogmergeproc(c chan *message) {
	var (
		max    = s.c.Group.PwdLog.Size
		merges = make([]*model.PwdLog, 0, max)
		marked = make([]*message, 0, max)
		ticker = time.NewTicker(time.Duration(s.c.Group.PwdLog.Ticker))
		err    error
	)
	for {
		select {
		case msg, ok := <-c:
			if !ok {
				log.Error("s.pwdlogmergeproc closed")
				return
			}
			bmsg := &model.BMsg{}
			if err = json.Unmarshal(msg.data.Value, bmsg); err != nil {
				log.Error("json.Unmarshal(%s) error(%v)", string(msg.data.Value), err)
				continue
			}
			if bmsg.Action == "insert" && strings.HasPrefix(bmsg.Table, "aso_pwd_log") {
				p := &model.PwdLog{}
				if err = json.Unmarshal(bmsg.New, p); err != nil {
					log.Error("json.Unmarshal(%s) error(%v)", string(bmsg.New), err)
					continue
				}
				merges = append(merges, p)
			}
			marked = append(marked, msg)
			// keep accumulating until either batch reaches capacity.
			if len(marked) < max && len(merges) < max {
				continue
			}
		case <-ticker.C:
			// periodic flush so a quiet stream still commits offsets.
		}
		if len(merges) > 0 {
			s.pwdlogprocessMerges(merges)
			merges = make([]*model.PwdLog, 0, max)
		}
		if len(marked) > 0 {
			// BUGFIX: done messages were sent to s.logDoneChan (the
			// login-log pipeline), so pwdlogcommitproc never saw them:
			// pwd-log offsets were never committed and the pwd-log linked
			// list grew without bound (memory leak).
			s.pwdLogDoneChan <- marked
			marked = make([]*message, 0, max)
		}
	}
}
// pwdlogprocessMerges re-reads each merged pwd-log row from the DB by
// id and writes it to HBase, retrying each record until both steps
// succeed.
// NOTE(review): the per-record loop retries without bound; a row that
// permanently fails blocks this goroutine — confirm intended.
func (s *Service) pwdlogprocessMerges(merges []*model.PwdLog) {
	for _, v := range merges {
		for {
			res, err := s.d.GetPwdLog(context.Background(), v.ID)
			if err != nil {
				log.Error("fail to get pwd log, id(%d) err(%v)", v.ID, err)
				time.Sleep(_addHBaseRetryDuration)
				continue
			}
			if err := s.addPwdLog(context.Background(), res); err != nil {
				time.Sleep(_addHBaseRetryDuration)
				continue
			}
			break
		}
	}
}
// addPwdLog writes one pwd-log record to HBase, retrying a bounded
// number of times; the last error (if any) is returned.
func (s *Service) addPwdLog(c context.Context, v *model.PwdLog) (err error) {
	for attempt := 0; attempt < _addHBaseRetryCount; attempt++ {
		err = s.d.AddPwdLogHBase(c, v)
		if err == nil {
			return
		}
		log.Error("failed to add pwd log to hbase, service.dao.AddPwdLogHBase(%+v) error(%v)", v, err)
		time.Sleep(_addHBaseRetryDuration)
	}
	return
}

View File

@@ -0,0 +1,213 @@
package service
import (
"context"
"crypto/aes"
"crypto/cipher"
"sync"
"go-common/app/job/main/passport/conf"
"go-common/app/job/main/passport/dao"
igrpc "go-common/app/service/main/identify-game/rpc/client"
"go-common/library/queue/databus"
)
const (
	// _gameAppID is always included in gameAppIDs in addition to the
	// configured c.Game.AppIDs (see New).
	_gameAppID = int32(876)
	// binlog table-name prefixes matched by the contact-bind pipeline.
	_telBindTable = "aso_telephone_bind_log"
	_emailBindTable = "aso_email_bind_log"
)
// Service struct of service.
// Each databus stream gets its own CSP-style pipeline: a consume proc,
// N merge procs (sharded via the MergeChans), and a commit proc. Every
// pipeline carries a singly-linked list (head/last) of in-flight
// messages guarded by its own mutex; the commit proc advances the head
// past done messages and commits their offsets.
type Service struct {
	c *conf.Config
	d *dao.Dao
	// RPC
	igRPC *igrpc.Client
	// game app ids
	gameAppIDs []int32
	// token proc — note its list/lock are the unprefixed head/last/mu.
	dsToken         *databus.Databus
	tokenMergeChans []chan *message
	tokenDoneChan   chan []*message
	head, last      *message
	mu              sync.Mutex
	// user proc
	dsUser             *databus.Databus
	userMergeChans     []chan *message
	userDoneChan       chan []*message
	userHead, userLast *message
	userMu             sync.Mutex
	// log proc
	dsLog            *databus.Databus
	logMergeChans    []chan *message
	logDoneChan      chan []*message
	logHead, logLast *message
	logMu            sync.Mutex
	// pwd log proc
	dsPwdLog               *databus.Databus
	pwdLogMergeChans       []chan *message
	pwdLogDoneChan         chan []*message
	pwdLogHead, pwdLogLast *message
	pwdLogMu               sync.Mutex
	// auth bin log proc
	authBinLog                     *databus.Databus
	authBinLogMergeChans           []chan *message
	authBinLogDoneChan             chan []*message
	authBinLogHead, authBinLogLast *message
	authBinLogMu                   sync.Mutex
	// contact (tel/email) bind log proc
	dsContactBindLog                       *databus.Databus
	contactBindLogMergeChans               []chan *message
	contactBindLogDoneChan                 chan []*message
	contactBindLogHead, contactBindLogLast *message
	contactBindLogMu                       sync.Mutex
	// publisher for user-log events (contact-bind pipeline output)
	userLogPub *databus.Databus
	// AES block and salt used by encrypt/decrypt and the bind-log digests
	AESBlock cipher.Block
	hashSalt []byte
}
// message wraps a databus message as a node of an intrusive singly linked
// list, letting the commit goroutines commit offsets in consumption order.
type message struct {
	next   *message         // next message in consumption order
	data   *databus.Message // raw databus message (carries partition/offset)
	object interface{}      // decoded payload; concrete type depends on the stream
	done   bool             // set once the message has been fully processed
}
// New create service instance and return.
// It wires every databus consumer/producer, spawns the commit/merge/consume
// goroutine groups for each stream, and starts the one-shot pwd-log backfill.
func New(c *conf.Config) (s *Service) {
	// _gameAppID always comes first; append the configured ids after it,
	// skipping a duplicate of the built-in one.
	gameAppIDs := make([]int32, 0, len(c.Game.AppIDs)+1)
	gameAppIDs = append(gameAppIDs, _gameAppID)
	for _, id := range c.Game.AppIDs {
		if id == _gameAppID {
			continue
		}
		gameAppIDs = append(gameAppIDs, id)
	}
	s = &Service{
		c:          c,
		d:          dao.New(c),
		gameAppIDs: gameAppIDs,
		// RPC
		igRPC: igrpc.New(c.RPC.IdentifyGame),
		// token
		dsToken:         databus.New(c.DataBus.AsoBinLog),
		tokenMergeChans: make([]chan *message, c.Group.AsoBinLog.Num),
		tokenDoneChan:   make(chan []*message, c.Group.AsoBinLog.Chan),
		// user
		dsUser:         databus.New(c.DataBus.User),
		userMergeChans: make([]chan *message, c.Group.User.Num),
		userDoneChan:   make(chan []*message, c.Group.User.Chan),
		// log
		dsLog:         databus.New(c.DataBus.Log),
		logMergeChans: make([]chan *message, c.Group.Log.Num),
		logDoneChan:   make(chan []*message, c.Group.Log.Chan),
		// pwd log
		dsPwdLog:         databus.New(c.DataBus.PwdLog),
		pwdLogMergeChans: make([]chan *message, c.Group.PwdLog.Num),
		pwdLogDoneChan:   make(chan []*message, c.Group.PwdLog.Chan),
		// email and tel log
		dsContactBindLog:         databus.New(c.DataBus.ContactBindLog),
		contactBindLogMergeChans: make([]chan *message, c.Group.ContactBindLog.Num),
		contactBindLogDoneChan:   make(chan []*message, c.Group.ContactBindLog.Chan),
		userLogPub:               databus.New(c.DataBus.UserLog),
		// auth bin log
		authBinLog:           databus.New(c.DataBus.AuthBinLog),
		authBinLogMergeChans: make([]chan *message, c.Group.AuthBinLog.Num),
		authBinLogDoneChan:   make(chan []*message, c.Group.AuthBinLog.Chan),
		hashSalt:             []byte(c.Encode.Salt),
	}
	// Fail fast on a misconfigured AES key. The error was previously
	// discarded, which left AESBlock nil and deferred the crash to the
	// first encryption at an arbitrary call site.
	block, err := aes.NewCipher([]byte(c.Encode.AesKey))
	if err != nil {
		panic(err)
	}
	s.AESBlock = block
	// start token proc
	go s.tokencommitproc()
	for i := 0; i < c.Group.AsoBinLog.Num; i++ {
		ch := make(chan *message, c.Group.AsoBinLog.Chan)
		s.tokenMergeChans[i] = ch
		go s.tokenmergeproc(ch)
	}
	go s.tokenconsumeproc()
	// start user proc
	go s.usercommitproc()
	for i := 0; i < c.Group.User.Num; i++ {
		ch := make(chan *message, c.Group.User.Chan)
		s.userMergeChans[i] = ch
		go s.usermergeproc(ch)
	}
	go s.userconsumeproc()
	// start log proc
	go s.logcommitproc()
	for i := 0; i < c.Group.Log.Num; i++ {
		ch := make(chan *message, c.Group.Log.Chan)
		s.logMergeChans[i] = ch
		go s.logmergeproc(ch)
	}
	go s.logconsumeproc()
	// start pwd log proc
	go s.pwdlogcommitproc()
	for i := 0; i < c.Group.PwdLog.Num; i++ {
		ch := make(chan *message, c.Group.PwdLog.Chan)
		s.pwdLogMergeChans[i] = ch
		go s.pwdlogmergeproc(ch)
	}
	go s.pwdlogconsumeproc()
	// start contact (email/tel) bind log proc
	go s.contactBindLogcommitproc()
	for i := 0; i < c.Group.ContactBindLog.Num; i++ {
		ch := make(chan *message, c.Group.ContactBindLog.Chan)
		s.contactBindLogMergeChans[i] = ch
		go s.contactBindLogMergeproc(ch)
	}
	go s.contactBindLogconsumeproc()
	// start auth bin log token proc
	go s.authBinLogcommitproc()
	for i := 0; i < c.Group.AuthBinLog.Num; i++ {
		ch := make(chan *message, c.Group.AuthBinLog.Chan)
		s.authBinLogMergeChans[i] = ch
		go s.authBinLogmergeproc(ch)
	}
	go s.authBinLogconsumeproc()
	// end auth bin log token proc
	go s.syncPwdLog()
	return
}
// Ping reports service health by delegating to the dao layer.
func (s *Service) Ping(c context.Context) (err error) {
	return s.d.Ping(c)
}
// Close close service: releases every databus consumer/producer and the dao.
func (s *Service) Close() (err error) {
	if s.dsToken != nil {
		s.dsToken.Close()
	}
	if s.dsUser != nil {
		s.dsUser.Close()
	}
	if s.dsLog != nil {
		s.dsLog.Close()
	}
	if s.d != nil {
		s.d.Close()
	}
	if s.dsContactBindLog != nil {
		s.dsContactBindLog.Close()
	}
	if s.userLogPub != nil {
		s.userLogPub.Close()
	}
	if s.dsPwdLog != nil {
		s.dsPwdLog.Close()
	}
	// Previously missing: the auth binlog consumer was opened in New but
	// never closed, leaking its connection on shutdown.
	if s.authBinLog != nil {
		s.authBinLog.Close()
	}
	return
}

View File

@@ -0,0 +1,54 @@
package service
import (
"context"
"fmt"
"sync"
"testing"
"time"
"go-common/app/job/main/passport/conf"
idfgmdl "go-common/app/service/main/identify-game/model"
"go-common/library/log"
. "github.com/smartystreets/goconvey/convey"
)
var (
	// once guards one-time construction of the shared test service.
	once sync.Once
	// s is the service under test, built by startService.
	s *Service
)
// startService loads configuration, initializes logging, and constructs the
// shared Service instance used by all tests in this package.
func startService() {
	if err := conf.Init(); err != nil {
		panic(fmt.Sprintf("conf.Init() error(%v)", err))
	}
	// init log
	log.Init(conf.Conf.Xlog)
	s = New(conf.Conf)
}
// TestNew verifies that the constructor placed the built-in game appid at
// the head of gameAppIDs and that the URI config used downstream is set.
func TestNew(t *testing.T) {
	once.Do(startService)
	Convey("new", t, func() {
		So(s.gameAppIDs[0], ShouldEqual, _gameAppID)
		t.Logf("s.gameAppIDs: %v", s.gameAppIDs)
		So(s.c.URI, ShouldNotBeNil)
		So(s.c.URI.DelCache, ShouldNotBeEmpty)
		So(s.c.URI.SetToken, ShouldNotBeEmpty)
		t.Logf("s.c.URI: %+v", s.c.URI)
	})
}
// TestDelCache exercises the identify-game DelCache RPC with a dummy token;
// it requires the RPC backend referenced by conf to be reachable.
func TestDelCache(t *testing.T) {
	once.Do(startService)
	// NOTE(review): presumably gives the RPC client time to finish
	// connecting before the first call — confirm whether still needed.
	time.Sleep(time.Second * 1)
	Convey("del cache", t, func() {
		arg := &idfgmdl.CleanCacheArgs{
			Token: "foo",
		}
		err := s.igRPC.DelCache(context.Background(), arg)
		So(err, ShouldBeNil)
	})
}

View File

@@ -0,0 +1,123 @@
package service
import (
"context"
"encoding/json"
"strings"
"time"
"go-common/app/job/main/passport/model"
"go-common/library/log"
"go-common/library/queue/databus"
)
// userconsumeproc drains the user databus stream. Each decoded message is
// linked onto the tail of an in-order pending list (guarded by userMu) so
// usercommitproc can commit offsets in consumption order, then dispatched
// to the merge goroutine selected by mid so events for one user stay
// serialized on a single goroutine.
func (s *Service) userconsumeproc() {
	mergeRoutineNum := int64(s.c.Group.User.Num)
	msgs := s.dsUser.Messages()
	for {
		msg, ok := <-msgs
		if !ok {
			log.Error("s.userconsumeproc closed")
			return
		}
		// marked head to first commit
		m := &message{data: msg}
		p := new(model.PMsg)
		if err := json.Unmarshal(msg.Value, p); err != nil {
			// NOTE(review): an unparsable message is dropped without being
			// linked, so its offset is only committed when a later message
			// on the same partition succeeds — confirm this is intended.
			log.Error("json.Unmarshal(%s) error(%v)", string(msg.Value), err)
			continue
		}
		// Append to the tail so commit order matches consumption order.
		s.userMu.Lock()
		if s.userHead == nil {
			s.userHead = m
			s.userLast = m
		} else {
			s.userLast.next = m
			s.userLast = m
		}
		s.userMu.Unlock()
		m.object = p
		// use specify goroutine to merge messages
		s.userMergeChans[p.Data.Mid%mergeRoutineNum] <- m
		log.Info("userconsumeproc key:%s partition:%d offset:%d", msg.Key, msg.Partition, msg.Offset)
	}
}
// usercommitproc receives batches of processed messages, marks them done,
// and commits — per partition — the offset of the newest message in the
// contiguous done-prefix of the pending list built by userconsumeproc.
func (s *Service) usercommitproc() {
	commits := make(map[int32]*databus.Message, s.c.Group.User.Size)
	for {
		done := <-s.userDoneChan
		// merge partitions to commit offset
		for _, d := range done {
			d.done = true
		}
		// Advance the head past every completed message, remembering the
		// latest message seen for each partition.
		s.userMu.Lock()
		for ; s.userHead != nil && s.userHead.done; s.userHead = s.userHead.next {
			commits[s.userHead.data.Partition] = s.userHead.data
		}
		s.userMu.Unlock()
		for k, m := range commits {
			log.Info("usercommitproc committed, key:%s partition:%d offset:%d", m.Key, m.Partition, m.Offset)
			m.Commit()
			delete(commits, k)
		}
	}
}
// usermergeproc batches messages arriving on one merge channel. A batch is
// flushed either when it reaches Group.User.Size or on every ticker tick:
// qualifying payloads (aso_app_perm* tables with a non-empty action) go to
// setTokens, and all received messages are handed to usercommitproc so
// their offsets can be committed.
func (s *Service) usermergeproc(c chan *message) {
	var (
		max    = s.c.Group.User.Size
		merges = make([]*model.PMsg, 0, max)
		marked = make([]*message, 0, max)
		ticker = time.NewTicker(time.Duration(s.c.Group.User.Ticker))
	)
	// Stop the ticker when this goroutine exits (channel closed); it was
	// previously leaked on return.
	defer ticker.Stop()
	for {
		select {
		case msg, ok := <-c:
			if !ok {
				log.Error("s.usermergeproc closed")
				return
			}
			p, assertOk := msg.object.(*model.PMsg)
			if assertOk && strings.HasPrefix(p.Table, "aso_app_perm") && p.Action != "" {
				merges = append(merges, p)
			}
			marked = append(marked, msg)
			if len(marked) < max && len(merges) < max {
				continue
			}
		case <-ticker.C:
		}
		if len(merges) > 0 {
			s.setTokens(merges)
			merges = make([]*model.PMsg, 0, max)
		}
		if len(marked) > 0 {
			s.userDoneChan <- marked
			marked = make([]*message, 0, max)
		}
	}
}
// setTokens applies every buffered token mutation in order.
func (s *Service) setTokens(msgs []*model.PMsg) {
	for _, m := range msgs {
		s.setToken(m.Action, m.Data)
	}
}
// setToken stores a single token in the cache for an "insert" binlog event,
// retrying once per second until the write succeeds. Other actions, empty
// actions, and empty tokens are ignored.
func (s *Service) setToken(action string, t *model.Token) {
	if t == nil || t.Token == "" || action == "" {
		return
	}
	if action != "insert" {
		return
	}
	for s.d.SetToken(context.TODO(), t) != nil {
		time.Sleep(time.Second)
	}
}

View File

@@ -0,0 +1,32 @@
package service
import (
"context"
"time"
"go-common/library/log"
)
// syncPwdLog backfills historical pwd logs from the DB into HBase. It pages
// through records starting at the configured Sync.SyncPwdID, advancing the
// cursor past each stored record, and stops once a batch comes back empty.
func (s *Service) syncPwdLog() {
	id := s.c.Sync.SyncPwdID
	for {
		pwds, err := s.d.BatchGetPwdLog(context.Background(), id)
		if err != nil {
			log.Error("failed to batch get pwd log, s.d.BatchGetPwdLog(%d), error(%v)", id, err)
			time.Sleep(1 * time.Second)
			continue
		}
		log.Info("SyncPwdID (%d), len(pwds) (%d)", id, len(pwds))
		if len(pwds) == 0 {
			break
		}
		for _, pwd := range pwds {
			// Retry the same record until it lands in HBase. Previously a
			// failure did `continue` on the range loop, silently dropping
			// the failed row while later successes still advanced the
			// cursor past it.
			for {
				if err := s.d.AddPwdLogHBase(context.Background(), pwd); err != nil {
					// Fixed log message: this calls AddPwdLogHBase, the old
					// text named AddLoginLogHBase.
					log.Error("failed to add pwd log to hbase, service.dao.AddPwdLogHBase(%+v) error(%v)", pwd, err)
					time.Sleep(1 * time.Second)
					continue
				}
				break
			}
			id = pwd.ID
		}
	}
}

View File

@@ -0,0 +1,16 @@
{
"swagger": "2.0",
"info": {
"title": "go-common api",
"description": "api",
"version": "1.0",
"contact": {
"email": "lintanghui@bilibili.com"
},
"license": {
"name": "Apache 2.0",
"url": "http://www.apache.org/licenses/LICENSE-2.0.html"
}
},
"paths": {}
}