Create & Init Project...

This commit is contained in:
2019-04-22 18:49:16 +08:00
commit fc4fa37393
25440 changed files with 4054998 additions and 0 deletions

View File

@@ -0,0 +1,22 @@
# Bazel package for the push-live interface root: exports source filegroups
# consumed by the repository-wide build.
package(default_visibility = ["//visibility:public"])
# Every file in this directory; private helper for the tree-wide source list.
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)
# Transitive closure of this package's sources plus all sub-packages.
filegroup(
    name = "all-srcs",
    srcs = [
        ":package-srcs",
        "//app/interface/live/push-live/cmd:all-srcs",
        "//app/interface/live/push-live/conf:all-srcs",
        "//app/interface/live/push-live/dao:all-srcs",
        "//app/interface/live/push-live/http:all-srcs",
        "//app/interface/live/push-live/model:all-srcs",
        "//app/interface/live/push-live/service:all-srcs",
    ],
    tags = ["automanaged"],
)

View File

@@ -0,0 +1,60 @@
# v1.0.0
1.新增直播推送服务
# v1.0.1
1.优化推送逻辑
# v1.0.2
1.修改代码符合代码规范
2.修改单元自测
# v1.0.3
1.调用推送平台增加Authorization header防止敏感信息写入日志
2.优化代码规范
# v1.0.4
1.优化推送uuid逻辑
2.修改推送文案
# v1.0.5
1.推送文案模版化,支持产品随时调整
# v1.0.6
1.优化推送逻辑去除资源ping
2.修复mutex锁逻辑
# v1.0.7
1.推送添加group参数用于区分普通关注与特别关注
# v1.0.11
1.推送支持平滑间隔时间,时间后台动态可配
间隔时间内用户不会重复收到推送
# v1.0.12
1.修正uuid问题
# v1.0.13
1.修复因为redis pool active连接数不够导致的redis操作失败问题
# v1.0.14
1.升级hbase sdk至v2版本
# v1.1.1
1.支持直播通用类型消息推送
2.拆分逻辑,优化代码结构,完善自测
# v1.1.2
1.更新filter中redis为Dial方式
# v1.1.3
1.common message新增link_type字段
# v1.1.4
1.增加推送间隔为0的逻辑
# v1.2
1.新增每日推送配置逻辑
2.增加支持预约逻辑
# v1.2.1
1.增加开播提醒黑名单过滤逻辑

View File

@@ -0,0 +1,7 @@
# Owner
kuangxibin
# Author
kuangxibin
# Reviewer

View File

@@ -0,0 +1,12 @@
# See the OWNERS docs at https://go.k8s.io/owners
approvers:
- kuangxibin
labels:
- interface
- interface/live/push-live
- live
options:
no_parent_owners: true
reviewers:
- kuangxibin

View File

@@ -0,0 +1,14 @@
# push-live
# 项目简介
1. 直播推送服务:向关注/特别关注主播的用户推送开播提醒及直播通用消息
# 编译环境
# 依赖包
# 编译执行

View File

@@ -0,0 +1,43 @@
# Bazel package for the push-live binary (cmd).
package(default_visibility = ["//visibility:public"])
load(
    "@io_bazel_rules_go//go:def.bzl",
    "go_binary",
    "go_library",
)
# The service executable; embeds the library target below.
go_binary(
    name = "cmd",
    embed = [":go_default_library"],
    tags = ["automanaged"],
)
# main.go plus the test config shipped as runtime data.
go_library(
    name = "go_default_library",
    srcs = ["main.go"],
    data = ["push-live-test.toml"],
    importpath = "go-common/app/interface/live/push-live/cmd",
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
    deps = [
        "//app/interface/live/push-live/conf:go_default_library",
        "//app/interface/live/push-live/http:go_default_library",
        "//app/interface/live/push-live/service:go_default_library",
        "//library/log:go_default_library",
        "//library/net/trace:go_default_library",
    ],
)
# Source filegroups for the repository-wide build.
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)
filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,51 @@
package main
import (
"flag"
"os"
"os/signal"
"syscall"
"go-common/app/interface/live/push-live/conf"
"go-common/app/interface/live/push-live/http"
"go-common/app/interface/live/push-live/service"
"go-common/library/log"
"go-common/library/net/trace"
"time"
)
// main boots the push-live service: config, logging, tracing, the HTTP
// layer, then blocks until a termination signal arrives.
func main() {
	flag.Parse()
	if err := conf.Init(); err != nil {
		log.Error("conf.Init() error(%v)", err)
		panic(err)
	}
	// Logging/tracing are torn down in reverse order of setup.
	log.Init(conf.Conf.Log)
	trace.Init(conf.Conf.Tracer)
	defer trace.Close()
	defer log.Close()
	log.Info("push-live start")
	// Wire the service layer into the HTTP endpoints.
	srv := service.New(conf.Conf)
	http.Init(conf.Conf, srv)
	// init pprof conf.Conf.Perf
	// Wait for OS signals; SIGHUP is ignored, termination signals shut
	// the service down after a short grace period.
	sigCh := make(chan os.Signal, 1)
	signal.Notify(sigCh, syscall.SIGHUP, syscall.SIGQUIT, syscall.SIGTERM, syscall.SIGINT)
	for {
		sig := <-sigCh
		log.Info("push-live get a signal %s", sig.String())
		switch sig {
		case syscall.SIGHUP:
			// Ignored: keep running.
		case syscall.SIGQUIT, syscall.SIGTERM, syscall.SIGINT:
			srv.Close()
			log.Info("push-live exit")
			time.Sleep(time.Second)
			return
		default:
			return
		}
	}
}

View File

@@ -0,0 +1,143 @@
# push-live service configuration (TOML).
version = "1.0.0"
user = "root"
pid = "/tmp/push-live.pid"
dir = "/data/dev/go/src/go-common/app/interface/live/push-live"
perf = "0.0.0.0:6420"
env = "dev"
[log]
dir = "/data/log/push-live"
family = "push-live"
[tracer]
family = "push-live"
proto = "udp"
addr = "172.16.33.46:5140"
[bm]
[bm.inner]
addr = "0.0.0.0:9901"
maxListen = 10
timeout = "1s"
[bm.local]
addr = "0.0.0.0:9902"
maxListen = 10
timeout = "1s"
[mysql]
addr = "172.22.34.101:3312"
dsn = "livetestuat:livetestuat20180711@tcp(172.22.34.101:3312)/app_switch?timeout=5s&readTimeout=30s&writeTimeout=5s&parseTime=true&loc=Local&charset=utf8,utf8mb4"
active = 5
idle = 10
idleTimeout ="4h"
queryTimeout = "2s"
execTimeout = "2s"
tranTimeout = "2s"
[mysql.breaker]
window = "3s"
sleep = "100ms"
bucket = 10
ratio = 0.5
request = 100
[hbase]
master = ""
meta = ""
dialTimeout = "1s"
readTimeout = "2s"
readsTimeout = "5s"
writeTimeout = "2s"
writesTimeout = "5s"
[hbase.zookeeper]
root = ""
addrs = ["172.18.33.131:2181","172.18.33.168:2181","172.18.33.169:2181"]
timeout = "30s"
[blackListHBase]
master = ""
meta = ""
dialTimeout = "1s"
readTimeout = "2s"
readsTimeout = "5s"
writeTimeout = "2s"
writesTimeout = "5s"
[blackListHBase.zookeeper]
root = ""
addrs = ["172.19.40.5:2181","172.19.40.6:2181","172.19.40.7:2181"]
timeout = "30s"
[liveRoomSub]
key = "ec4c0820d525d67b"
secret = "e20f8f664bf10722efeb6aac0cc16011"
group = "StartLiveNotify-LiveLive-S"
topic = "StartLiveNotify-T"
action ="sub"
name = "interface/push-live"
proto = "tcp"
addr = "172.18.33.50:6205"
idle = 10
active = 100
dialTimeout = "10s"
readTimeout = "40s"
writeTimeout = "10s"
idleTimeout = "60s"
[liveCommonSub]
key = "ec4c0820d525d67b"
secret = "e20f8f664bf10722efeb6aac0cc16011"
group = "LivePushCommon-LiveLive-S"
topic = "LivePushCommon-T"
action ="sub"
name = "interface/push-live"
proto = "tcp"
addr = "172.18.33.50:6205"
idle = 10
active = 100
dialTimeout = "10s"
readTimeout = "40s"
writeTimeout = "10s"
idleTimeout = "60s"
[push]
multiAPI = "http://uat-api.bilibili.co/x/internal/push-strategy/task/add"
appID = 1
businessID = 4
businessToken = "n00nso54h75t5irj8phmunvymh4cl3yt"
linkType = 3
pushRetryTimes = 3
pushOnceLimit = 500000
defaultCopyWriting = "你关注的【%s】正在直播~"
specialCopyWriting = "你特别关注的【%s】正在直播~"
consumerProcNum = 10
intervalLimit = 5000
[push.PushFilterIgnores]
smooth = [101]
limit = [111]
[HTTPClient]
dial = "1s"
timeout = "5s"
keepAlive = "60s"
key = "fb06a25c6338edbc"
secret = "fd10bd177559780c2e4a44f1fa47fa83"
[HTTPClient.breaker]
window ="1s"
sleep ="10ms"
bucket = 10
ratio = 0.5
request = 100
[redis]
[redis.pushInterval]
name = "push-live/pushInterval"
proto = "tcp"
addr = "172.18.33.82:6379"
active = 20
idle = 10
dialTimeout = "50ms"
readTimeout = "100ms"
writeTimeout = "100ms"
idleTimeout = "80s"
expire = "2h"

View File

@@ -0,0 +1,40 @@
# Bazel package for the push-live configuration library.
package(default_visibility = ["//visibility:public"])
load(
    "@io_bazel_rules_go//go:def.bzl",
    "go_library",
)
# conf.go: config structs plus local-file / config-center loading.
go_library(
    name = "go_default_library",
    srcs = ["conf.go"],
    importpath = "go-common/app/interface/live/push-live/conf",
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
    deps = [
        "//library/cache/redis:go_default_library",
        "//library/conf:go_default_library",
        "//library/database/hbase.v2:go_default_library",
        "//library/database/sql:go_default_library",
        "//library/log:go_default_library",
        "//library/net/http/blademaster:go_default_library",
        "//library/net/trace:go_default_library",
        "//library/queue/databus:go_default_library",
        "//library/time:go_default_library",
        "//vendor/github.com/BurntSushi/toml:go_default_library",
    ],
)
# Source filegroups for the repository-wide build.
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)
filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,142 @@
package conf
import (
"errors"
"flag"
"go-common/library/cache/redis"
"go-common/library/conf"
"go-common/library/database/hbase.v2"
"go-common/library/database/sql"
"go-common/library/log"
bm "go-common/library/net/http/blademaster"
"go-common/library/net/trace"
"go-common/library/queue/databus"
xtime "go-common/library/time"
"github.com/BurntSushi/toml"
)
// global var
var (
	// confPath is the -conf flag value; when set, config is read from disk.
	confPath string
	// client talks to the remote config center when confPath is empty.
	client *conf.Client
	// Conf is the live, package-global configuration instance.
	Conf = &Config{}
)
// Config is the full configuration tree for the push-live service,
// decoded from TOML (local file or config center).
type Config struct {
	// Env is the runtime environment name (e.g. "dev").
	Env string
	// Log configures the elk logger.
	Log *log.Config
	// HTTPClient configures the outbound blademaster HTTP client.
	HTTPClient *bm.ClientConfig
	// BM holds the inner/local HTTP server configs.
	BM *HTTPServers
	// Tracer configures distributed tracing.
	Tracer *trace.Config
	// MySQL is the app_switch database config.
	MySQL *sql.Config
	// HBase holds the fan-relation cluster; BlackListHBase the push blacklist cluster.
	HBase          *HBaseConfig
	BlackListHBase *HBaseConfig
	// Databus subscriptions.
	LiveRoomSub   *databus.Config // room-open (开播提醒) events
	LiveCommonSub *databus.Config // generic live push (直播通用) events
	// Push holds push-platform endpoints, copywriting and filter knobs.
	Push *push
	// Redis holds the pushInterval redis instance config.
	Redis *Redis
}
// HBaseConfig wraps the base hbase config with separate read/write
// timeouts kept outside the embedded struct for compatibility.
type HBaseConfig struct {
	*hbase.Config
	WriteTimeout xtime.Duration
	ReadTimeout  xtime.Duration
}
// HTTPServers groups the two blademaster listeners.
type HTTPServers struct {
	Inner *bm.ServerConfig
	Local *bm.ServerConfig
}
// push holds push-platform API settings, retry/limit knobs, copywriting
// templates and the per-business filter ignore lists.
type push struct {
	MultiAPI           string
	AppID              int
	BusinessID         int
	BusinessToken      string
	LinkType           int
	PushRetryTimes     int
	PushOnceLimit      int
	DefaultCopyWriting string
	SpecialCopyWriting string
	ConsumerProcNum    int
	IntervalLimit      int
	// PushFilterIgnores lists business ids exempt from the smooth /
	// daily-limit filters respectively.
	PushFilterIgnores struct {
		Smooth, Limit []int
	}
}
// Redis holds the pushInterval redis pool config plus the key expiry.
type Redis struct {
	PushInterval *struct {
		*redis.Config
		Expire xtime.Duration
	}
}
// init registers the -conf flag before flag.Parse runs in main.
func init() {
	flag.StringVar(&confPath, "conf", "", "default config path")
}
// Init loads the configuration: from the local file named by the -conf
// flag when present, otherwise from the remote config center.
func Init() error {
	if confPath == "" {
		return remote()
	}
	return local()
}
// local decodes the TOML file at confPath into the global Conf.
func local() error {
	_, err := toml.DecodeFile(confPath, &Conf)
	return err
}
// remote creates the config-center client, performs the initial load,
// and spawns a goroutine that reloads the config on every change event.
func remote() (err error) {
	if client, err = conf.New(); err != nil {
		return
	}
	if err = load(); err != nil {
		return
	}
	go func() {
		for range client.Event() {
			log.Info("config reload")
			// Bug fix: the old code discarded load()'s error and logged
			// the outer (already nil) err, hiding every reload failure.
			if e := load(); e != nil {
				log.Error("config reload error (%v)", e)
			}
		}
	}()
	return
}
func load() (err error) {
var (
s string
ok bool
tmpConf *Config
)
if s, ok = client.Toml2(); !ok {
return errors.New("load config center error")
}
if _, err = toml.Decode(s, &tmpConf); err != nil {
return errors.New("could not decode config")
}
*Conf = *tmpConf
return
}

View File

@@ -0,0 +1,75 @@
# Bazel package for the dao layer (MySQL, HBase, redis and push HTTP access).
package(default_visibility = ["//visibility:public"])
load(
    "@io_bazel_rules_go//go:def.bzl",
    "go_test",
    "go_library",
)
# Unit tests; run from the package dir so the test TOML resolves.
go_test(
    name = "go_default_test",
    srcs = [
        "config_test.go",
        "d_test.go",
        "filter_test.go",
        "hbase_test.go",
        "push_test.go",
        "switch_test.go",
        "task_test.go",
    ],
    embed = [":go_default_library"],
    rundir = ".",
    tags = ["automanaged"],
    deps = [
        "//app/interface/live/push-live/conf:go_default_library",
        "//app/interface/live/push-live/model:go_default_library",
        "//library/cache/redis:go_default_library",
        "//library/log:go_default_library",
        "//vendor/github.com/smartystreets/goconvey/convey:go_default_library",
    ],
)
# The dao library proper.
go_library(
    name = "go_default_library",
    srcs = [
        "blacklist.go",
        "config.go",
        "dao.go",
        "filter.go",
        "hbase.go",
        "push.go",
        "switch.go",
        "task.go",
    ],
    importpath = "go-common/app/interface/live/push-live/dao",
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
    deps = [
        "//app/interface/live/push-live/conf:go_default_library",
        "//app/interface/live/push-live/model:go_default_library",
        "//library/cache/redis:go_default_library",
        "//library/database/hbase.v2:go_default_library",
        "//library/database/sql:go_default_library",
        "//library/log:go_default_library",
        "//library/net/http/blademaster:go_default_library",
        "//library/stat/prom:go_default_library",
        "//library/sync/errgroup:go_default_library",
        "//library/xstr:go_default_library",
        "//vendor/github.com/pkg/errors:go_default_library",
        "//vendor/github.com/tsuna/gohbase/hrpc:go_default_library",
    ],
)
# Source filegroups for the repository-wide build.
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)
filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,58 @@
package dao
import (
"bytes"
"context"
"github.com/pkg/errors"
"github.com/tsuna/gohbase/hrpc"
"go-common/app/interface/live/push-live/model"
"go-common/library/log"
"strconv"
"strings"
)
var (
	// _hbaseTable/_hbaseFamily locate the per-room push blacklist rows.
	_hbaseTable  = "live:push_blacklist"
	_hbaseFamily = "blacklist"
	// errLinkValueSplit is returned when a task has no usable link_value.
	// Fix: Go error strings must not end with punctuation.
	errLinkValueSplit = errors.New("link_value split result nil")
)
// GetBlackList loads the push blacklist for a task's room from HBase.
// The row key is the first comma-separated component of task.LinkValue
// (presumably the room id — see the log fields); every non-empty
// qualifier under the blacklist family is parsed as a mid.
func (d *Dao) GetBlackList(c context.Context, task *model.ApPushTask) (mids map[int64]bool, err error) {
	ctx, cancel := context.WithTimeout(c, d.blackListHBaseReadTimeout)
	defer cancel()
	// Bug fix: strings.Split never returns nil, so the old nil check was
	// dead code. Guard against an empty key instead.
	key := strings.Split(task.LinkValue, ",")[0]
	if key == "" {
		err = errLinkValueSplit
		return
	}
	mids = make(map[int64]bool)
	var result *hrpc.Result
	if result, err = d.blackListHBase.GetStr(ctx, _hbaseTable, key); err != nil {
		log.Error("[dao.blacklist|GetBlackList] d.blackListHBase.Get error(%v) querytable(%v), roomid(%s), task(%v)",
			err, _hbaseTable, key, task)
		return
	}
	if result == nil {
		return
	}
	fbytes := []byte(_hbaseFamily)
	for _, cell := range result.Cells {
		if cell == nil || !bytes.Equal(cell.Family, fbytes) || len(cell.Qualifier) == 0 {
			continue
		}
		// Non-numeric qualifiers are skipped silently (best effort).
		uid, e := strconv.ParseInt(string(cell.Qualifier), 10, 64)
		if e != nil {
			continue
		}
		mids[uid] = true
	}
	log.Info("[dao.blacklist|GetBlackList] get blacklist(%v), roomid(%s), task(%v)", mids, key, task)
	return
}

View File

@@ -0,0 +1,43 @@
package dao
import (
"context"
"go-common/app/interface/live/push-live/model"
"go-common/library/log"
)
const (
	// _getPushConfig selects the enabled push option types, ordered by `order`.
	_getPushConfig = "SELECT `type` FROM ap_push_config WHERE value=? ORDER BY `order` ASC" // 获取推送选项配置
	// _getPushInterval selects the push interval (stored in the `order` column, in minutes).
	_getPushInterval = "SELECT `order` FROM ap_push_config WHERE type=?" // 获取推送间隔时间
)
// GetPushConfig returns the `type` of every enabled row in ap_push_config
// (value = model.LivePushConfigOn), in `order` order.
func (d *Dao) GetPushConfig(c context.Context) (types []string, err error) {
	types = make([]string, 0)
	rows, err := d.db.Query(c, _getPushConfig, model.LivePushConfigOn)
	if err != nil {
		log.Error("[dao.config|GetPushConfig] db.Query() error(%v)", err)
		return
	}
	// Bug fix: rows was never closed, leaking a pool connection per call.
	defer rows.Close()
	for rows.Next() {
		var t string
		if err = rows.Scan(&t); err != nil {
			log.Error("[dao.config|GetPushConfig] rows.Scan() error(%v)", err)
			return
		}
		types = append(types, t)
	}
	// Surface errors that ended iteration early (previously swallowed).
	if err = rows.Err(); err != nil {
		log.Error("[dao.config|GetPushConfig] rows.Err() error(%v)", err)
	}
	return
}
// GetPushInterval returns the configured push interval in seconds; the
// database stores it in minutes.
func (d *Dao) GetPushInterval(c context.Context) (interval int32, err error) {
	var minutes int32
	if err = d.db.QueryRow(c, _getPushInterval, model.PushIntervalKey).Scan(&minutes); err != nil {
		log.Error("[dao.config|GetPushInterval] row.Scan() error(%v)", err)
		return
	}
	interval = minutes * 60 // min to sec
	return
}

View File

@@ -0,0 +1,26 @@
package dao
import (
"context"
"testing"
. "github.com/smartystreets/goconvey/convey"
)
// Test_config exercises Dao.GetPushConfig against the configured DB
// (integration test: requires the UAT database to be reachable).
func Test_config(t *testing.T) {
	initd()
	Convey("Test config get", t, func() {
		cf, err := d.GetPushConfig(context.TODO())
		t.Logf("the result included(%v) err(%v)", cf, err)
		So(err, ShouldEqual, nil)
	})
}
// Test_GetPushInterval exercises Dao.GetPushInterval (integration test).
func Test_GetPushInterval(t *testing.T) {
	initd()
	Convey("Test GetPushInterval", t, func() {
		cf, err := d.GetPushInterval(context.TODO())
		t.Logf("the result included(%v) err(%v)", cf, err)
		So(err, ShouldEqual, nil)
	})
}

View File

@@ -0,0 +1,17 @@
package dao
import (
"flag"
"go-common/app/interface/live/push-live/conf"
"path/filepath"
)
// d is the shared Dao under test.
var d *Dao
// initd points the conf loader at the checked-in UAT test TOML and
// builds the package-level Dao. Called at the top of every test.
func initd() {
	dir, _ := filepath.Abs("../cmd/push-live-test.toml")
	flag.Set("conf", dir)
	flag.Set("conf_env", "uat")
	conf.Init()
	d = New(conf.Conf)
}

View File

@@ -0,0 +1,96 @@
package dao
import (
"context"
"time"
"go-common/app/interface/live/push-live/conf"
"go-common/library/cache/redis"
"go-common/library/database/hbase.v2"
xsql "go-common/library/database/sql"
"go-common/library/log"
xhttp "go-common/library/net/http/blademaster"
"go-common/library/stat/prom"
)
// Dao bundles every backing store the push-live service talks to:
// MySQL (config/tasks), two HBase clusters (fan relations and the push
// blacklist, each with its own read timeout) and an HTTP client for the
// push platform.
type Dao struct {
	c                         *conf.Config
	db                        *xsql.DB
	httpClient                *xhttp.Client
	relationHBase             *hbase.Client
	relationHBaseReadTimeout  time.Duration
	blackListHBase            *hbase.Client
	blackListHBaseReadTimeout time.Duration
}
// Prometheus counters shared by the Prom* helpers below.
var (
	errorsCount = prom.BusinessErrCount
	infosCount  = prom.BusinessInfoCount
)
// New builds a Dao from the service config: MySQL pool, the relation and
// blacklist HBase clients (with their read timeouts) and the HTTP client.
func New(c *conf.Config) *Dao {
	return &Dao{
		c:                         c,
		db:                        xsql.NewMySQL(c.MySQL),
		httpClient:                xhttp.NewClient(c.HTTPClient),
		relationHBase:             hbase.NewClient(c.HBase.Config),
		relationHBaseReadTimeout:  time.Duration(c.HBase.ReadTimeout),
		blackListHBase:            hbase.NewClient(c.BlackListHBase.Config),
		blackListHBaseReadTimeout: time.Duration(c.BlackListHBase.ReadTimeout),
	}
}
// RedisOption returns the dial options (connect/read/write timeouts)
// configured for the pushInterval redis instance.
func (d *Dao) RedisOption() []redis.DialOption {
	cfg := d.c.Redis.PushInterval
	return []redis.DialOption{
		redis.DialConnectTimeout(time.Duration(cfg.DialTimeout)),
		redis.DialReadTimeout(time.Duration(cfg.ReadTimeout)),
		redis.DialWriteTimeout(time.Duration(cfg.WriteTimeout)),
	}
}
// Close releases every resource held by the Dao. All closers are
// attempted; the first error encountered is returned (previously the
// db error silently overwrote the hbase error, and the blacklist HBase
// client was never closed at all).
func (d *Dao) Close() (err error) {
	if e := d.relationHBase.Close(); e != nil {
		log.Error("[dao.dao|Close] d.relationHBase.Close() error(%v)", e)
		PromError("hbase:close")
		err = e
	}
	if e := d.blackListHBase.Close(); e != nil {
		log.Error("[dao.dao|Close] d.blackListHBase.Close() error(%v)", e)
		PromError("hbase:close")
		if err == nil {
			err = e
		}
	}
	if e := d.db.Close(); e != nil {
		log.Error("[dao.dao|Close] d.db.Close() error(%v)", e)
		PromError("db:close")
		if err == nil {
			err = e
		}
	}
	return
}
// PromError increments the business error counter under name.
func PromError(name string) {
	errorsCount.Incr(name)
}
// PromInfo increments the business info counter under name.
func PromInfo(name string) {
	infosCount.Incr(name)
}
// PromInfoAdd adds value to the business info counter under name.
func PromInfoAdd(name string, value int64) {
	infosCount.Add(name, value)
}
// Ping verifies that MySQL and the relation HBase cluster are reachable,
// recording a prometheus error on failure.
func (d *Dao) Ping(c context.Context) error {
	if err := d.db.Ping(c); err != nil {
		PromError("mysql:Ping")
		log.Error("[dao.dao|Ping] d.db.Ping error(%v)", err)
		return err
	}
	if err := d.relationHBase.Ping(c); err != nil {
		PromError("hbase:Ping")
		log.Error("[dao.dao|Ping] d.relationHBase.Ping error(%v)", err)
		return err
	}
	return nil
}

View File

@@ -0,0 +1,263 @@
package dao
import (
"context"
"fmt"
"go-common/app/interface/live/push-live/model"
"go-common/library/cache/redis"
"go-common/library/log"
)
const (
	_intervalUserkey   = "i:%d"     // per-user smoothing-window key (用户推送间隔缓存)
	_limitUserDailyKey = "daily:%d" // per-user remaining daily quota key (用户每日推送额度缓存)
	_defaultPushLimit  = 4          // default max pushes per user per day
)
// FilterConfig carries everything a Filter needs to decide whether a
// mid should receive this push.
type FilterConfig struct {
	// Business is the push business id; it selects which filters apply.
	Business int
	// IntervalExpired is the smoothing-window TTL in seconds.
	IntervalExpired int32
	// IntervalValue is the value stored under the interval key —
	// presumably the room/link id; verify against callers.
	IntervalValue string
	// DailyExpired is the TTL (seconds) of the daily-quota key.
	DailyExpired float64
	// Task is the push task being filtered.
	Task *model.ApPushTask
}
// Filter pairs a FilterConfig with a dedicated redis connection.
type Filter struct {
	conf *FilterConfig
	conn redis.Conn
}
// FilterChain maps a filter name to its predicate: the bool result is
// "filter this mid out".
type FilterChain map[string]func(ctx context.Context, mid int64) (bool, error)
// NewFilter dials a dedicated redis connection for the pushInterval
// instance and wraps it together with the given filter configuration.
func (d *Dao) NewFilter(conf *FilterConfig) (*Filter, error) {
	rc := d.c.Redis.PushInterval
	conn, err := redis.Dial(rc.Proto, rc.Addr, d.RedisOption()...)
	if err != nil {
		log.Error("[dao.filter|NewFilter] redis.Dial error(%v), conf(%v)", err, conf)
		return nil, err
	}
	return &Filter{conf: conf, conn: conn}, nil
}
// NewFilterChain assembles the filters that apply to f's business: an
// optional daily-quota filter plus an optional smoothing filter, where
// activity businesses use the appointment variant.
func (d *Dao) NewFilterChain(f *Filter) FilterChain {
	chain := FilterChain{}
	if d.needLimit(f.conf.Business) {
		chain["limit"] = f.dailyLimitFilter
	}
	if !d.needSmooth(f.conf.Business) || f.conf.IntervalExpired <= 0 {
		return chain
	}
	if f.conf.Business == model.ActivityBusiness {
		chain["smooth"] = f.appointSmoothFilter
	} else {
		chain["smooth"] = f.intervalSmoothFilter
	}
	return chain
}
// needSmooth reports whether business is subject to the smoothing
// filter, i.e. not listed in Push.PushFilterIgnores.Smooth.
func (d *Dao) needSmooth(business int) bool {
	return !ignoreFilter(business, d.c.Push.PushFilterIgnores.Smooth)
}
// needLimit reports whether business is subject to the daily push
// limit, i.e. not listed in Push.PushFilterIgnores.Limit.
func (d *Dao) needLimit(business int) bool {
	return !ignoreFilter(business, d.c.Push.PushFilterIgnores.Limit)
}
// Done releases the filter's redis connection; it is safe to call even
// when the connection was never established.
func (f *Filter) Done() {
	if f.conn == nil {
		return
	}
	f.conn.Close()
}
// dailyLimitFilter reports whether mid has exhausted today's push quota.
// A missing key means no push has been counted today, so the mid passes.
func (f *Filter) dailyLimitFilter(ctx context.Context, mid int64) (bool, error) {
	quota, err := redis.Int(f.conn.Do("GET", fmt.Sprintf(_limitUserDailyKey, mid)))
	switch {
	case err == redis.ErrNil:
		// First push today: no key yet, do not filter.
		return false, nil
	case err != nil:
		// Real redis failure.
		return false, err
	case quota <= 0:
		// Quota used up: filter this mid out.
		return true, nil
	default:
		return false, nil
	}
}
// intervalSmoothFilter reports whether mid was already pushed inside the
// smoothing window. SET ... NX returns "OK" only when no interval key
// exists; a nil reply means the key is present, so the mid is filtered.
func (f *Filter) intervalSmoothFilter(ctx context.Context, mid int64) (bool, error) {
	key := fmt.Sprintf(_intervalUserkey, mid)
	reply, err := f.conn.Do("SET", key, f.conf.IntervalValue, "EX", f.conf.IntervalExpired, "NX")
	if err != nil {
		return false, err
	}
	return reply == nil, nil
}
// appointSmoothFilter is the smoothing filter for appointment (activity)
// pushes. Like intervalSmoothFilter it issues SET ... NX; when the key
// already exists it additionally compares the stored value with the
// current IntervalValue and only filters when they match, so a push for
// a different room within the window still goes through.
func (f *Filter) appointSmoothFilter(ctx context.Context, mid int64) (b bool, err error) {
	var reply interface{}
	key := fmt.Sprintf(_intervalUserkey, mid)
	reply, err = f.conn.Do("SET", key, f.conf.IntervalValue, "EX", f.conf.IntervalExpired, "NX")
	if err != nil {
		return
	}
	// SET NX: nil reply => key exists; "OK" => key was created.
	if reply == nil {
		// Activity appointments have a special rule: re-read the stored
		// value to decide.
		reply, err = redis.String(f.conn.Do("GET", key))
		if err != nil {
			return
		}
		// Same room (same IntervalValue) within the window => filter.
		if reply == f.conf.IntervalValue {
			b = true
		}
		return
	}
	return
}
// BatchFilter runs every filter in filterChain over mids and returns the
// mids that passed all of them. A mid is dropped both when a filter
// matches it and when a filter errors on it (best-effort: erroring mids
// are not pushed). Filtered and erroring mids are tallied per filter
// name for the summary logs below.
func (f *Filter) BatchFilter(ctx context.Context, filterChain FilterChain, mids []int64) (resMids []int64) {
	if len(mids) == 0 || len(filterChain) == 0 {
		return
	}
	var (
		isFiltered bool
		err        error
		filterMids = make(map[string][]int64)
		errMids    = make(map[string][]int64)
	)
	defer func() {
		filterMids = nil
		errMids = nil
	}()
	resMids = make([]int64, 0, len(mids))
	// Pre-size one bucket per filter for dropped / erroring mids.
	for name := range filterChain {
		filterMids[name] = make([]int64, 0, len(mids))
		errMids[name] = make([]int64, 0, len(mids))
	}
MidLoop:
	for _, mid := range mids {
		for name, fc := range filterChain {
			isFiltered, err = fc(ctx, mid)
			// Error: record and move to the next mid (this mid is dropped).
			if err != nil {
				errMids[name] = append(errMids[name], mid)
				continue MidLoop
			}
			// Matched by a filter: record and move to the next mid.
			if isFiltered {
				filterMids[name] = append(filterMids[name], mid)
				continue MidLoop
			}
		}
		// Survived every filter: eligible for push.
		resMids = append(resMids, mid)
	}
	// Summary logs (note: the err logged below is the last error seen).
	for name, ids := range filterMids {
		if len(ids) == 0 {
			continue
		}
		log.Info("[dao.filter|BatchFilter] BatchFilter filterMids, task(%v), len(%d), name(%s), mids(%d)",
			f.conf.Task, len(ids), name, len(mids))
	}
	for name, ids := range errMids {
		if len(ids) == 0 {
			continue
		}
		log.Error("[dao.filter|BatchFilter] BatchFilter errMids, task(%v), len(%d), name(%s), err(%v)",
			f.conf.Task, len(ids), name, err)
	}
	return
}
// BatchDecreaseLimit decreases the remaining daily push quota for every
// mid, seeding missing keys at the default limit minus one. total counts
// the mids actually updated. Per-mid redis failures are skipped so one
// bad key cannot abort the batch; the last such failure is reported via
// err (the old code shadowed err, so it was always nil, and its deferred
// log dereferenced f.conf even when f was nil, panicking).
func (f *Filter) BatchDecreaseLimit(ctx context.Context, mids []int64) (total int, err error) {
	if f == nil {
		return
	}
	defer func() {
		f.Done()
		log.Info("[dao.filter|BatchDecreaseLimit] business(%d), input(%d), exec(%d), err(%v)",
			f.conf.Business, len(mids), total, err)
	}()
	if len(mids) == 0 {
		return
	}
	initLeft := _defaultPushLimit - 1
	for _, mid := range mids {
		key := fmt.Sprintf(_limitUserDailyKey, mid)
		left, e := redis.Int(f.conn.Do("GET", key))
		if e != nil {
			if e == redis.ErrNil {
				// No quota recorded yet today: seed one below the default.
				f.conn.Do("SET", key, initLeft, "EX", f.conf.DailyExpired)
				total++
			} else {
				// Remember the failure but keep processing the batch.
				err = e
			}
			continue
		}
		f.conn.Do("SET", key, left-1, "EX", f.conf.DailyExpired)
		total++
	}
	return
}
// ignoreFilter reports whether business appears in ignores, meaning the
// corresponding filter should be skipped for that business.
func ignoreFilter(business int, ignores []int) bool {
	for _, ig := range ignores {
		if ig == business {
			return true
		}
	}
	return false
}
// GetIntervalKey returns the redis key guarding mid's smoothing window.
func GetIntervalKey(mid int64) string {
	return fmt.Sprintf(_intervalUserkey, mid)
}
// GetDailyLimitKey returns the redis key holding mid's remaining daily
// push quota.
func GetDailyLimitKey(mid int64) string {
	return fmt.Sprintf(_limitUserDailyKey, mid)
}

View File

@@ -0,0 +1,335 @@
package dao
import (
"context"
. "github.com/smartystreets/goconvey/convey"
"go-common/app/interface/live/push-live/model"
"go-common/library/cache/redis"
"go-common/library/log"
"math/rand"
"testing"
)
// setRedis writes key=value with a TTL through the test connection.
func setRedis(conn redis.Conn, key string, value interface{}, ttl int32) error {
	_, err := conn.Do("SET", key, value, "EX", ttl)
	return err
}
// delRedis removes key through the test connection.
func delRedis(conn redis.Conn, key string) error {
	_, err := conn.Do("DEL", key)
	return err
}
// filterClean removes every per-mid filter key created by a test and
// releases the filter's connection.
func filterClean(f *Filter, mids []int64) {
	for _, mid := range mids {
		keys := []string{
			GetDailyLimitKey(mid),
			GetIntervalKey(mid),
		}
		for _, key := range keys {
			delRedis(f.conn, key)
		}
	}
	f.Done()
	f = nil
}
// TestDao_needSmooth checks the smooth-ignore list from the test config:
// business 101 is in PushFilterIgnores.Smooth, others are not.
func TestDao_needSmooth(t *testing.T) {
	initd()
	Convey("test business need smooth", t, func() {
		var (
			business int
			b        bool
		)
		Convey("test need smooth", func() {
			business = rand.Intn(100)
			b = d.needSmooth(business)
			So(b, ShouldEqual, true)
			business = 111
			b = d.needSmooth(business)
			So(b, ShouldEqual, true)
		})
		Convey("test no need smooth", func() {
			business = 101
			b = d.needSmooth(business)
			So(b, ShouldEqual, false)
		})
	})
}
// TestDao_needLimit mirrors the smooth test for the limit-ignore list.
// NOTE(review): both branches call needSmooth, not needLimit — looks
// like a copy/paste slip; confirm intent before relying on this test.
func TestDao_needLimit(t *testing.T) {
	initd()
	Convey("test business need limit", t, func() {
		var (
			business int
			b        bool
		)
		Convey("test need limit", func() {
			business = rand.Intn(110)
			b = d.needSmooth(business)
			So(b, ShouldEqual, true)
		})
		Convey("test no need smooth", func() {
			business = 111
			b = d.needSmooth(business)
			So(b, ShouldEqual, true)
		})
	})
}
func TestDao_NewFilterChain(t *testing.T) {
initd()
Convey("test new filter chain", t, func() {
var (
f *Filter
conf *FilterConfig
fc FilterChain
err error
)
Convey("test business no need to filter", func() {
conf = &FilterConfig{
Business: 111,
}
f, err = d.NewFilter(conf)
So(err, ShouldBeNil)
fc = d.NewFilterChain(f)
So(len(fc), ShouldEqual, 0)
})
Convey("test business with filter", func() {
conf = &FilterConfig{
Business: 101,
}
f, err = d.NewFilter(conf)
So(err, ShouldBeNil)
fc = d.NewFilterChain(f)
So(len(fc), ShouldEqual, 1)
// both business and IntervalExpired is necessary
conf = &FilterConfig{
Business: rand.Intn(100),
IntervalExpired: rand.Int31(),
}
f, err = d.NewFilter(conf)
So(err, ShouldBeNil)
fc = d.NewFilterChain(f)
So(len(fc), ShouldEqual, 2)
})
})
}
func TestDao_dailyLimit(t *testing.T) {
var (
ctx = context.Background()
mid int64
key string
f *Filter
err error
b bool
)
Convey("test daily limit filter", t, func() {
mid = rand.Int63n(999999)
key = GetDailyLimitKey(mid)
filterConf := &FilterConfig{}
f, err = d.NewFilter(filterConf)
So(err, ShouldBeNil)
log.Info("TestDao_dailyLimit mid(%d), key(%s), filter(%v)", mid, key, f)
// del key first
err = delRedis(f.conn, key)
So(err, ShouldBeNil)
// try get with nil return
b, err = f.dailyLimitFilter(ctx, mid)
So(b, ShouldEqual, false)
So(err, ShouldBeNil)
// then set a valid value
setRedis(f.conn, key, rand.Intn(4)+1, 30)
b, err = f.dailyLimitFilter(ctx, mid)
So(b, ShouldEqual, false)
So(err, ShouldBeNil)
// then set value should be filtered
setRedis(f.conn, key, -1, 30)
b, err = f.dailyLimitFilter(ctx, mid)
So(b, ShouldEqual, true)
So(err, ShouldBeNil)
// then test with conn error
delRedis(f.conn, key)
f.Done()
b, err = f.dailyLimitFilter(ctx, mid)
So(err, ShouldNotBeNil)
})
}
func TestDao_intervalSmooth(t *testing.T) {
var (
ctx = context.Background()
mid int64
key string
f *Filter
err error
b bool
)
Convey("test interval smooth filter", t, func() {
mid = rand.Int63n(999999)
key = GetIntervalKey(mid)
// new filter
task := &model.ApPushTask{
LinkValue: "test",
}
fc := &FilterConfig{
IntervalExpired: 300,
Task: task,
}
f, err = d.NewFilter(fc)
So(err, ShouldBeNil)
log.Info("TestDao_intervalSmooth mid(%d), key(%s), filter(%v)", mid, key, f)
// del key first
err = delRedis(f.conn, key)
So(err, ShouldBeNil)
// first setnx should success
b, err = f.intervalSmoothFilter(ctx, mid)
So(b, ShouldEqual, false)
So(err, ShouldBeNil)
// second setnx should fail
b, err = f.intervalSmoothFilter(ctx, mid)
So(b, ShouldEqual, true)
So(err, ShouldBeNil)
// test error
delRedis(f.conn, key)
f.Done()
b, err = f.intervalSmoothFilter(ctx, mid)
So(err, ShouldNotBeNil)
})
}
func TestDao_BatchFilter(t *testing.T) {
initd()
Convey("test mids filter by different business", t, func() {
var (
ctx = context.Background()
business int
mids, resMids []int64
conf *FilterConfig
fc FilterChain
f *Filter
err error
)
Convey("test empty mids or filter chain", func() {
// empty mids
business = rand.Int()
conf = &FilterConfig{Business: business}
f, err = d.NewFilter(conf)
So(err, ShouldBeNil)
fc = d.NewFilterChain(f)
resMids = f.BatchFilter(ctx, fc, mids)
So(len(resMids), ShouldEqual, 0)
// empty fc with business 111
business = 111
conf = &FilterConfig{Business: business}
f, err = d.NewFilter(conf)
So(err, ShouldBeNil)
fc = d.NewFilterChain(f)
So(len(fc), ShouldEqual, 0)
resMids = f.BatchFilter(ctx, fc, mids)
So(len(resMids), ShouldEqual, 0)
// test business 101 limit filter case
business = 101
total := 10
for i := 0; i < total; i++ {
mids = append(mids, rand.Int63())
}
conf = &FilterConfig{Business: business}
f, err = d.NewFilter(conf)
So(err, ShouldBeNil)
fc = d.NewFilterChain(f)
So(len(fc), ShouldEqual, 1)
resMids = f.BatchFilter(ctx, fc, mids)
So(len(resMids), ShouldEqual, total)
// clean test mids
filterClean(f, mids)
})
Convey("test filter", func() {
var b bool
business = rand.Intn(99) + 1 // should through all filters
total := 10
shouldFilterMids := make([]int64, 0, total)
for i := 0; i < total; i++ {
mid := rand.Int63()
if i%3 == 0 {
shouldFilterMids = append(shouldFilterMids, mid)
}
mids = append(mids, mid)
}
task := &model.ApPushTask{
LinkValue: "test",
}
conf = &FilterConfig{
Business: business,
IntervalExpired: 300,
DailyExpired: 300,
Task: task}
f, err = d.NewFilter(conf)
So(err, ShouldBeNil)
fc = d.NewFilterChain(f)
So(len(fc), ShouldEqual, 2)
// init should filtered mids, half interval smooth and another daily limit
for i, mid := range shouldFilterMids {
if i%2 == 0 {
b, err = f.intervalSmoothFilter(ctx, mid)
So(b, ShouldEqual, false)
} else {
key := GetDailyLimitKey(mid)
err = setRedis(f.conn, key, 0, int32(f.conf.DailyExpired))
}
So(err, ShouldBeNil)
}
// do filter
resMids = f.BatchFilter(ctx, fc, mids)
So(len(resMids), ShouldEqual, len(mids)-len(shouldFilterMids))
// clean
filterClean(f, mids)
})
})
}
func TestDao_BatchDecreaseLimit(t *testing.T) {
initd()
Convey("test batch decrease daily limit", t, func() {
var (
ctx = context.Background()
mids []int64
total, limitTotal int
conf *FilterConfig
f *Filter
err error
)
total = 10
for i := 0; i < total; i++ {
mids = append(mids, rand.Int63())
}
log.Info("TestDao_BatchDecreaseLimit mids(%v)", mids)
conf = &FilterConfig{
DailyExpired: 300,
}
f, err = d.NewFilter(conf)
So(err, ShouldBeNil)
// do limit decrease
limitTotal, err = f.BatchDecreaseLimit(ctx, mids)
So(err, ShouldBeNil)
So(limitTotal, ShouldEqual, total)
// clean
filterClean(f, mids)
})
}

View File

@@ -0,0 +1,140 @@
package dao
import (
"bytes"
"context"
"crypto/md5"
"encoding/json"
"fmt"
"github.com/tsuna/gohbase/hrpc"
"go-common/app/interface/live/push-live/model"
"go-common/library/log"
"go-common/library/sync/errgroup"
"sync"
)
// _hbaseShard is the number of row shards an upper's fans are spread over.
const _hbaseShard = 200
var (
	// hbaseTable/hbaseFamily locate the fan-relation rows.
	hbaseTable   = "ugc:PushArchive"
	hbaseFamily  = "relation"
	hbaseFamilyB = []byte(hbaseFamily)
)
// Fans gets the upper's fans, reading the _hbaseShard row shards in
// parallel. Depending on types it fills fans (normal follows), fansSP
// (special follows) or both (model.RelationAll).
func (d *Dao) Fans(c context.Context, upper int64, types int) (fans map[int64]bool, fansSP map[int64]bool, err error) {
	var mutex sync.Mutex
	fans = make(map[int64]bool)
	fansSP = make(map[int64]bool)
	group := errgroup.Group{}
	for i := 0; i < _hbaseShard; i++ {
		shard := int64(i)
		group.Go(func() (e error) {
			key := _rowKey(upper, shard)
			relations, e := d.fansByKey(context.TODO(), key)
			if e != nil {
				return
			}
			mutex.Lock()
			// Bug fix: unlock via defer — the early return on an unknown
			// `types` value used to leave the mutex locked, deadlocking
			// every remaining shard goroutine.
			defer mutex.Unlock()
			for fansID, fansType := range relations {
				switch types {
				// normal follows only
				case model.RelationAttention:
					if fansType == types {
						fans[fansID] = true
					}
				// special follows only
				case model.RelationSpecial:
					if fansType == types {
						fansSP[fansID] = true
					}
				// both kinds, routed into their respective maps
				case model.RelationAll:
					if fansType == model.RelationSpecial {
						fansSP[fansID] = true
					} else if fansType == model.RelationAttention {
						fans[fansID] = true
					}
				default:
					return
				}
			}
			return
		})
	}
	// Bug fix: propagate the first shard error instead of discarding it.
	err = group.Wait()
	return
}
// SeparateFans partitions fansIn into special follows (fansSP) and
// normal follows (fans), by looking up the upper's special-follow set
// across all row shards in parallel. Partial results are still returned
// alongside any shard error.
func (d *Dao) SeparateFans(c context.Context, upper int64, fansIn map[int64]bool) (fans map[int64]bool, fansSP map[int64]bool, err error) {
	var mutex sync.Mutex
	special := make(map[int64]bool)
	fans = make(map[int64]bool)
	fansSP = make(map[int64]bool)
	group := errgroup.Group{}
	for i := 0; i < _hbaseShard; i++ {
		shard := int64(i)
		group.Go(func() (e error) {
			key := _rowKey(upper, shard)
			relations, e := d.fansByKey(context.TODO(), key)
			if e != nil {
				return
			}
			mutex.Lock()
			defer mutex.Unlock()
			// Collect every special follow in this shard.
			for fansID, fansType := range relations {
				if fansType == model.RelationSpecial {
					special[fansID] = true
				}
			}
			return
		})
	}
	// Bug fix: surface the first shard error (previously discarded).
	err = group.Wait()
	for id := range fansIn {
		if _, ok := special[id]; ok {
			// special follow
			fansSP[id] = true
		} else {
			// everything else counts as a normal follow
			fans[id] = true
		}
	}
	return
}
// _rowKey builds the HBase row key for one of an upper's fan shards:
// the md5 hex digest of "<upper>_<shard mod _hbaseShard>".
func _rowKey(upper, fans int64) string {
	shard := fans % _hbaseShard
	raw := fmt.Sprintf("%d_%d", upper, shard)
	return fmt.Sprintf("%x", md5.Sum([]byte(raw)))
}
// fansByKey reads one shard row from HBase and decodes the JSON value of
// the first cell in the "relation" family into a fan-mid -> relation-type
// map. A nil result (row absent) returns an empty map and nil error.
func (d *Dao) fansByKey(c context.Context, key string) (relations map[int64]int, err error) {
	relations = make(map[int64]int)
	var result *hrpc.Result
	ctx, cancel := context.WithTimeout(c, d.relationHBaseReadTimeout)
	defer cancel()
	if result, err = d.relationHBase.GetStr(ctx, hbaseTable, key); err != nil {
		// BUG fixed: the original logged string(query.Table()) on a nil
		// *hrpc.Get, panicking on every HBase error; log the table/key
		// we actually queried instead.
		log.Error("d.relationHBase.Get error(%v) table(%s) key(%s)", err, hbaseTable, key)
		// PromError("hbase:Get")
		return
	} else if result == nil {
		return
	}
	for _, cell := range result.Cells {
		if cell != nil && bytes.Equal(cell.Family, hbaseFamilyB) {
			if err = json.Unmarshal(cell.Value, &relations); err != nil {
				log.Error("json.Unmarshal() error(%v)", err)
				return
			}
			break
		}
	}
	return
}

View File

@@ -0,0 +1,43 @@
package dao
import (
"context"
"go-common/app/interface/live/push-live/model"
"testing"
. "github.com/smartystreets/goconvey/convey"
)
// Test_fans1 checks Fans consistency: the special-follow sets returned by
// the per-type queries must together equal the RelationAll query's set.
func Test_fans1(t *testing.T) {
	initd()
	Convey("Parse Json To Struct", t, func() {
		fan := int64(27515316)
		f, fsp1, err := d.Fans(context.TODO(), fan, model.RelationAttention)
		t.Logf("the included(%v) includedSP(%v) err(%v)", f, fsp1, err)
		f, fsp2, err := d.Fans(context.TODO(), fan, model.RelationSpecial)
		t.Logf("the included(%v) includedSP(%v) err(%v)", f, fsp2, err)
		f, fsp3, err := d.Fans(context.TODO(), fan, model.RelationAll)
		t.Logf("the included(%v) includedSP(%v) err(%v)", f, fsp3, err)
		So(len(fsp1)+len(fsp2), ShouldEqual, len(fsp3))
	})
}

// Test_fans2 smoke-tests SeparateFans with a fixed fan set; it only logs
// the split and asserts nothing about its content.
func Test_fans2(t *testing.T) {
	initd()
	Convey("Parse Json To Struct", t, func() {
		upper := int64(27515316)
		fans := make(map[int64]bool)
		fans[1232032] = true
		fans[21231134] = true
		fans[27515398] = true
		fans[27515275] = true
		f1, f2, err := d.SeparateFans(context.TODO(), upper, fans)
		t.Logf("the included(%v) includedSP(%v) err(%v)", f1, f2, err)
		So(0, ShouldEqual, 0)
	})
}

View File

@@ -0,0 +1,179 @@
package dao
import (
"bytes"
"context"
"crypto/md5"
"encoding/hex"
"fmt"
"go-common/app/interface/live/push-live/model"
"go-common/library/log"
"go-common/library/xstr"
"io"
"mime/multipart"
"net/http"
"net/url"
"sort"
"strconv"
"time"
)
// _response is the push platform's reply envelope: Code is the business
// error code (0 = ok) and Data the number of accepted pushes.
type _response struct {
	Code int `json:"code"`
	Data int `json:"data"`
}
// BatchPush pushes the fan list in chunks of PushOnceLimit, retrying each
// chunk up to PushRetryTimes, and returns the number of mids actually
// delivered. NOTE: fans is drained in place — the caller's slice is empty
// when this returns.
func (d *Dao) BatchPush(fans *[]int64, task *model.ApPushTask) (total int) {
	limit := d.c.Push.PushOnceLimit
	retry := d.c.Push.PushRetryTimes
	var times int
	for {
		var (
			mids []int64
			err  error
		)
		// each chunk gets its own uuid (times feeds into the digest)
		uuid := d.GetUUID(task, times)
		l := len(*fans)
		if l == 0 {
			break
		} else if l <= limit {
			mids = (*fans)[:l]
		} else {
			mids = (*fans)[:limit]
			l = limit
		}
		*fans = (*fans)[l:]
		for i := 0; i < retry; i++ {
			// stop retrying as soon as one delivery succeeds
			if err = d.Push(mids, task, uuid); err == nil {
				total += len(mids) // delivered count for this chunk
				break
			}
			time.Sleep(time.Duration(time.Second * 3))
		}
		times++
		if err != nil {
			// still failing after all retries: log so ELK alerting fires
			log.Error("[dao.push|BatchPush] retry push failed. error(%+v), retry times(%d), task(%+v)", err, retry, task)
		}
	}
	return
}
// Push submits one push request for the given mids. The multipart form
// carries the business fields, the query string carries the ts/appkey/sign
// signature, and the business token travels in the Authorization header so
// it never appears in request logs.
func (d *Dao) Push(fans []int64, task *model.ApPushTask, uuid string) (err error) {
	if len(fans) == 0 {
		log.Info("[dao.push|Push] empty fans. task(%+v)", task)
		return
	}
	// business parameters (id and token depend on the push group)
	businessID, token := d.getPushBusiness(task.Group)
	buf := new(bytes.Buffer)
	w := multipart.NewWriter(buf)
	w.WriteField("app_id", strconv.Itoa(d.c.Push.AppID))
	w.WriteField("business_id", strconv.Itoa(businessID))
	w.WriteField("alert_title", d.GetPushTemplate(task.Group, task.AlertTitle))
	w.WriteField("alert_body", task.AlertBody)
	w.WriteField("mids", xstr.JoinInts(fans))
	w.WriteField("link_type", strconv.Itoa(task.LinkType))
	w.WriteField("link_value", task.LinkValue)
	w.WriteField("expire_time", strconv.Itoa(task.ExpireTime))
	w.WriteField("group", task.Group)
	w.WriteField("uuid", uuid)
	w.Close()
	// request signature
	query := map[string]string{
		"ts":     strconv.FormatInt(time.Now().Unix(), 10),
		"appkey": d.c.HTTPClient.Key,
	}
	query["sign"] = d.getSign(query, d.c.HTTPClient.Secret)
	requestURL := fmt.Sprintf("%s?ts=%s&appkey=%s&sign=%s", d.c.Push.MultiAPI, query["ts"], query["appkey"], query["sign"])
	// request
	req, err := http.NewRequest(http.MethodPost, requestURL, buf)
	if err != nil {
		log.Error("[dao.push|Push] http.NewRequest error(%+v), url(%s), uuid(%s), task(%+v)",
			err, requestURL, uuid, task)
		PromError("[dao.push|Push] http:NewRequest")
		return
	}
	req.Header.Set("Content-Type", w.FormDataContentType())
	req.Header.Set("Authorization", "token="+token)
	res := &_response{}
	if err = d.httpClient.Do(context.TODO(), req, res); err != nil {
		log.Error("[dao|push|Push] httpClient.Do error(%+v), url(%s), uuid(%s), task(%+v)",
			err, requestURL, uuid, task)
		PromError("[dao.push|Push] http:Do")
		return
	}
	// response: Code must be 0 and Data (accepted count) non-zero
	if res.Code != 0 || res.Data == 0 {
		log.Error("[dao.push|Push] push failed. url(%s), uuid(%s), response(%+v), task(%+v)", requestURL, uuid, res, task)
		err = fmt.Errorf("[dao.push|Push] push failed. url(%s), uuid(%s), response(%+v), task(%+v)", requestURL, uuid, res, task)
	} else {
		log.Info("[dao.push|Push] push success. url(%s), uuid(%s), response(%+v), task(%+v)", requestURL, uuid, res, task)
	}
	return
}
// GetPushTemplate renders the push copywriting for a group: special- and
// normal-follow groups are wrapped in their configured templates, any
// other group returns the text unchanged.
func (d *Dao) GetPushTemplate(group string, part string) (template string) {
	if group == model.SpecialGroup {
		return fmt.Sprintf(d.c.Push.SpecialCopyWriting, part)
	}
	if group == model.AttentionGroup {
		return fmt.Sprintf(d.c.Push.DefaultCopyWriting, part)
	}
	return part
}
// GetUUID derives a per-request uuid: the md5 hex digest of the batch
// index, group, business id, target id, expire time and the current
// nanosecond timestamp.
func (d *Dao) GetUUID(task *model.ApPushTask, times int) string {
	var b bytes.Buffer
	for _, part := range []string{
		strconv.Itoa(times),
		task.Group, // group distinguishes follow vs special-follow pushes
		strconv.Itoa(d.c.Push.BusinessID),
		strconv.FormatInt(task.TargetID, 10),
		strconv.Itoa(task.ExpireTime),
		strconv.FormatInt(time.Now().UnixNano(), 10),
	} {
		b.WriteString(part)
	}
	sum := md5.Sum(b.Bytes())
	return hex.EncodeToString(sum[:])
}
// getSign computes the request signature: keys sorted, rendered as a
// url-encoded query string, concatenated with the secret and md5-hexed.
func (d *Dao) getSign(params map[string]string, secret string) (sign string) {
	keys := make([]string, 0, len(params))
	for k := range params {
		keys = append(keys, k)
	}
	sort.Strings(keys)
	var b bytes.Buffer
	for i, k := range keys {
		if i > 0 {
			b.WriteByte('&')
		}
		b.WriteString(url.QueryEscape(k))
		b.WriteByte('=')
		b.WriteString(url.QueryEscape(params[k]))
	}
	sum := md5.Sum([]byte(b.String() + secret))
	return hex.EncodeToString(sum[:])
}
// getPushBusiness returns the push-platform business id and auth token for
// a group. Appointment pushes go through a dedicated whitelist channel with
// its own credentials; everything else uses the configured defaults.
func (d *Dao) getPushBusiness(group string) (businessID int, token string) {
	// consistency fix: use the model.ActivityAppointment constant instead
	// of repeating the "activity_appointment" literal.
	if group == model.ActivityAppointment {
		// NOTE(review): hardcoded credentials for the appointment channel;
		// consider moving them into configuration.
		businessID = 41
		token = "13aoowdzm0u8pcqdoulvj5vdkihohtcj"
		return
	}
	businessID = d.c.Push.BusinessID
	token = d.c.Push.BusinessToken
	return
}

View File

@@ -0,0 +1,38 @@
package dao
import (
"go-common/app/interface/live/push-live/model"
"testing"
. "github.com/smartystreets/goconvey/convey"
)
// TestDao_getSign pins the signature algorithm against a known digest.
func TestDao_getSign(t *testing.T) {
	initd()
	Convey("should return correct sign string by given params and secret", t, func() {
		params := map[string]string{
			"aa": "abc",
			"bb": "xyz",
			"cc": "opq",
		}
		secret := "abc"
		sign := d.getSign(params, secret)
		So(sign, ShouldEqual, "4571d284b198823bbf62f34cf38c9307")
	})
}

// TestService_GetPushTemplate checks the copywriting per push group and the
// pass-through default for unknown groups.
func TestService_GetPushTemplate(t *testing.T) {
	initd()
	Convey("should return correct template by different type", t, func() {
		name := "test"
		t1 := d.GetPushTemplate(model.AttentionGroup, name)
		t2 := d.GetPushTemplate(model.SpecialGroup, name)
		t3 := d.GetPushTemplate("test group", name)
		So(t1, ShouldEqual, "你关注的【test】正在直播~")
		So(t2, ShouldEqual, "你特别关注的【test】正在直播~")
		// default type template
		So(t3, ShouldEqual, name)
	})
}

View File

@@ -0,0 +1,44 @@
package dao
import (
"context"
"fmt"
"go-common/app/interface/live/push-live/model"
"go-common/library/log"
"github.com/pkg/errors"
)
const (
	_shard             = 10 // the switch table is split into 10 shards
	_getMidsByTargetID = "SELECT uid FROM app_switch_config_%s WHERE target_id=? AND type=? AND switch=?"
)

// tableIndex maps a target id onto its two-digit table suffix ("00".."09").
func tableIndex(targetID int64) string {
	idx := targetID % _shard
	return fmt.Sprintf("%02d", idx)
}
// GetFansBySwitch loads the mids who enabled the live-push switch for the
// given target from the sharded app_switch_config tables.
func (d *Dao) GetFansBySwitch(c context.Context, targetID int64) (fans map[int64]bool, err error) {
	var mid int64
	fans = make(map[int64]bool)
	sql := fmt.Sprintf(_getMidsByTargetID, tableIndex(targetID))
	rows, err := d.db.Query(c, sql, targetID, model.LivePushType, model.LivePushSwitchOn)
	if err != nil {
		// fix: dropped the stray debug fmt.Printf that duplicated the log
		err = errors.WithStack(err)
		log.Error("[dao.switch|GetSwitchMids] db.Query() error(%v)", err)
		return
	}
	// fix: rows were never closed, leaking the DB connection on every call
	defer rows.Close()
	for rows.Next() {
		if err = rows.Scan(&mid); err != nil {
			err = errors.WithStack(err)
			log.Error("[dao.switch|GetSwitchMids] rows.Scan() error(%v)", err)
			return
		}
		fans[mid] = true
	}
	// fix: surface iteration errors that rows.Next() swallowed
	if err = rows.Err(); err != nil {
		err = errors.WithStack(err)
		log.Error("[dao.switch|GetSwitchMids] rows.Err() error(%v)", err)
	}
	return
}

View File

@@ -0,0 +1,19 @@
package dao
import (
"context"
"testing"
. "github.com/smartystreets/goconvey/convey"
)
// Test_switch smoke-tests GetFansBySwitch against a fixed target id.
func Test_switch(t *testing.T) {
	initd()
	Convey("Parse Json To Struct", t, func() {
		target := int64(27515316)
		fs, err := d.GetFansBySwitch(context.TODO(), target)
		t.Logf("the result included(%v) err(%v)", fs, err)
		So(err, ShouldEqual, nil)
	})
}

View File

@@ -0,0 +1,25 @@
package dao
import (
"context"
"go-common/app/interface/live/push-live/model"
"go-common/library/log"
"github.com/pkg/errors"
)
const (
_createNewTask = "INSERT INTO ap_push_task(type,target_id,alert_title,alert_body,mid_source,link_type,link_value,expire_time,total) VALUES (?,?,?,?,?,?,?,?,?)"
)
// CreateNewTask inserts one push-task record and reports the affected rows.
func (d *Dao) CreateNewTask(c context.Context, task *model.ApPushTask) (affected int64, err error) {
	res, execErr := d.db.Exec(c, _createNewTask, model.LivePushType, task.TargetID, task.AlertTitle,
		task.AlertBody, task.MidSource, task.LinkType, task.LinkValue, task.ExpireTime, task.Total)
	if execErr != nil {
		err = errors.WithStack(execErr)
		log.Error("[dao.task|CreateNewTask] db.Exec() error(%v)", err)
		return
	}
	return res.RowsAffected()
}

View File

@@ -0,0 +1,30 @@
package dao
import (
"context"
"go-common/app/interface/live/push-live/model"
"math/rand"
"testing"
. "github.com/smartystreets/goconvey/convey"
)
// Test_task inserts a randomized task row and expects no DB error.
func Test_task(t *testing.T) {
	initd()
	Convey("Parse Json To Struct", t, func() {
		task := &model.ApPushTask{
			Type:       rand.Intn(9999) + 1,
			TargetID:   rand.Int63n(9999) + 1,
			AlertTitle: "title",
			AlertBody:  "body",
			MidSource:  rand.Intn(15),
			LinkType:   rand.Intn(10),
			LinkValue:  "link_value",
			Total:      rand.Intn(9999),
		}
		affected, err := d.CreateNewTask(context.TODO(), task)
		t.Logf("the result included(%v) err(%v)", affected, err)
		So(err, ShouldEqual, nil)
	})
}

View File

@@ -0,0 +1,35 @@
package(default_visibility = ["//visibility:public"])
load(
"@io_bazel_rules_go//go:def.bzl",
"go_library",
)
go_library(
name = "go_default_library",
srcs = ["http.go"],
importpath = "go-common/app/interface/live/push-live/http",
tags = ["automanaged"],
visibility = ["//visibility:public"],
deps = [
"//app/interface/live/push-live/conf:go_default_library",
"//app/interface/live/push-live/service:go_default_library",
"//library/ecode:go_default_library",
"//library/log:go_default_library",
"//library/net/http/blademaster:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,74 @@
package http
import (
"errors"
"net/http"
"go-common/app/interface/live/push-live/conf"
"go-common/app/interface/live/push-live/service"
"go-common/library/ecode"
"go-common/library/log"
bm "go-common/library/net/http/blademaster"
)
var (
pushLiveSvr *service.Service
errInvalidParams = errors.New("invalid params")
)
// Init starts the inner HTTP server and registers its routes; it panics
// when the server cannot start, since the process is useless without it.
func Init(c *conf.Config, srv *service.Service) {
	pushLiveSvr = srv
	// init router
	engineInner := bm.DefaultServer(c.BM.Inner)
	innerRouter(engineInner)
	if err := engineInner.Start(); err != nil {
		log.Error("bm.DefaultServer error(%v)", err)
		panic(err)
	}
}
// innerRouter init inner router api path.
func innerRouter(e *bm.Engine) {
	// health check endpoint
	e.Ping(ping)
	// http internal api
	group := e.Group("/xlive/internal/push-live/")
	{
		// consume part of a user's daily push quota
		group.POST("/limit/decrease", decrease)
	}
}
// ping answers the health check; replies 503 when the service is unhealthy.
func ping(c *bm.Context) {
	err := pushLiveSvr.Ping(c)
	if err == nil {
		return
	}
	log.Error("[http.http|ping] push-live ping error(%v)", err)
	c.AbortWithStatus(http.StatusServiceUnavailable)
}
// decrease consumes part of a user's daily push quota. All four form
// fields (business, uuid, target_id, mids) are required; mids is a
// comma-separated list. Replies 400-style RequestErr on bad input or a
// failed/duplicate decrease.
func decrease(c *bm.Context) {
	params := c.Request.Form
	business := params.Get("business")
	uuid := params.Get("uuid")
	targetID := params.Get("target_id")
	mids := params.Get("mids")
	// params check
	if business == "" || uuid == "" || targetID == "" || mids == "" {
		log.Error("[http.http|decrease] request params(%v) error(%v)", params, errInvalidParams)
		c.JSON(nil, ecode.RequestErr)
		return
	}
	if err := pushLiveSvr.LimitDecrease(c, business, targetID, uuid, mids); err != nil {
		log.Error("[http.http|decrease] pushLiveSvr.LimitDecrease error(%v), params(%v)", err, params)
		c.JSON(nil, ecode.RequestErr)
		return
	}
	log.Info("[http.http|decrease] decrease success, params(%v)", params)
	c.JSON(nil, nil)
}

View File

@@ -0,0 +1,28 @@
package(default_visibility = ["//visibility:public"])
load(
"@io_bazel_rules_go//go:def.bzl",
"go_library",
)
go_library(
name = "go_default_library",
srcs = ["model.go"],
importpath = "go-common/app/interface/live/push-live/model",
tags = ["automanaged"],
visibility = ["//visibility:public"],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,97 @@
package model
// ApPushTask struct of table link_push.ap_push_task.
type ApPushTask struct {
	ID         int64  `json:"id"`
	Type       int    `json:"type"` // push type, see LivePushType
	TargetID   int64  `json:"target_id"` // target upper id; 0 for common messages (see InitCommonTask)
	AlertTitle string `json:"alert_title"`
	AlertBody  string `json:"alert_body"`
	MidSource  int    `json:"mid_source"` // where the mids came from (business id / TaskSource* flags)
	LinkType   int    `json:"link_type"`
	LinkValue  string `json:"link_value"`
	Total      int    `json:"total"` // number of mids actually pushed
	ExpireTime int    `json:"expire_time"`
	Group      string // push group; no json tag, filled at runtime
}
// StartLiveMessage is the StartLiveNotify-T databus message payload.
type StartLiveMessage struct {
	TargetID   int64  `json:"target_id"`
	Uname      string `json:"uname"`
	LinkValue  string `json:"link_value"`
	ExpireTime int    `json:"expire_time"`
	RoomTitle  string `json:"room_title"`
}

// LiveCommonMessage is the LivePushCommon-T databus message envelope.
type LiveCommonMessage struct {
	Topic      string                   `json:"topic"`
	MsgID      string                   `json:"msg_id"`
	MsgKey     string                   `json:"msg_key"`
	MsgContent LiveCommonMessageContent `json:"msg_content"`
}

// LiveCommonMessageContent is the msg_content body of LivePushCommon-T.
type LiveCommonMessageContent struct {
	Business   int    `json:"business"` // business id, e.g. ActivityBusiness
	Group      string `json:"group"` // push group passed through to the platform
	Mids       string `json:"mids"` // comma-separated target mids
	AlertTitle string `json:"alert_title"`
	AlertBody  string `json:"alert_body"`
	LinkValue  string `json:"link_value"`
	LinkType   int    `json:"link_type"`
	ExpireTime int    `json:"expire_time"`
}
// Live-push switch configuration stored in the DB.
const (
	LivePushType        = 1001 // app_switch_config type for live push
	LivePushSwitchOn    = 1 // switch value: notifications enabled
	LivePushConfigOn    = 1 // config row enabled
	PushIntervalKey     = "push_interval" // admin config key for the smoothing interval
	PushIntervalDefault = 1800 // default smoothing interval, seconds
)

/**
 * Push relation types.
 * Note: 1 & 2 reuse the relation-chain types stored in HBase, while
 * RelationAll=3 is NOT a stored type — it is only a business-level notion
 * meaning "fetch both follow kinds".
 */
const (
	// RelationAttention is a normal follow.
	RelationAttention = iota + 1
	// RelationSpecial is a special follow.
	RelationSpecial
	// RelationAll means normal + special follow.
	RelationAll
)

// Push strategies recorded by the admin backend (values stored in the DB).
const (
	StrategySwitch        = "Switch" // push switch turned on
	StrategySpecial       = "Special" // special follow
	StrategyFans          = "Fans" // normal follow
	StrategySwitchSpecial = "SwitchAndSpecial" // switch on AND special follow
)

// Mid-source flags for push tasks; combined sources take the intersection.
const (
	TaskSourceSwitch    = 1
	TaskSourceSpecial   = 2
	TaskSourceFans      = 4
	TaskSourceSwitchSpe = 8
)

// Push group names carried by start-live reminder messages.
const (
	AttentionGroup      = "follow" // normal follow
	SpecialGroup        = "sfollow" // special follow
	ActivityAppointment = "activity_appointment" // activity appointment
)

// Business ids.
const (
	StartLiveBusiness = 1 // start-live reminder
	ActivityBusiness  = 111 // activity appointment
)

View File

@@ -0,0 +1,65 @@
package(default_visibility = ["//visibility:public"])
load(
"@io_bazel_rules_go//go:def.bzl",
"go_test",
"go_library",
)
go_test(
name = "go_default_test",
srcs = [
"common_message_test.go",
"mids_test.go",
"push_test.go",
"service_test.go",
"start_live_test.go",
],
embed = [":go_default_library"],
tags = ["automanaged"],
deps = [
"//app/interface/live/push-live/conf:go_default_library",
"//app/interface/live/push-live/dao:go_default_library",
"//app/interface/live/push-live/model:go_default_library",
"//library/cache/redis:go_default_library",
"//vendor/github.com/smartystreets/goconvey/convey:go_default_library",
],
)
go_library(
name = "go_default_library",
srcs = [
"common_message.go",
"mids.go",
"push.go",
"service.go",
"start_live.go",
],
importpath = "go-common/app/interface/live/push-live/service",
tags = ["automanaged"],
visibility = ["//visibility:public"],
deps = [
"//app/interface/live/push-live/conf:go_default_library",
"//app/interface/live/push-live/dao:go_default_library",
"//app/interface/live/push-live/model:go_default_library",
"//library/cache/redis:go_default_library",
"//library/log:go_default_library",
"//library/queue/databus:go_default_library",
"//library/sync/errgroup:go_default_library",
"//vendor/github.com/pkg/errors:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,94 @@
package service
import (
"context"
"encoding/json"
"go-common/app/interface/live/push-live/dao"
"go-common/app/interface/live/push-live/model"
"go-common/library/cache/redis"
"go-common/library/log"
"go-common/library/queue/databus"
)
// LiveCommonMessage handles one LivePushCommon-T databus message: parse,
// de-duplicate and filter the mids, push, then asynchronously record the
// task and (for appointment pushes) set the smoothing keys.
// Note: msg.Commit() is deferred up front, so the message is acked even
// when parsing fails (at-most-once handling).
func (s *Service) LiveCommonMessage(ctx context.Context, msg *databus.Message) (err error) {
	defer msg.Commit()
	var (
		mids   []int64
		mMap   = make(map[int64]bool) // mid de-dup set
		midMap = make(map[int][]int64) // business -> final filtered mids
	)
	m := new(model.LiveCommonMessage)
	if err = json.Unmarshal(msg.Value, &m); err != nil {
		log.Error("[service.common_message|LiveCommonMessage] json Unmarshal error(%v), model(%v)", err, m)
		return
	}
	task := s.InitCommonTask(m)
	if mids, err = s.convertStrToInt64(m.MsgContent.Mids); err != nil {
		log.Error("[service.push|LiveCommonMessage] format Mids error(%v), task(%v), model(%v)", err, task, m)
		return
	}
	// remove duplicated mid
	for _, mid := range mids {
		mMap[mid] = true
	}
	// mid filter
	business := m.MsgContent.Business
	filteredMids := s.midFilter(mMap, business, task)
	midMap[business] = filteredMids
	log.Info("[service.push|LiveCommonMessage] message info: before(%d), after(%d), model(%v), task(%v)",
		len(mMap), len(midMap[business]), m, task)
	total := s.Push(task, midMap)
	// create push task
	go s.CreatePushTask(task, total)
	go s.setPushInterval(business, s.safeGetExpired(), filteredMids, task)
	log.Info("[service.push|LiveCommonMessage] common message push done, total(%d), err(%v)", total, err)
	return
}
// InitCommonTask builds an ApPushTask from a common live push message.
func (s *Service) InitCommonTask(m *model.LiveCommonMessage) (task *model.ApPushTask) {
	content := m.MsgContent
	return &model.ApPushTask{
		Type:       model.LivePushType,
		TargetID:   0, // common messages carry no single target upper
		AlertTitle: content.AlertTitle,
		AlertBody:  content.AlertBody,
		MidSource:  content.Business,
		LinkType:   content.LinkType,
		LinkValue:  content.LinkValue,
		ExpireTime: content.ExpireTime,
		Group:      content.Group,
	}
}
// setPushInterval sets the push-smoothing redis key for every pushed mid so
// that appointment pushes are not repeated within the interval window. Only
// the activity-appointment business is smoothed; anything else is a no-op.
// Returns the number of keys set; err holds the last redis error seen.
func (s *Service) setPushInterval(business int, expired int32, mids []int64, task *model.ApPushTask) (total int, err error) {
	// consistency fix: use the named constant instead of the magic 111
	if business != model.ActivityBusiness {
		return
	}
	var conn redis.Conn
	defer func() {
		if conn != nil {
			conn.Close()
		}
	}()
	// redis conn
	conn, err = redis.Dial(s.c.Redis.PushInterval.Proto, s.c.Redis.PushInterval.Addr, s.dao.RedisOption()...)
	if err != nil {
		log.Error("[service.common_message|setPushInterval] redis.Dial error(%v), task(%v), mids(%d)",
			err, task, len(mids))
		return
	}
	for _, mid := range mids {
		key := dao.GetIntervalKey(mid)
		_, err = conn.Do("SET", key, task.LinkValue, "EX", expired)
		if err != nil {
			// best effort: log and keep setting the remaining keys
			log.Error("[service.common_message|setPushInterval] set redis error(%v), task(%v), mid(%d)",
				err, task, mid)
			continue
		}
		total++
	}
	return
}

View File

@@ -0,0 +1,98 @@
package service
import (
. "github.com/smartystreets/goconvey/convey"
"go-common/app/interface/live/push-live/dao"
"go-common/app/interface/live/push-live/model"
"go-common/library/cache/redis"
"math/rand"
"strconv"
"testing"
)
// makeTestCommonPushTask builds an ApPushTask through InitCommonTask from
// raw message fields.
func makeTestCommonPushTask(title, body, linkValue, group string, business, expireTime int) (task *model.ApPushTask) {
	m := &model.LiveCommonMessage{}
	m.MsgContent = model.LiveCommonMessageContent{
		Business:   business,
		Group:      group,
		Mids:       "",
		AlertTitle: title,
		AlertBody:  body,
		LinkValue:  linkValue,
		ExpireTime: expireTime,
	}
	task = s.InitCommonTask(m)
	return
}

// TestService_InitCommonTask checks field-by-field mapping from message
// content into the task.
func TestService_InitCommonTask(t *testing.T) {
	initd()
	Convey("should return init struct", t, func() {
		title := "room_title"
		body := "测试"
		group := "group"
		linkValue := strconv.Itoa(rand.Intn(9999))
		expireTime := rand.Intn(10000) + 1
		business := rand.Intn(9999)
		task := makeTestCommonPushTask(title, body, linkValue, group, business, expireTime)
		So(task.AlertTitle, ShouldResemble, title)
		So(task.AlertBody, ShouldResemble, body)
		So(task.ExpireTime, ShouldResemble, expireTime)
		So(task.LinkValue, ShouldResemble, linkValue)
		So(task.MidSource, ShouldEqual, business)
		So(task.Group, ShouldEqual, group)
	})
}

// TestService_setPushInterval verifies the business gate (non-appointment
// businesses are a no-op) and that appointment pushes set one key per mid,
// cleaning the keys up afterwards.
func TestService_setPushInterval(t *testing.T) {
	initd()
	Convey("test setPushInterval", t, func() {
		var (
			resTotal int
			total    int
			business int
			task     *model.ApPushTask
			mids     []int64
			err      error
		)
		Convey("test business will not exec logic", func() {
			business = rand.Intn(100)
			task = &model.ApPushTask{}
			total = 10
			mids = makeMids(total)
			resTotal, err = s.setPushInterval(business, rand.Int31(), mids, task)
			So(err, ShouldBeNil)
			So(resTotal, ShouldEqual, 0)
		})
		Convey("test business will exec logic", func() {
			var conn redis.Conn
			business = 111
			task = &model.ApPushTask{
				LinkValue: "test",
			}
			total = 10
			mids = makeMids(total)
			resTotal, err = s.setPushInterval(business, 300, mids, task)
			So(err, ShouldBeNil)
			So(resTotal, ShouldEqual, total)
			// clean
			conn, err = redis.Dial(s.c.Redis.PushInterval.Proto, s.c.Redis.PushInterval.Addr, s.dao.RedisOption()...)
			So(err, ShouldBeNil)
			for _, mid := range mids {
				key := dao.GetIntervalKey(mid)
				conn.Do("DEL", key)
			}
			conn.Close()
		})
	})
}
// makeMids returns total pseudo-random mids for use as test input.
func makeMids(total int) []int64 {
	out := make([]int64, 0, total)
	for len(out) < total {
		out = append(out, rand.Int63())
	}
	return out
}

View File

@@ -0,0 +1,96 @@
package service
import (
"context"
"go-common/app/interface/live/push-live/dao"
"go-common/app/interface/live/push-live/model"
"go-common/library/log"
"strings"
"sync"
"time"
)
// midFilter is the single entry point for all mid filtering: it splits the
// input set into IntervalLimit-sized chunks, runs the dao filter chain on
// each chunk concurrently, optionally decreases daily push quotas for the
// survivors, and returns the merged surviving mids.
func (s *Service) midFilter(ml map[int64]bool, business int, task *model.ApPushTask) (midMap []int64) {
	var (
		mutex        sync.Mutex
		i            int
		midsList     [][]int64
		wg           sync.WaitGroup
		needDecrease = needDecrease(business)
		filterConf   = &dao.FilterConfig{
			Business:        business,
			IntervalExpired: s.safeGetExpired(),
			IntervalValue:   intervalValueByLinkValue(task.LinkValue),
			DailyExpired:    dailyExpired(time.Now()),
			Task:            task}
	)
	midMap = make([]int64, 0, len(ml))
	// split mids into chunks of at most IntervalLimit
	mids := make([]int64, 0, s.c.Push.IntervalLimit)
	for mid := range ml {
		mids = append(mids, mid)
		i++
		if i == s.c.Push.IntervalLimit {
			i = 0
			midsList = append(midsList, mids)
			mids = make([]int64, 0, s.c.Push.IntervalLimit)
		}
	}
	if len(mids) > 0 {
		midsList = append(midsList, mids)
	}
	// one filter goroutine per chunk
	for i := 0; i < len(midsList); i++ {
		wg.Add(1)
		go func(index int, mids []int64) {
			var (
				filteredMids []int64
				f            *dao.Filter
				err          error
				ctx          = context.TODO()
			)
			defer func() {
				log.Info("[service.mids|midFilter] BatchFilter before(%d), after(%d), task(%v), business(%d), err(%v)",
					len(mids), len(filteredMids), task, business, err)
				wg.Done()
			}()
			// new filter
			f, err = s.dao.NewFilter(filterConf)
			if err != nil {
				return
			}
			filteredMids = f.BatchFilter(ctx, s.dao.NewFilterChain(f), mids)
			if len(filteredMids) == 0 {
				f.Done()
				return
			}
			// survivors consume daily quota (skipped for activity pushes)
			if needDecrease {
				go f.BatchDecreaseLimit(ctx, filteredMids)
			}
			mutex.Lock()
			midMap = append(midMap, filteredMids...)
			mutex.Unlock()
		}(i, midsList[i])
	}
	wg.Wait()
	log.Info("[service.mids|midFilter] filtered task(%v), before(%d), after(%d), type(%d)",
		task, len(ml), len(midMap), business)
	return
}
// intervalValueByLinkValue extracts the room id — the first comma-separated
// field — from a link value; a value with no comma is returned whole.
func intervalValueByLinkValue(linkValue string) string {
	if i := strings.Index(linkValue, ","); i >= 0 {
		return linkValue[:i]
	}
	return linkValue
}
// needDecrease reports whether pushes for this business consume the user's
// daily push quota; the activity-appointment business is exempt.
func needDecrease(business int) bool {
	return business != model.ActivityBusiness
}

View File

@@ -0,0 +1,53 @@
package service
import (
"testing"
. "github.com/smartystreets/goconvey/convey"
"go-common/app/interface/live/push-live/dao"
"go-common/app/interface/live/push-live/model"
"go-common/library/cache/redis"
"math/rand"
)
// TestService_MidFilter feeds random mids through the full filter chain
// with a non-activity business and expects all of them to survive, then
// removes the quota/interval keys the filter created.
func TestService_MidFilter(t *testing.T) {
	initd()
	Convey("test mid filter", t, func() {
		var (
			total    int
			midList  map[int64]bool
			business int
			resMids  []int64
			task     *model.ApPushTask
			err      error
			conn     redis.Conn
		)
		business = rand.Intn(99) + 1 // should through all filters
		// init mids input
		total = 10
		midList = make(map[int64]bool, total)
		for i := 0; i < total; i++ {
			mid := rand.Int63()
			midList[mid] = true
		}
		// init task
		task = &model.ApPushTask{
			LinkValue: "test",
		}
		// do mid filter
		resMids = s.midFilter(midList, business, task)
		So(len(resMids), ShouldEqual, total)
		// clean
		conn, err = redis.Dial(s.c.Redis.PushInterval.Proto, s.c.Redis.PushInterval.Addr, s.dao.RedisOption()...)
		So(err, ShouldBeNil)
		for mid := range midList {
			keys := []string{
				dao.GetDailyLimitKey(mid),
				dao.GetIntervalKey(mid),
			}
			for _, key := range keys {
				conn.Do("DEL", key)
			}
		}
	})
}

View File

@@ -0,0 +1,92 @@
package service
import (
"context"
"go-common/app/interface/live/push-live/model"
"go-common/library/log"
"time"
)
// liveMessageConsumeproc consumes the start-live and common live push
// databus topics until either channel is closed.
func (s *Service) liveMessageConsumeproc() {
	defer func() {
		log.Warn("liveMessageConsumeproc exited.")
		s.wg.Done()
	}()
	var (
		liveStartMsgs  = s.liveStartSub.Messages()
		liveCommonMsgs = s.liveCommonSub.Messages()
	)
	for {
		select {
		case msg, ok := <-liveStartMsgs:
			if !ok {
				log.Warn("[service.push|liveMessageConsumeproc] liveStartSub has been closed.")
				return
			}
			log.Info("[service.push|liveMessageConsumeproc] consume liveStartSub key(%s) offset(%d) message(%s)",
				msg.Key, msg.Offset, msg.Value)
			s.LiveStartMessage(context.TODO(), msg)
		case msg, ok := <-liveCommonMsgs:
			if !ok {
				log.Warn("[service.push|liveMessageConsumeproc] liveCommonSub has been closed.")
				return
			}
			log.Info("[service.push|liveMessageConsumeproc] consume liveCommonSub key(%s) offset(%d) message(%s)",
				msg.Key, msg.Offset, msg.Value)
			s.LiveCommonMessage(context.TODO(), msg)
		default:
			// NOTE(review): this default branch turns the select into a
			// 3-second poll loop, adding up to 3s latency per message; a
			// select without default would simply block until a message
			// arrives. Worth confirming and removing.
			time.Sleep(time.Second * 3)
			continue
		}
	}
}
// Push assembles the business parameters per relation type and calls the
// push platform; returns the number of mids actually delivered.
func (s *Service) Push(task *model.ApPushTask, midMap map[int][]int64) (total int) {
	var shouldPushCount int
	for t, list := range midMap {
		length := len(list)
		shouldPushCount += length
		if length > 0 {
			// BatchPush chunks the mids and retries failures, returning
			// the actually-delivered count
			task.Group = s.GetPushGroup(t, task.Group)
			pushCount := s.dao.BatchPush(&list, task)
			log.Info("[service.push|Push] push type(%d), count(%d), target_id(%v)", t, pushCount, task.TargetID)
			total += pushCount
		}
	}
	if shouldPushCount == 0 {
		log.Info("[service.push|Push] None to push, task(%+v)", task)
		return
	}
	log.Info("[service.push|Push] push done.should(%d), actual(%d), task(%+v).", shouldPushCount, total, task)
	return
}
// CreatePushTask persists the finished push task with its final total.
func (s *Service) CreatePushTask(task *model.ApPushTask, total int) (affected int64, err error) {
	task.Total = total
	if affected, err = s.dao.CreateNewTask(context.TODO(), task); err != nil || affected == 0 {
		log.Error("[service.push|CreatePushTask] CreateNewTask error(%v), task(%+v)", err, task)
		return
	}
	log.Info("[service.push|CreatePushTask] CreateNewTask success, task(%+v)", task)
	return
}
// GetPushGroup resolves the push group for a relation type. Start-live
// pushes map the relation type to a fixed group (follow vs special
// follow); any other type falls back to the group carried by the message.
func (s *Service) GetPushGroup(t int, g string) string {
	switch t {
	case model.RelationAttention:
		return model.AttentionGroup
	case model.RelationSpecial:
		return model.SpecialGroup
	}
	return g
}

View File

@@ -0,0 +1,59 @@
package service
import (
. "github.com/smartystreets/goconvey/convey"
"go-common/app/interface/live/push-live/model"
"math/rand"
"strconv"
"testing"
)
// makeTestInitPushTask builds a start-live push task via InitPushTask.
func makeTestInitPushTask(targetID int64, uname, linkValue,
	roomTitle string, expireTime int) (task *model.ApPushTask) {
	m := &model.StartLiveMessage{
		TargetID:   targetID,
		Uname:      uname,
		LinkValue:  linkValue,
		RoomTitle:  roomTitle,
		ExpireTime: expireTime,
	}
	task = s.InitPushTask(m)
	return
}

// TestService_Push checks that pushing an empty mid list delivers nothing.
func TestService_Push(t *testing.T) {
	initd()
	Convey("test push func", t, func() {
		// test empty mids
		targetID := rand.Int63n(100) + 1
		uname := "测试"
		linkValue := strconv.Itoa(rand.Intn(9999))
		roomTitle := "room_title"
		expireTime := rand.Intn(10000) + 1
		task := makeTestInitPushTask(targetID, uname, linkValue, roomTitle, expireTime)
		midMap := make(map[int][]int64)
		midMap[model.RelationAttention] = []int64{}
		total := s.Push(task, midMap)
		So(total, ShouldEqual, 0)
	})
}

// TestService_GetPushGroup checks relation-type to group mapping and the
// pass-through fallback for unknown types.
func TestService_GetPushGroup(t *testing.T) {
	initd()
	Convey("test get group by different push type", t, func() {
		var (
			group     string
			testGroup = "test_group"
		)
		group = s.GetPushGroup(model.RelationAttention, "")
		So(group, ShouldEqual, model.AttentionGroup)
		group = s.GetPushGroup(model.RelationSpecial, "")
		So(group, ShouldEqual, model.SpecialGroup)
		group = s.GetPushGroup(rand.Intn(9999), testGroup)
		So(group, ShouldEqual, testGroup)
	})
}

View File

@@ -0,0 +1,238 @@
package service
import (
"context"
"errors"
"fmt"
"math"
"strconv"
"strings"
"sync"
"time"
"go-common/app/interface/live/push-live/conf"
"go-common/app/interface/live/push-live/dao"
"go-common/library/cache/redis"
"go-common/library/log"
"go-common/library/queue/databus"
)
var (
_limitDecreaseUUIDKey = "ld:%s" // 接口请求防重复key
errLimitRequestRepeat = errors.New("limit decrease request repeat")
errConvertMidString = errors.New("convert mid string error")
errConvertBusiness = errors.New("convert business error")
)
// Service struct
type Service struct {
	c               *conf.Config
	dao             *dao.Dao
	liveStartSub    *databus.Databus // StartLiveNotify-T subscriber
	liveCommonSub   *databus.Databus // LivePushCommon-T subscriber
	wg              sync.WaitGroup // tracks consumer + config-loader goroutines
	closeCh         chan bool // closed by Close() to stop loadPushConfig
	pushTypes       []string // push strategies from config; guarded by mutex
	intervalExpired int32 // push smoothing interval (seconds); guarded by mutex
	mutex           sync.RWMutex // guards pushTypes and intervalExpired
}
// New creates the Service: wires the dao, subscribes to the start-live and
// common-push topics, starts the once-a-minute config reload loop and
// ConsumerProcNum message-consumer goroutines.
func New(c *conf.Config) (s *Service) {
	s = &Service{
		c:             c,
		dao:           dao.New(c),
		liveStartSub:  databus.New(c.LiveRoomSub),
		liveCommonSub: databus.New(c.LiveCommonSub),
		closeCh:       make(chan bool),
		pushTypes:     make([]string, 0, 4),
		mutex:         sync.RWMutex{},
	}
	s.wg.Add(1)
	go s.loadPushConfig()
	for i := 0; i < c.Push.ConsumerProcNum; i++ {
		s.wg.Add(1)
		go s.liveMessageConsumeproc()
	}
	return s
}
// loadPushConfig reloads the push smoothing interval and the push strategy
// options from the dao once a minute until closeCh is closed. Failed loads
// keep the previous values and retry on the next tick.
func (s *Service) loadPushConfig() {
	var ctx = context.TODO()
	defer s.wg.Done()
	for {
		select {
		case _, ok := <-s.closeCh:
			if !ok {
				log.Info("[service.push|loadPushConfig] s.loadPushConfig is closed by closeCh")
				return
			}
		default:
		}
		// get push delay time
		interval, err := s.dao.GetPushInterval(ctx)
		if err != nil || interval < 0 {
			time.Sleep(time.Duration(time.Minute))
			continue
		}
		s.mutex.Lock()
		s.intervalExpired = interval
		s.mutex.Unlock()
		// get push options
		types, err := s.dao.GetPushConfig(ctx)
		if err != nil || len(types) == 0 {
			time.Sleep(time.Duration(time.Minute))
			continue
		}
		s.mutex.Lock()
		s.pushTypes = types
		s.mutex.Unlock()
		time.Sleep(time.Duration(time.Minute))
	}
}
// safeGetExpired returns the current smoothing interval under the read lock.
func (s *Service) safeGetExpired() int32 {
	s.mutex.RLock()
	defer s.mutex.RUnlock()
	return s.intervalExpired
}
// LimitDecrease consumes daily push quota for a comma-separated mid list.
// The (business, targetID, uuid) triple de-duplicates repeated requests;
// the actual quota decrease runs asynchronously.
func (s *Service) LimitDecrease(ctx context.Context, business, targetID, uuid, midStr string) (err error) {
	var (
		f    *dao.Filter
		mids []int64
		b    int
	)
	// reject duplicate requests (uuid-based de-dup)
	err = s.limitDecreaseUnique(getUniqueKey(business, targetID, uuid))
	if err != nil {
		log.Error("[service.service|LimitDecrease] limitDecreaseUnique error(%v), uuid(%s), business(%s), targetID(%s), mid(%s)",
			err, uuid, business, targetID, midStr)
		return
	}
	b, err = strconv.Atoi(business)
	if err != nil {
		log.Error("[service.service|LimitDecrease] strconv business params error(%v)", err)
		err = errConvertBusiness
		return
	}
	filterConf := &dao.FilterConfig{
		Business:     b,
		DailyExpired: dailyExpired(time.Now())}
	// convert mid string to []int64
	mids, err = s.convertStrToInt64(midStr)
	if err != nil {
		log.Error("[service.service|LimitDecrease] convertStrToInt64 error(%v), business(%s), uuid(%s), mids(%s)",
			err, business, uuid, midStr)
		err = errConvertMidString
		return
	}
	// async decrease limit
	f, err = s.dao.NewFilter(filterConf)
	if err != nil {
		log.Error("[service.service|LimitDecrease] new filter error(%v), business(%s), uuid(%s), mids(%v)",
			err, business, uuid, mids)
		return
	}
	go f.BatchDecreaseLimit(ctx, mids)
	return
}
// Ping reports service health; it currently always succeeds.
func (s *Service) Ping(c context.Context) (err error) {
	return
}
// Close shuts the service down. The order matters: closing closeCh
// stops the config-reload loop, closing the subscriptions lets the
// consumer goroutines drain and exit, Wait joins all of them, and only
// then are dao resources released.
func (s *Service) Close() {
	close(s.closeCh)
	s.subClose()
	s.wg.Wait()
	s.dao.Close()
}
// subClose closes all databus subscription channels so the consumer
// goroutines ranging over them can terminate.
func (s *Service) subClose() {
	s.liveCommonSub.Close()
	s.liveStartSub.Close()
}
// dailyExpired
func dailyExpired(from time.Time) float64 {
tm1 := time.Date(from.Year(), from.Month(), from.Day(), 0, 0, 0, 0, from.Location())
tm2 := tm1.AddDate(0, 0, 1)
return math.Floor(tm2.Sub(from).Seconds())
}
// convertStrToInt64 converts a comma-separated mid string into an
// []int64 slice. Entries that fail to parse are logged and skipped; an
// error is returned only when every entry fails. An empty input yields
// a nil slice and no error.
func (s *Service) convertStrToInt64(m string) (mInts []int64, err error) {
	var (
		mSplit   []string
		errCount int
	)
	if m == "" {
		return
	}
	mSplit = strings.Split(m, ",")
	for _, mStr := range mSplit {
		// ParseInt keeps the full int64 range even on 32-bit builds,
		// unlike strconv.Atoi whose result is platform-sized.
		mInt, convErr := strconv.ParseInt(mStr, 10, 64)
		if convErr != nil {
			log.Error("[service.push|formatMidstr] convert mid(%v), error(%v)", mStr, convErr)
			errCount++
			continue
		}
		mInts = append(mInts, mInt)
	}
	if errCount == len(mSplit) {
		err = fmt.Errorf("[service.push|formatMidstr] convert all mid failed, midstr(%s)", m)
	}
	return
}
// limitDecreaseUnique guards LimitDecrease against replayed requests:
// it issues SET key <now> EX <seconds-to-midnight> NX, so only the
// first caller for a given key succeeds until the key expires.
// Returns errLimitRequestRepeat when the key already exists, or the
// redis error on connection/command failure.
func (s *Service) limitDecreaseUnique(key string) (err error) {
	var (
		conn  redis.Conn
		reply interface{}
	)
	// conn stays nil if Dial fails, so guard the Close.
	defer func() {
		if conn != nil {
			conn.Close()
		}
	}()
	conn, err = redis.Dial(s.c.Redis.PushInterval.Proto, s.c.Redis.PushInterval.Addr, s.dao.RedisOption()...)
	if err != nil {
		log.Error("[service.service|limitDecreaseUnique] redis.Dial error(%v)", err)
		return
	}
	// redis cache exists judgement: SET ... NX returns nil when the key
	// is already present. The stored value (time.Now()) is informational
	// only; the TTL is the floored seconds until local midnight.
	reply, err = conn.Do("SET", key, time.Now(), "EX", dailyExpired(time.Now()), "NX")
	if err != nil {
		return
	}
	// key exists — this request is a duplicate.
	if reply == nil {
		err = errLimitRequestRepeat
		return
	}
	return
}
// getUniqueKey builds the request-dedup cache key from business,
// targetID and uuid.
// NOTE(review): plain concatenation means ("a","bc") and ("ab","c")
// would collide — confirm callers' value formats make this impossible,
// or add a separator.
func getUniqueKey(a, b, c string) string {
	return fmt.Sprintf(_limitDecreaseUUIDKey, a+b+c)
}

View File

@@ -0,0 +1,84 @@
package service
import (
"context"
"flag"
. "github.com/smartystreets/goconvey/convey"
"go-common/app/interface/live/push-live/conf"
"go-common/library/cache/redis"
"path/filepath"
"testing"
)
// Shared fixtures for the service tests.
var (
	s        *Service // service under test, initialised by initd
	targetID int64    // target uid reused across test cases
)
// initd points the conf flag at the test TOML file and constructs the
// Service under test. Errors from Abs/flag.Set/conf.Init are
// deliberately ignored: a broken config surfaces when New runs.
func initd() {
	dir, _ := filepath.Abs("../cmd/push-live-test.toml")
	flag.Set("conf", dir)
	conf.Init()
	s = New(conf.Conf)
}
// TestService_ConvertStrToInt64 checks the happy path of the
// comma-separated mid parser.
func TestService_ConvertStrToInt64(t *testing.T) {
	initd()
	Convey("test convert", t, func() {
		mStr := "1,2,3"
		mInt64 := []int64{
			int64(1), int64(2), int64(3),
		}
		mRes, err := s.convertStrToInt64(mStr)
		So(err, ShouldBeNil)
		So(mRes, ShouldResemble, mInt64)
	})
}
// TestService_limitDecreaseUnique verifies the SET-NX dedup guard
// accepts a first-time key. Integration test: requires the redis
// instance from the test config.
func TestService_limitDecreaseUnique(t *testing.T) {
	initd()
	Convey("test limit decrease request unique", t, func() {
		var (
			err  error
			conn redis.Conn
			key  string
		)
		Convey("test success request", func() {
			key = "test_request_unique"
			// Separate connection used only to clean up the fixture key.
			conn, err = redis.Dial(s.c.Redis.PushInterval.Proto, s.c.Redis.PushInterval.Addr, s.dao.RedisOption()...)
			So(err, ShouldBeNil)
			err = s.limitDecreaseUnique(key)
			So(err, ShouldBeNil)
			// clean: drop the dedup key so reruns still pass.
			conn.Do("DEL", key)
			conn.Close()
		})
	})
}
// TestService_LimitDecrease runs the full limit-decrease flow once.
// Integration test: needs redis plus the filter backend from config.
func TestService_LimitDecrease(t *testing.T) {
	initd()
	Convey("test LimitDecrease service", t, func() {
		var (
			ctx                              = context.Background()
			business, targetID, uuid, midStr string
			err                              error
			conn                             redis.Conn
		)
		Convey("test success", func() {
			business = "111"
			targetID = "123"
			uuid = "test"
			midStr = "1,2,3"
			conn, err = redis.Dial(s.c.Redis.PushInterval.Proto, s.c.Redis.PushInterval.Addr, s.dao.RedisOption()...)
			So(err, ShouldBeNil)
			err = s.LimitDecrease(ctx, business, targetID, uuid, midStr)
			So(err, ShouldBeNil)
			// clean: remove the dedup key so the test can be re-run.
			key := getUniqueKey(business, targetID, uuid)
			conn.Do("DEL", key)
			conn.Close()
		})
	})
}

View File

@@ -0,0 +1,185 @@
package service
import (
"context"
"encoding/json"
"fmt"
"go-common/app/interface/live/push-live/model"
"go-common/library/log"
"go-common/library/queue/databus"
"go-common/library/sync/errgroup"
"sync"
"github.com/pkg/errors"
)
// LiveStartMessage consumes a single start-live databus message:
// unmarshal it, build the push task, resolve the target mids per the
// configured strategies, push, and record the task asynchronously.
// The message is committed on return whether or not the push succeeded;
// failures are logged, never retried.
func (s *Service) LiveStartMessage(ctx context.Context, msg *databus.Message) (err error) {
	// NOTE(review): Commit's return value is discarded — confirm the
	// databus client tolerates an unchecked commit here.
	defer msg.Commit()
	var total int
	// message
	m := new(model.StartLiveMessage)
	if err = json.Unmarshal(msg.Value, &m); err != nil {
		log.Error("[service.start_live|LiveStartMessage] json Unmarshal error(%v)", err)
		return
	}
	task := s.InitPushTask(m)
	midMap := s.GetMids(ctx, task)
	// do push
	total = s.Push(task, midMap)
	// Record the push task fire-and-forget.
	go s.CreatePushTask(task, total)
	log.Info("[service.push|LiveStartMessage] start live push done, total(%d), task(%v), model(%v), err(%v)",
		total, task, m, err)
	return
}
// InitPushTask builds the ApPushTask for a start-live message, deriving
// MidSource from the currently configured push strategies.
func (s *Service) InitPushTask(m *model.StartLiveMessage) (task *model.ApPushTask) {
	// Snapshot the strategy list under the read lock.
	s.mutex.RLock()
	types := s.pushTypes
	s.mutex.RUnlock()
	// Assemble the task from the message fields and service config.
	return &model.ApPushTask{
		Type:       model.LivePushType,
		TargetID:   m.TargetID,
		AlertTitle: m.Uname,
		AlertBody:  m.RoomTitle,
		MidSource:  s.getSourceByTypes(types),
		LinkType:   s.c.Push.LinkType,
		LinkValue:  m.LinkValue,
		ExpireTime: m.ExpireTime,
	}
}
// GetMids resolves the mids to push to for a start-live task. It
// queries one source per configured strategy concurrently, takes the
// union of the results, drops mids on the start-live blacklist, and
// runs the final per-relation filter. The returned map is keyed by
// relation type (special vs ordinary attention). Per-source failures
// are logged and skipped, so the result may be partial.
func (s *Service) GetMids(c context.Context, task *model.ApPushTask) map[int][]int64 {
	var (
		mutex        sync.Mutex
		group        = errgroup.Group{}
		fans         = make(map[int64]bool)
		fansSP       = make(map[int64]bool)
		midMap       = make(map[int][]int64)
		midBlackList = make(map[int64]bool)
	)
	// Fetch the blacklist; on failure fall back to an empty one so the
	// push still goes out unfiltered.
	mb, err := s.dao.GetBlackList(c, task)
	if err != nil {
		log.Error("[service.start_live|GetMids] get black list error(%v), task(%+v)", err, task)
	} else {
		midBlackList = mb
		log.Info("[service.start_live|GetMids] get black list len(%d), task(%+v)", len(midBlackList), task)
	}
	// try get latest push options and expired time
	s.mutex.RLock()
	currentPushTypes := s.pushTypes
	s.mutex.RUnlock()
	// One goroutine per strategy; results are unioned under the mutex.
	for _, t := range currentPushTypes {
		tp := string(t) // per-iteration copy for the closure (pre-1.22 loop-var capture)
		group.Go(func() (e error) {
			var mFans, mSpe map[int64]bool
			switch tp {
			case model.StrategySwitch:
				// live push switch (may contain both relation kinds)
				mFans, mSpe, e = s.GetFansBySwitch(context.TODO(), task.TargetID)
			case model.StrategySpecial:
				// special follows only
				mFans, mSpe, e = s.dao.Fans(context.TODO(), task.TargetID, model.RelationSpecial)
			case model.StrategyFans:
				// ordinary follows only
				mFans, mSpe, e = s.dao.Fans(context.TODO(), task.TargetID, model.RelationAttention)
			case model.StrategySwitchSpecial:
				// special follows within the push switch
				mFans, mSpe, e = s.GetFansBySwitchAndSpecial(context.TODO(), task.TargetID)
			default:
				log.Error("[service.mids|GetMids] strategy invalid, type(%s), task(%+v)", tp, task)
				e = fmt.Errorf("[service.mids|GetMids] strategy invalid, type(%s), task(%+v)", tp, task)
				return e
			}
			if e != nil {
				log.Error("[service.mids|GetMids] get mid error(%v), type(%s), task(%+v)", e, tp, task)
				return e
			}
			// Union across sources (the maps dedupe repeated ids) while
			// filtering out blacklisted mids.
			mutex.Lock()
			for fansID := range mFans {
				if _, ok := midBlackList[fansID]; !ok {
					fans[fansID] = true
				}
			}
			for fansID := range mSpe {
				if _, ok := midBlackList[fansID]; !ok {
					fansSP[fansID] = true
				}
			}
			mutex.Unlock()
			log.Info("[service.mids|GetMids] get mids by type(%s), task(%+v), common(%d), special(%d)",
				tp, task, len(mFans), len(mSpe))
			return e
		})
	}
	// Per-source errors were already logged; the aggregate error is
	// deliberately ignored so successful sources still get pushed.
	group.Wait()
	if len(fansSP) > 0 {
		midMap[model.RelationSpecial] = s.midFilter(fansSP, model.StartLiveBusiness, task)
	}
	if len(fans) > 0 {
		midMap[model.RelationAttention] = s.midFilter(fans, model.StartLiveBusiness, task)
	}
	return midMap
}
// GetFansBySwitch returns, for targetID, the fans reachable through the
// live push switch, split into ordinary fans and special-follow fans.
func (s *Service) GetFansBySwitch(c context.Context, targetID int64) (fans map[int64]bool, fansSP map[int64]bool, err error) {
	// Switch data from the live side can mix both relation kinds.
	switchMids, e := s.dao.GetFansBySwitch(c, targetID)
	if e != nil {
		err = errors.WithStack(e)
		log.Error("[service.mids|GetMidsBySwitch] get switch mids error(%v), targetID(%v)", err, targetID)
		return
	}
	// Split the mixed set into ordinary vs special followers.
	fans, fansSP, err = s.dao.SeparateFans(c, targetID, switchMids)
	return
}
// GetFansBySwitchAndSpecial returns only the special-follow fans among
// the users reachable through the live push switch; the ordinary fans
// result is always empty by construction.
func (s *Service) GetFansBySwitchAndSpecial(c context.Context, targetID int64) (fans map[int64]bool, fansSP map[int64]bool, err error) {
	// Raw switch data (mixed ordinary + special follows).
	switchMids, e := s.dao.GetFansBySwitch(c, targetID)
	if e != nil {
		err = errors.WithStack(e)
		log.Error("[service.mids|GetMidsBySwitch] get switch mids error(%v), targetID(%v)", err, targetID)
		return
	}
	// Keep only the special-follow portion; the ordinary part is discarded.
	_, fansSP, err = s.dao.SeparateFans(c, targetID, switchMids)
	return
}
// getSourceByTypes folds the configured push strategies into the
// Task.MidSource bitmask. Unknown strategy names contribute 0.
// Sources are combined with XOR, which is equivalent to OR as long as
// each strategy appears at most once in types.
func (s *Service) getSourceByTypes(types []string) int {
	sources := map[string]int{
		model.StrategySwitch:        model.TaskSourceSwitch,
		model.StrategySpecial:       model.TaskSourceSpecial,
		model.StrategyFans:          model.TaskSourceFans,
		model.StrategySwitchSpecial: model.TaskSourceSwitchSpe,
	}
	var midSource int
	for _, t := range types {
		// A missing key yields 0, matching the original default branch.
		midSource ^= sources[t]
	}
	return midSource
}

View File

@@ -0,0 +1,92 @@
package service
import (
"context"
. "github.com/smartystreets/goconvey/convey"
"go-common/app/interface/live/push-live/model"
"math/rand"
"strconv"
"testing"
)
// TestService_InitPushTask checks that the message fields are copied
// into the resulting task unchanged.
func TestService_InitPushTask(t *testing.T) {
	initd()
	Convey("should return init struct", t, func() {
		targetID = rand.Int63n(100) + 1
		uname := "测试"
		linkValue := strconv.Itoa(rand.Intn(9999))
		roomTitle := "room_title"
		expireTime := rand.Intn(10000) + 1
		// makeTestInitPushTask is a helper defined elsewhere in this package.
		task := makeTestInitPushTask(targetID, uname, linkValue, roomTitle, expireTime)
		So(task.TargetID, ShouldResemble, targetID)
		So(task.AlertTitle, ShouldResemble, uname)
		So(task.AlertBody, ShouldResemble, roomTitle)
		So(task.ExpireTime, ShouldResemble, expireTime)
		So(task.LinkValue, ShouldResemble, linkValue)
	})
}
// TestDao_GetSourceByTypes feeds a random contiguous sub-slice of the
// known strategies into getSourceByTypes and checks the resulting
// bitmask stays within the 4-bit range [0, 15].
func TestDao_GetSourceByTypes(t *testing.T) {
	initd()
	Convey("Get mid_source by different types", t, func() {
		types := []string{model.StrategySwitch, model.StrategyFans, model.StrategySpecial, model.StrategySwitchSpecial}
		lo := rand.Intn(len(types))
		hi := rand.Intn(len(types))
		if lo > hi {
			lo, hi = hi, lo
		}
		midSource := s.getSourceByTypes(types[lo:hi])
		So(midSource, ShouldBeGreaterThanOrEqualTo, 0)
		So(midSource, ShouldBeLessThanOrEqualTo, 15)
	})
}
// TestService_GetFansBySwitch expects both ordinary and special fans
// for a fixed uid. Integration test: depends on live-environment data
// existing for mid 27515316.
func TestService_GetFansBySwitch(t *testing.T) {
	initd()
	Convey("should find some fans id by given target id", t, func() {
		targetID = 27515316
		fans, fansSP, err := s.GetFansBySwitch(context.Background(), targetID)
		So(len(fans), ShouldBeGreaterThan, 0)
		So(len(fansSP), ShouldBeGreaterThan, 0)
		So(err, ShouldBeNil)
	})
}
// TestService_GetFansBySwitchAndSpecial expects only special fans for
// the fixed uid — the ordinary map must stay empty per the method's
// contract. Integration test: depends on live-environment data for
// mid 27515316.
func TestService_GetFansBySwitchAndSpecial(t *testing.T) {
	initd()
	Convey("should find some fans id by given target id", t, func() {
		targetID = 27515316
		fans, fansSP, err := s.GetFansBySwitchAndSpecial(context.Background(), targetID)
		So(len(fans), ShouldEqual, 0)
		So(len(fansSP), ShouldBeGreaterThan, 0)
		So(err, ShouldBeNil)
	})
}
// TestService_GetMids overrides the service strategies and expects a
// non-empty mid list per relation type. Integration test: touches the
// relation/switch backends. Note it mutates s.pushTypes without taking
// the service mutex — acceptable only because tests run sequentially.
func TestService_GetMids(t *testing.T) {
	initd()
	Convey("should find some fans id by given target id", t, func() {
		targetID = 27515316
		uname := "测试"
		linkValue := strconv.Itoa(rand.Intn(9999))
		roomTitle := "room_title"
		expireTime := rand.Intn(10000) + 1
		task := makeTestInitPushTask(targetID, uname, linkValue, roomTitle, expireTime)
		types := []string{"Switch", "Special"}
		s.pushTypes = types
		midMap := s.GetMids(context.Background(), task)
		for _, list := range midMap {
			So(len(list), ShouldBeGreaterThan, 0)
		}
	})
}