Create & Init Project...

2019-04-22 18:49:16 +08:00
commit fc4fa37393
25440 changed files with 4054998 additions and 0 deletions


@@ -0,0 +1,57 @@
package(default_visibility = ["//visibility:public"])
load(
"@io_bazel_rules_go//go:def.bzl",
"go_library",
)
go_library(
name = "go_default_library",
srcs = [
"bloomfilter.go",
"dao.go",
"parallel.go",
"recall.go",
"relation.go",
"user.go",
],
importpath = "go-common/app/service/bbq/recsys/dao",
tags = ["automanaged"],
visibility = ["//visibility:public"],
deps = [
"//app/service/bbq/recsys-recall/api/grpc/v1:go_default_library",
"//app/service/bbq/recsys/api/grpc/v1:go_default_library",
"//app/service/bbq/recsys/conf:go_default_library",
"//app/service/bbq/recsys/dao/parallel:go_default_library",
"//app/service/bbq/recsys/model:go_default_library",
"//app/service/bbq/recsys/service/retrieve:go_default_library",
"//app/service/bbq/search/api/grpc/v1:go_default_library",
"//app/service/bbq/user/api:go_default_library",
"//app/service/main/relation/api:go_default_library",
"//library/cache/memcache:go_default_library",
"//library/cache/redis:go_default_library",
"//library/database/sql:go_default_library",
"//library/log:go_default_library",
"//library/net/rpc/warden:go_default_library",
"//vendor/github.com/Dai0522/go-hash/bloomfilter:go_default_library",
"//vendor/github.com/Dai0522/workpool:go_default_library",
"//vendor/github.com/json-iterator/go:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [
":package-srcs",
"//app/service/bbq/recsys/dao/parallel:all-srcs",
],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)


@@ -0,0 +1,105 @@
package dao
import (
"context"
"errors"
"fmt"
"strconv"
"time"
"go-common/app/service/bbq/recsys/dao/parallel"
"go-common/library/log"
"go-common/library/cache/redis"
"github.com/Dai0522/go-hash/bloomfilter"
"github.com/Dai0522/workpool"
)
const (
_baseBfKey = "BBQ:BF:V1:%s:%s"
)
func userBFRedisKey(k string) string {
d := time.Now().Format("20060102")
return fmt.Sprintf(_baseBfKey, k, d)
}
func (d *Dao) loadBF(c context.Context, mid int64, buvid string) (bf *bloomfilter.BloomFilter, err error) {
var tasks []workpool.Task
if buvid != "" {
buvidK := userBFRedisKey(buvid)
t := parallel.NewRedisTask(&c, d.bfRedis, "GET", buvidK)
tasks = append(tasks, t)
}
if mid != 0 {
midK := userBFRedisKey(strconv.FormatInt(mid, 10))
t := parallel.NewRedisTask(&c, d.bfRedis, "GET", midK)
tasks = append(tasks, t)
}
ftTasks := d.parallelTask(tasks)
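	// wait for each GET result and merge the buvid- and mid-keyed filters into a single bloom filter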
for _, ft := range *ftTasks {
raw, e := ft.Wait(100 * time.Millisecond)
if e != nil && e != redis.ErrNil {
log.Errorv(c, log.KV("BF_GET_ERROR", e), log.KV("TASK", ft.T.(*parallel.RedisTask)))
continue
}
if raw == nil || len(*raw) == 0 {
continue
}
tmp, e := bloomfilter.Load(raw)
if e != nil || tmp == nil {
log.Errorv(c, log.KV("BF_LOAD_ERROR", e), log.KV("TASK", ft.T.(*parallel.RedisTask)), log.KV("raw", *raw))
continue
}
bf = bloomfilter.Merge(bf, tmp)
}
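	// nothing usable in redis: start from a fresh, empty filter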
if bf == nil {
bf, err = bloomfilter.New(1000, 0.0001)
}
return
}
// WriteBF merges the given svids into the user's daily bloom filter and writes it back to redis with a 24h TTL.
func (d *Dao) WriteBF(c context.Context, mid int64, buvid string, svid []uint64) (bool, error) {
if mid == 0 && buvid == "" {
return false, errors.New("mid && buvid can't be empty")
}
// load bf from redis
bf, err := d.loadBF(c, mid, buvid)
if err != nil {
return false, err
}
// put svid
for _, v := range svid {
bf.PutUint64(v)
}
// store bf into redis
var tasks []workpool.Task
b := bf.Serialized()
if buvid != "" {
buvidK := userBFRedisKey(buvid)
t := parallel.NewRedisTask(&c, d.bfRedis, "SETEX", buvidK, 86400, *b)
tasks = append(tasks, t)
}
if mid != 0 {
midK := userBFRedisKey(strconv.FormatInt(mid, 10))
t := parallel.NewRedisTask(&c, d.bfRedis, "SETEX", midK, 86400, *b)
tasks = append(tasks, t)
}
ftTasks := d.parallelTask(tasks)
for _, ft := range *ftTasks {
_, err = ft.Wait(100 * time.Millisecond)
if err != nil {
log.Errorv(c, log.KV("BF_SET_ERROR", err))
}
}
return true, err
}


@@ -0,0 +1,96 @@
package dao
import (
"context"
relation "go-common/app/service/main/relation/api"
"time"
recallv1 "go-common/app/service/bbq/recsys-recall/api/grpc/v1"
"go-common/app/service/bbq/recsys/conf"
searchv1 "go-common/app/service/bbq/search/api/grpc/v1"
user "go-common/app/service/bbq/user/api"
"go-common/library/cache/memcache"
"go-common/library/cache/redis"
xsql "go-common/library/database/sql"
"go-common/library/net/rpc/warden"
"github.com/Dai0522/workpool"
)
// Dao holds the storage pools, the worker pool and the downstream gRPC clients.
type Dao struct {
c *conf.Config
mc *memcache.Pool
redis *redis.Pool
bfRedis *redis.Pool
db *xsql.DB
wp *workpool.Pool
SearchClient searchv1.SearchClient
RecallClient recallv1.RecsysRecallClient
UserClient user.UserClient
RelationClient relation.RelationClient
}
// New creates the Dao: redis/MySQL pools, the worker pool and the gRPC clients.
func New(c *conf.Config) (dao *Dao) {
wpConf := &workpool.PoolConfig{
MaxWorkers: c.WorkerPool.MaxWorkers,
MaxIdleWorkers: c.WorkerPool.MaxIdleWorkers,
MinIdleWorkers: c.WorkerPool.MinIdleWorkers,
KeepAlive: time.Duration(c.WorkerPool.KeepAlive),
}
wp, err := workpool.NewWorkerPool(1024, wpConf)
if err != nil {
panic(err)
}
wp.Start()
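	// NOTE: mc (memcache) and SearchClient are not initialized here; Close treats a nil mc as optional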
dao = &Dao{
c: c,
redis: redis.NewPool(c.Redis),
bfRedis: redis.NewPool(c.BFRedis),
db: xsql.NewMySQL(c.MySQL),
wp: wp,
RecallClient: newRecallClient(c.GRPCClient["recall"]),
UserClient: newUserClient(c.GRPCClient["user"]),
RelationClient: newRelationClient(c.GRPCClient["relation"]),
}
return
}
func newRecallClient(cfg *conf.GRPCConfig) recallv1.RecsysRecallClient {
cc, err := warden.NewClient(cfg.WardenConf).Dial(context.Background(), cfg.Addr)
if err != nil {
panic(err)
}
return recallv1.NewRecsysRecallClient(cc)
}
func newUserClient(cfg *conf.GRPCConfig) user.UserClient {
cc, err := warden.NewClient(cfg.WardenConf).Dial(context.Background(), cfg.Addr)
if err != nil {
panic(err)
}
return user.NewUserClient(cc)
}
func newRelationClient(cfg *conf.GRPCConfig) relation.RelationClient {
cc, err := warden.NewClient(cfg.WardenConf).Dial(context.Background(), cfg.Addr)
if err != nil {
panic(err)
}
return relation.NewRelationClient(cc)
}
// Close releases the dao's underlying resources.
func (d *Dao) Close() {
	if d.mc != nil {
		d.mc.Close()
	}
	d.redis.Close()
	d.bfRedis.Close()
	d.db.Close()
}
// Ping checks the health of the dao's resources.
func (d *Dao) Ping(c context.Context) error {
// TODO: add redis/memcache health checks here if they become required
return d.db.Ping(c)
}


@@ -0,0 +1,58 @@
package dao
import (
"go-common/app/service/bbq/recsys/dao/parallel"
"github.com/Dai0522/workpool"
)
// parallelTask2 submits named tasks to the worker pool and returns their futures keyed by name.
func (d *Dao) parallelTask2(tasks map[string]workpool.Task) map[string]workpool.FutureTask {
ftMap := make(map[string]workpool.FutureTask)
for name, task := range tasks {
ft := workpool.NewFutureTask(task)
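		// submit to the worker pool, retrying up to 3 times if the pool rejects the task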
retry := 0
err := d.wp.Submit(ft)
for err != nil && retry < 3 {
err = d.wp.Submit(ft)
retry++
}
ftMap[name] = *ft
}
return ftMap
}
// parallelTask submits tasks to the worker pool and returns the corresponding futures.
func (d *Dao) parallelTask(tasks []workpool.Task) *[]workpool.FutureTask {
ftArr := make([]workpool.FutureTask, len(tasks))
for i := range tasks {
ft := workpool.NewFutureTask(tasks[i])
retry := 0
err := d.wp.Submit(ft)
for err != nil && retry < 3 {
err = d.wp.Submit(ft)
retry++
}
ftArr[i] = *ft
}
return &ftArr
}
// ParallelRedis runs the given redis tasks in parallel on the worker pool.
func (d *Dao) ParallelRedis(tasks *[]parallel.RedisTask) *[]workpool.FutureTask {
ftArr := make([]workpool.FutureTask, len(*tasks))
for i := range *tasks {
ft := workpool.NewFutureTask(&(*tasks)[i])
retry := 0
err := d.wp.Submit(ft)
for err != nil && retry < 3 {
err = d.wp.Submit(ft)
retry++
}
ftArr[i] = *ft
}
return &ftArr
}


@@ -0,0 +1,32 @@
package(default_visibility = ["//visibility:public"])
load(
"@io_bazel_rules_go//go:def.bzl",
"go_library",
)
go_library(
name = "go_default_library",
srcs = ["redis.go"],
importpath = "go-common/app/service/bbq/recsys/dao/parallel",
tags = ["automanaged"],
visibility = ["//visibility:public"],
deps = [
"//library/cache/redis:go_default_library",
"//library/log:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)


@@ -0,0 +1,63 @@
package parallel
import (
"context"
"unsafe"
"go-common/library/cache/redis"
"go-common/library/log"
)
// RedisTask wraps a single redis command so it can run on the worker pool.
type RedisTask struct {
ctx *context.Context
name string
pool *redis.Pool
cmd string
args []interface{}
}
// NewRedisTaskWithName creates a named redis task for parallel execution.
func NewRedisTaskWithName(ctx *context.Context, name string, pool *redis.Pool, cmd string, args ...interface{}) *RedisTask {
return &RedisTask{
ctx: ctx,
name: name,
pool: pool,
cmd: cmd,
args: args,
}
}
// NewRedisTask creates a redis task for parallel execution.
func NewRedisTask(ctx *context.Context, pool *redis.Pool, cmd string, args ...interface{}) *RedisTask {
return &RedisTask{
ctx: ctx,
pool: pool,
cmd: cmd,
args: args,
}
}
// Run executes the redis command and returns the raw reply.
func (rt *RedisTask) Run() (result *[]byte) {
conn := rt.pool.Get(*rt.ctx)
defer conn.Close()
reply, err := conn.Do(rt.cmd, rt.args...)
if err != nil {
log.Error("RedisTask Run error:[%+v]", err)
return
}
switch reply := reply.(type) {
case []byte:
result = &reply
case string:
b := []byte(reply)
result = &b
default:
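		// non-[]byte/string replies (e.g. HGETALL) are smuggled out through the *[]byte return
		// by reinterpreting the pointer to the interface value; callers reverse this with
		// *(*interface{})(unsafe.Pointer(...)) before decoding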
result = (*[]byte)(unsafe.Pointer(&reply))
}
return
}


@@ -0,0 +1,68 @@
package dao
import (
"context"
recsys "go-common/app/service/bbq/recsys/api/grpc/v1"
"go-common/app/service/bbq/recsys/model"
"go-common/app/service/bbq/recsys/service/retrieve"
"go-common/library/cache/redis"
"go-common/library/log"
"strconv"
"strings"
)
// DownGradeRecall builds a degraded response from the cached hot and selection recall lists.
func (d *Dao) DownGradeRecall(c context.Context) (response *recsys.RecsysResponse, err error) {
conn := d.redis.Get(c)
defer conn.Close()
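	// degraded path: serve the cached hot recall list, then append the cached selection (op) recall list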
key := retrieve.RecallHotDefault
var str string
if str, err = redis.String(conn.Do("GET", key)); err != nil {
if err == redis.ErrNil {
err = nil
} else {
log.Errorw(c, "recall", "get hot recall error", "err", err)
}
}
response = new(recsys.RecsysResponse)
response.Message = make(map[string]string)
records := make([]*recsys.RecsysRecord, 0)
response.Message[model.ResponseDownGrade] = "2"
for _, svidStr := range strings.Split(str, ",") {
svid, parseErr := strconv.ParseInt(svidStr, 10, 64)
if parseErr != nil {
	continue
}
record := &recsys.RecsysRecord{
Svid: svid,
Score: 0,
Map: make(map[string]string),
}
record.Map[model.RecallClasses] = retrieve.HotRecall
records = append(records, record)
}
key = retrieve.RecallOpVideoKey
if str, err = redis.String(conn.Do("GET", key)); err != nil {
if err == redis.ErrNil {
err = nil
} else {
log.Errorw(c, "recall", "get selection recall error", "err", err)
}
}
for _, svidStr := range strings.Split(str, ",") {
svid, parseErr := strconv.ParseInt(svidStr, 10, 64)
if parseErr != nil {
	continue
}
record := &recsys.RecsysRecord{
Svid: svid,
Score: 0,
Map: make(map[string]string),
}
record.Map[model.RecallClasses] = retrieve.SelectionRecall
records = append(records, record)
}
response.List = records
return
}


@@ -0,0 +1,46 @@
package dao
import (
"context"
"go-common/app/service/bbq/recsys/model"
"go-common/app/service/bbq/user/api"
"go-common/library/log"
)
// GetUserFollow fills u.BBQFollow with the mids the user follows.
func (d *Dao) GetUserFollow(c context.Context, mid int64, u *model.UserProfile) (err error) {
if mid == 0 {
return
}
relationReq := &api.ListRelationReq{Mid: mid}
listRelationReply, err := d.UserClient.ListFollow(c, relationReq)
if err != nil {
log.Errorv(c, log.KV("event", "ListFollow"), log.KV("error", err))
return
}
for _, MID := range listRelationReply.List {
u.BBQFollow[MID] = 1
}
return
}
// GetUserBlack fills u.BBQBlack with the mids the user has blacklisted.
func (d *Dao) GetUserBlack(c context.Context, mid int64, u *model.UserProfile) (err error) {
if mid == 0 {
return
}
relationReq := &api.ListRelationReq{Mid: mid}
listRelationReply, err := d.UserClient.ListBlack(c, relationReq)
if err != nil {
log.Errorv(c, log.KV("event", "ListBlack"), log.KV("error", err))
return
}
for _, MID := range listRelationReply.List {
u.BBQBlack[MID] = 1
}
return
}


@@ -0,0 +1,496 @@
package dao
import (
"context"
"fmt"
"strconv"
"strings"
"time"
"unsafe"
recsys "go-common/app/service/bbq/recsys/api/grpc/v1"
"go-common/app/service/bbq/recsys/dao/parallel"
"go-common/app/service/bbq/recsys/model"
"go-common/library/cache/redis"
"go-common/library/log"
"github.com/Dai0522/workpool"
"github.com/json-iterator/go"
)
// redis key templates and parallel task names for user profile loading
const (
TaskLastPage = "TaskLastPage"
TaskLastUpsPage = "TaskLastUpsPage"
TaskBiliUserProfile = "TaskBiliUserProfile"
TaskBBQUserProfile = "TaskBBQUserProfile"
TaskBBQDeviceProfile = "TaskBBQDeviceProfile"
TaskUserLike = "TaskUserLike"
TaskUserLikeYesterday = "TaskUserLikeYesterday"
TaskUserPlay = "TaskUserPlay"
TaskUserPlayYesterday = "TaskUserPlayYesterday"
TaskDevicePlay = "TaskDevicePlay"
TaskDevicePlayYesterday = "TaskDevicePlayYesterday"
TaskUserFollow = "TaskUserFollow"
TaskUserFollowYesterday = "TaskUserFollowYesterday"
//_BBQDeviceProfileKey = "bbq:device:profile:%s"
_BBQDeviceProfileKey = "bbq:device:profile:{buvid}:%s"
_BBQUserProfileKey = "bbq:user:profile:%d"
_BiliUserProfileKey = "bbq:user:basic:%d"
_LastFewPageRecords1 = "bbq:last:v1:mid:%d"
_LastFewPageRecords2 = "bbq:last:v1:buvid:%s"
_LastFewUpsPageRecords1 = "bbq:last:v1:ups:mid:%d"
_LastFewUpsPageRecords2 = "bbq:last:v1:ups:buvid:%s"
_RealTimeUserLike = "storm:v2:u:%d:like:%s"
_RealTimeUserPlayMID = "storm:v2:u:%d:%s:view:100"
_RealTimeUserPlayBuvID = "storm:v2:u:%s:%s:view:100"
_RealTimeUserFollow = "storm:v2:u:%d:%s:follow:100"
_ModelTest = "bbq:model:init"
_Zone = "zone"
_Tag = "tag"
_Up = "up"
)
//LastPageRedisKey for main rec process
func (d *Dao) LastPageRedisKey(mid int64, buvid string) (key string) {
if mid > 0 {
key = fmt.Sprintf(_LastFewPageRecords1, mid)
} else {
key = fmt.Sprintf(_LastFewPageRecords2, buvid)
}
return
}
//LastUpsPageRedisKey for ups rec process
func (d *Dao) LastUpsPageRedisKey(mid int64, buvid string) (key string) {
if mid > 0 {
key = fmt.Sprintf(_LastFewUpsPageRecords1, mid)
} else {
key = fmt.Sprintf(_LastFewUpsPageRecords2, buvid)
}
return
}
//InitModel ...
func (d *Dao) InitModel(c context.Context, weights map[string]float64) (err error) {
conn := d.redis.Get(c)
defer conn.Close()
key := _ModelTest
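	// model weights are cached as a comma-separated list of feature:weight pairs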
if result, err := redis.String(conn.Do("GET", key)); err == nil {
for _, field := range strings.Split(result, ",") {
featureWeightPair := strings.Split(field, ":")
if len(featureWeightPair) >= 2 {
feature := featureWeightPair[0]
weight, _ := strconv.ParseFloat(featureWeightPair[1], 64)
weights[feature] = weight
}
}
}
return
}
//StoreRecResults store rec or upsRec history according to getKeyFunc
func (d *Dao) StoreRecResults(c context.Context, u *model.UserProfile, mid int64, buvid string, response *recsys.RecsysResponse, getKeyFunc func(int64, string) string, lastRecords []model.Record4Dup) (err error) {
conn := d.redis.Get(c)
defer conn.Close()
key := getKeyFunc(mid, buvid)
maxPageNum := 10
size := len(response.List)
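	// keep at most maxPageNum pages of dedup history; drop the oldest page once the cap is exceeded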
if len(lastRecords) > maxPageNum*size {
lastRecords = lastRecords[size:]
}
for _, record := range response.List {
svid := record.Svid
mid, ok1 := record.Map[model.UperMid]
tag, ok2 := record.Map[model.ScatterTag]
if ok1 && ok2 {
lastRecords = append(lastRecords, model.Record4Dup{
SVID: svid,
MID: mid,
Tag: tag,
})
}
}
bytes, _ := jsoniter.Marshal(lastRecords)
_, err = conn.Do("SETEX", key, 86400, bytes)
if err != nil {
log.Error("store last few records error: ", err)
}
// write bloomfilter for es
svids := make([]uint64, len(response.List))
for i, v := range response.List {
svids[i] = uint64(v.Svid)
}
if _, bfErr := d.WriteBF(c, mid, buvid, svids); bfErr != nil {
log.Errorv(c, log.KV("Write BF error: ", bfErr))
}
return
}
//InitUserProfile ...
func (d *Dao) InitUserProfile(c context.Context, mid int64, buvid string) (u *model.UserProfile) {
u = &model.UserProfile{
Mid: mid,
Buvid: buvid,
Name: "",
Gender: -1,
ViewVideos: []int64{},
Zones1: map[string]float64{},
BiliTags: map[string]float64{}, //bili
Zones2: map[string]float64{}, //bili
FollowUps: map[int64]int64{}, //bili
BBQTags: map[string]float64{}, //bbq
BBQZones: map[string]float64{}, //bbq
BBQPrefUps: map[int64]int64{}, //bbq
BBQFollowAction: map[int64]int64{}, //bbq
BBQFollow: map[int64]int64{}, //bbq
BBQBlack: map[int64]int64{}, //bbq
PosVideos: map[int64]int64{},
NegVideos: map[int64]int64{},
LikeVideos: map[int64]int64{},
LikeTags: map[string]float64{},
LikeTagIDs: map[int64]int64{},
LikeUPs: map[int64]int64{},
PosTagIDs: map[int64]int64{},
NegTagIDs: map[int64]int64{},
PosTags: map[string]float64{},
NegTags: map[string]float64{},
LastRecords: []model.Record4Dup{},
}
return
}
//LoadUserProfile load user info from redis parallel
func (d *Dao) LoadUserProfile(c context.Context, mid int64, buvid string) (userProfile *model.UserProfile, err error) {
tasks := make(map[string]workpool.Task)
userProfile = d.InitUserProfile(c, mid, buvid)
// lastPage
if mid != 0 || buvid != "" {
taskName := TaskLastPage
key := fmt.Sprintf(_LastFewPageRecords2, buvid)
if mid != 0 {
key = fmt.Sprintf(_LastFewPageRecords1, mid)
}
task := parallel.NewRedisTaskWithName(&c, taskName, d.redis, "GET", key)
tasks[taskName] = task
}
if mid != 0 || buvid != "" {
taskName := TaskLastUpsPage
key := fmt.Sprintf(_LastFewUpsPageRecords2, buvid)
if mid != 0 {
key = fmt.Sprintf(_LastFewUpsPageRecords1, mid)
}
task := parallel.NewRedisTaskWithName(&c, taskName, d.redis, "GET", key)
tasks[taskName] = task
}
// user profile bili
if mid != 0 {
taskName := TaskBiliUserProfile
key := fmt.Sprintf(_BiliUserProfileKey, mid)
task := parallel.NewRedisTaskWithName(&c, taskName, d.redis, "HGETALL", key)
tasks[taskName] = task
}
// user profile bbq: mid
if mid != 0 {
taskName := TaskBBQUserProfile
key := fmt.Sprintf(_BBQUserProfileKey, mid)
task := parallel.NewRedisTaskWithName(&c, taskName, d.redis, "HGETALL", key)
tasks[taskName] = task
}
// user profile bbq: buvid
if mid == 0 && buvid != "" {
taskName := TaskBBQDeviceProfile
key := fmt.Sprintf(_BBQDeviceProfileKey, buvid)
task := parallel.NewRedisTask(&c, d.redis, "HGETALL", key)
tasks[taskName] = task
}
// user real time like
today := time.Now().Format("20060102")
yesterday := time.Now().AddDate(0, 0, -1).Format("20060102")
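	// real-time like/play/follow keys are sharded by day, so both today's and yesterday's keys are fetched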
if mid != 0 {
taskName := TaskUserLike
key := fmt.Sprintf(_RealTimeUserLike, mid, today)
task := parallel.NewRedisTask(&c, d.redis, "HGETALL", key)
tasks[taskName] = task
}
if mid != 0 {
taskName := TaskUserLikeYesterday
key := fmt.Sprintf(_RealTimeUserLike, mid, yesterday)
task := parallel.NewRedisTask(&c, d.redis, "HGETALL", key)
tasks[taskName] = task
}
if mid != 0 {
taskName := TaskUserFollow
key := fmt.Sprintf(_RealTimeUserFollow, mid, today)
task := parallel.NewRedisTask(&c, d.redis, "HGETALL", key)
tasks[taskName] = task
}
if mid != 0 {
taskName := TaskUserFollowYesterday
key := fmt.Sprintf(_RealTimeUserFollow, mid, yesterday)
task := parallel.NewRedisTask(&c, d.redis, "HGETALL", key)
tasks[taskName] = task
}
if mid != 0 {
taskName := TaskUserPlay
key := fmt.Sprintf(_RealTimeUserPlayMID, mid, today)
task := parallel.NewRedisTask(&c, d.redis, "HGETALL", key)
tasks[taskName] = task
}
if mid != 0 {
taskName := TaskUserPlayYesterday
key := fmt.Sprintf(_RealTimeUserPlayMID, mid, yesterday)
task := parallel.NewRedisTask(&c, d.redis, "HGETALL", key)
tasks[taskName] = task
}
if mid == 0 && buvid != "" {
taskName := TaskDevicePlay
key := fmt.Sprintf(_RealTimeUserPlayBuvID, buvid, today)
task := parallel.NewRedisTask(&c, d.redis, "HGETALL", key)
tasks[taskName] = task
}
if mid == 0 && buvid != "" {
taskName := TaskDevicePlayYesterday
key := fmt.Sprintf(_RealTimeUserPlayBuvID, buvid, yesterday)
task := parallel.NewRedisTask(&c, d.redis, "HGETALL", key)
tasks[taskName] = task
}
ftTasks := d.parallelTask2(tasks)
for name, task := range ftTasks {
var raw *[]byte
raw, err = task.Wait(100 * time.Millisecond)
if err != nil && err != redis.ErrNil {
log.Errorv(c, log.KV("REDIS_GET_ERROR", err))
continue
}
if raw == nil {
continue
}
switch name {
case TaskLastPage:
setLastPage(raw, userProfile, "lastRecords")
case TaskLastUpsPage:
setLastPage(raw, userProfile, "lastUpsRecords")
case TaskBiliUserProfile:
setUserProfileBili(raw, err, userProfile)
case TaskBBQDeviceProfile:
setUserProfileBBQ(raw, err, userProfile)
case TaskBBQUserProfile:
setUserProfileBBQ(raw, err, userProfile)
case TaskUserLikeYesterday:
setUserLikeInfo(raw, err, userProfile)
case TaskUserLike:
setUserLikeInfo(raw, err, userProfile)
case TaskUserFollowYesterday:
setUserFollowInfo(raw, err, userProfile)
case TaskUserFollow:
setUserFollowInfo(raw, err, userProfile)
case TaskUserPlayYesterday:
setUserPlayInfo(raw, err, userProfile)
case TaskDevicePlayYesterday:
setUserPlayInfo(raw, err, userProfile)
case TaskUserPlay:
setUserPlayInfo(raw, err, userProfile)
case TaskDevicePlay:
setUserPlayInfo(raw, err, userProfile)
}
}
if err == redis.ErrNil {
err = nil
}
return
}
func setUserProfileBBQ(bytes *[]byte, inErr error, u *model.UserProfile) (err error) {
var res map[string]string
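	// recover the interface{} reply that RedisTask.Run smuggled through *[]byte, then decode the HGETALL map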
if res, err = redis.StringMap(*(*interface{})(unsafe.Pointer(bytes)), inErr); err != nil {
if err == redis.ErrNil {
err = nil
} else {
log.Error("redis HGETALL failed error(%v)", err)
}
}
for key, value := range res {
if key == _Zone {
zone2s := strings.Split(value, ",")
for _, zone2 := range zone2s {
u.BBQZones[zone2] = 1.0
}
} else if key == _Tag {
tags := strings.Split(value, ",")
for _, tag := range tags {
u.BBQTags[tag] = 1.0
}
} else if key == _Up {
ups := strings.Split(value, ",")
for _, upStr := range ups {
upMID, _ := strconv.ParseInt(upStr, 10, 64)
u.BBQPrefUps[upMID] = 1
}
}
}
return
}
func setUserProfileBili(bytes *[]byte, inErr error, u *model.UserProfile) {
var res map[string]string
var err error
if res, err = redis.StringMap(*(*interface{})(unsafe.Pointer(bytes)), inErr); err != nil {
if err == redis.ErrNil {
err = nil
} else {
log.Error("redis HGETALL failed error(%v)", err)
}
}
for key, value := range res {
if key == _Zone {
zone2s := strings.Split(value, ",")
for _, zone2 := range zone2s {
u.Zones2[zone2] = 1.0
}
} else if key == _Tag {
tags := strings.Split(value, ",")
for _, tag := range tags {
u.BiliTags[tag] = 1.0
}
} else if key == _Up {
ups := strings.Split(value, ",")
for _, upStr := range ups {
upMID, _ := strconv.ParseInt(upStr, 10, 64)
u.FollowUps[upMID] = 1
}
}
}
}
func setUserLikeInfo(bytes *[]byte, inErr error, u *model.UserProfile) {
var object struct {
SVID int64 `json:"svid"`
CTime int64 `json:"ctime"`
BuvID string `json:"buvid"`
}
var res map[string]string
var err error
if res, err = redis.StringMap(*(*interface{})(unsafe.Pointer(bytes)), inErr); err != nil {
if err != redis.ErrNil {
log.Error("redis HGETALL failed error(%v)", err)
}
}
for _, value := range res {
err = jsoniter.UnmarshalFromString(value, &object)
if err != nil {
	log.Error("json parse error: %v", err)
	continue
}
u.LikeVideos[object.SVID] = object.CTime
}
}
func setUserFollowInfo(bytes *[]byte, inErr error, u *model.UserProfile) {
var object struct {
UpID int64 `json:"upid"`
CTime int64 `json:"ctime"`
MID int64 `json:"mid"`
}
var res map[string]string
var err error
if res, err = redis.StringMap(*(*interface{})(unsafe.Pointer(bytes)), inErr); err != nil {
if err != redis.ErrNil {
log.Error("user real time follow redis HGETALL failed error(%v)", err)
}
}
for _, value := range res {
err = jsoniter.UnmarshalFromString(value, &object)
if err != nil {
	log.Error("json parse error: %v", err)
	continue
}
u.BBQFollowAction[object.UpID] = object.CTime
}
}
func setUserPlayInfo(bytes *[]byte, inErr error, u *model.UserProfile) {
var object struct {
Svid int64 `json:"svid"`
CTime int64 `json:"ctime"`
Duration int64 `json:"duration"`
ViewDuration int64 `json:"viewDuration"`
}
var res map[string]string
var err error
if res, err = redis.StringMap(*(*interface{})(unsafe.Pointer(bytes)), inErr); err != nil {
if err != redis.ErrNil {
log.Error("redis HGETALL failed error(%v)", err)
} else {
err = nil
}
}
for _, value := range res {
err = jsoniter.UnmarshalFromString(value, &object)
if err != nil {
log.Error("json parse error: %v", err)
continue
}
u.ViewVideos = append(u.ViewVideos, object.Svid)
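		// positive: ViewDuration >= 15000, or at least 95% watched when Duration >= 5000; negative: ViewDuration <= 500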
if object.ViewDuration >= 15000 || (object.Duration >= 5000 && float64(object.ViewDuration) >= 0.95*float64(object.Duration)) {
u.PosVideos[object.Svid] = object.CTime
}
if object.ViewDuration <= 500 {
u.NegVideos[object.Svid] = object.CTime
}
}
}
func setLastPage(bytes *[]byte, u *model.UserProfile, lastRecordType string) {
var results []model.Record4Dup
if len(*bytes) == 0 {
return
}
err := jsoniter.Unmarshal(*bytes, &results)
if err != nil {
log.Error("UnmarshalFromString value(%v) error(%v)", bytes, err)
} else {
if lastRecordType == "lastRecords" {
u.LastRecords = results
} else {
u.LastUpsRecords = results
}
}
}