Create & Init Project...

This commit is contained in:
2019-04-22 18:49:16 +08:00
commit fc4fa37393
25440 changed files with 4054998 additions and 0 deletions

View File

@@ -0,0 +1,18 @@
# Auto-managed Bazel source filegroups (tags = ["automanaged"]).
# package-srcs: every file in this package, private to the build graph.
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)

# all-srcs: this package's sources plus the service sub-packages, exported
# for recursive source collection.
filegroup(
    name = "all-srcs",
    srcs = [
        ":package-srcs",
        "//app/job/main/tv/service/pgc:all-srcs",
        "//app/job/main/tv/service/report:all-srcs",
        "//app/job/main/tv/service/ugc:all-srcs",
    ],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,75 @@
# BUILD file for the pgc service package (auto-managed, tags = ["automanaged"]).
package(default_visibility = ["//visibility:public"])

load(
    "@io_bazel_rules_go//go:def.bzl",
    "go_test",
    "go_library",
)

# Unit tests, run in the package directory with the library embedded.
go_test(
    name = "go_default_test",
    srcs = [
        "sea_con_test.go",
        "service_test.go",
        "sync_mc_test.go",
        "zone_index_test.go",
    ],
    embed = [":go_default_library"],
    rundir = ".",
    tags = ["automanaged"],
    deps = [
        "//app/job/main/tv/conf:go_default_library",
        "//vendor/github.com/smartystreets/goconvey/convey:go_default_library",
    ],
)

# The pgc service library itself.
go_library(
    name = "go_default_library",
    srcs = [
        "del_cont.go",
        "del_season.go",
        "filter.go",
        "license.go",
        "sea_con.go",
        "search_sug.go",
        "service.go",
        "sync_ep.go",
        "sync_mc.go",
        "sync_retry.go",
        "sync_season.go",
        "zone_index.go",
    ],
    importpath = "go-common/app/job/main/tv/service/pgc",
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
    deps = [
        "//app/job/main/tv/conf:go_default_library",
        "//app/job/main/tv/dao/app:go_default_library",
        "//app/job/main/tv/dao/cms:go_default_library",
        "//app/job/main/tv/dao/ftp:go_default_library",
        "//app/job/main/tv/dao/lic:go_default_library",
        "//app/job/main/tv/dao/playurl:go_default_library",
        "//app/job/main/tv/model/common:go_default_library",
        "//app/job/main/tv/model/pgc:go_default_library",
        "//library/ecode:go_default_library",
        "//library/log:go_default_library",
        "//library/queue/databus:go_default_library",
        "//library/time:go_default_library",
        "//library/xstr:go_default_library",
        "//vendor/github.com/robfig/cron:go_default_library",
    ],
)

# Source filegroups for recursive source collection.
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)

filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,59 @@
package pgc
import (
"database/sql"
"time"
"go-common/app/job/main/tv/dao/lic"
model "go-common/app/job/main/tv/model/pgc"
"go-common/library/log"
)
// delCont syncs the deleted EP data to the license owner.
// It loops until the dao is logically closed and signals s.waiter on exit.
func (s *Service) delCont() {
	var (
		sign   = s.c.Sync.Sign
		prefix = s.c.Sync.AuditPrefix
	)
	defer s.waiter.Done()
	for {
		if s.daoClosed {
			log.Info("delCont DB closed!")
			return
		}
		// pick the batch of deleted contents waiting for sync
		delCont, err := s.dao.DelCont(ctx)
		if err == sql.ErrNoRows || len(delCont) == 0 {
			log.Info("No deleted data to pick from Cont to sync")
			time.Sleep(time.Duration(s.c.Sync.Frequency.FreModSeason))
			continue
		}
		delEpids := []int{}
		for _, v := range delCont {
			delEpids = append(delEpids, v.EPID)
		}
		s.dao.DelaySync(ctx, delCont) // avoid always being stuck by one error data
		body := lic.DelEpLic(prefix, sign, delEpids)
		// call the license owner's deletion API (retried internally)
		var res *model.Document
		res, err = s.licDao.CallRetry(ctx, s.c.Sync.API.DelEPURL, body)
		if err != nil { // still failing after the internal retries
			log.Error("DelEPURL interface not available! %v", err)
			time.Sleep(time.Duration(s.c.Sync.Frequency.ErrorWait))
			continue
		}
		// on success, flip each content's state to auditing
		// (err is necessarily nil here, so only res needs checking)
		if res != nil {
			for _, v := range delCont {
				if _, err = s.dao.SyncCont(ctx, v.EPID); err != nil {
					// BUGFIX: log the EPID that failed (was v.ID, the row ID)
					log.Error("SyncCont EP %v to auditing fail!", v.EPID)
					continue
				}
			}
		}
		// pause after each loop
		time.Sleep(1 * time.Second)
	}
}

View File

@@ -0,0 +1,54 @@
package pgc
import (
"database/sql"
"time"
"go-common/app/job/main/tv/dao/lic"
"go-common/library/log"
)
// delSeason syncs the deleted season data to the license owner.
// It loops until the dao is logically closed and signals s.waiter on exit.
func (s *Service) delSeason() {
	var (
		sign   = s.c.Sync.Sign
		prefix = s.c.Sync.AuditPrefix
	)
	defer s.waiter.Done()
	for {
		if s.daoClosed {
			log.Info("delSeason DB closed!")
			return
		}
		// pick the batch of deleted seasons waiting for sync
		delSeason, err := s.dao.DelSeason(ctx)
		if err == sql.ErrNoRows || len(delSeason) == 0 {
			log.Info("No deleted data to pick from Season to sync")
			time.Sleep(time.Duration(s.c.Sync.Frequency.FreModSeason))
			continue
		}
		for _, v := range delSeason {
			data := lic.DelLic(sign, prefix, v.ID)
			// ignore the program part during modified season sync
			body := lic.PrepareXML(data)
			res, err := s.licDao.CallRetry(ctx, s.c.Sync.API.DelSeasonURL, body)
			// still failing after the internal retries (3 times)
			if err != nil {
				log.Error("DelSeasonURL interface not available!Sid: %v, Err: %v", v.ID, err)
				s.dao.DelaySeason(ctx, v.ID)
				time.Sleep(time.Duration(s.c.Sync.Frequency.ErrorWait))
				// avoid always be stuck by one error data: abandon the rest
				// of this batch and retry on the next pass
				break
			}
			if err == nil && res != nil {
				// mark the season rejected after a successful deletion call
				_, err := s.dao.RejectSeason(ctx, int(v.ID))
				if err != nil {
					log.Error("DelSeasonSync season %v to rejected fail!", v.ID)
					// sync next one
					continue
				}
			}
		}
		// pause after each loop
		time.Sleep(1 * time.Second)
	}
}

View File

@@ -0,0 +1,91 @@
package pgc
import (
"context"
"fmt"
"strings"
model "go-common/app/job/main/tv/model/pgc"
"go-common/library/log"
"go-common/library/xstr"
)
// batchFilter walks a batch of seasonCMS data, resolving each season's
// newest episode number and updating the struct in place. Seasons whose
// newest number cannot be determined are left untouched.
func (s *Service) batchFilter(ctx context.Context, snCMS []*model.SeasonCMS) {
	for _, sn := range snCMS {
		newest, err := s.newestNB(sn.SeasonID)
		if err != nil || newest == 0 {
			continue
		}
		sn.NewestNb = newest
	}
}
// newestNB picks all the eps of the season and applies the title filter to
// calculate the newest episode number: the count of eps whose title does
// not contain any forbidden keyword.
func (s *Service) newestNB(sid int) (newest int, err error) {
	var (
		keywords = s.c.Cfg.TitleFilter  // forbidden title keywords
		strategy = s.c.Cfg.LessStrategy // strategy flag passed through to AllEP
	)
	eps, err := s.dao.AllEP(ctx, sid, strategy)
	if err != nil {
		log.Warn("AllEP newestNB SeasonID %d, Err %v", sid, err)
		return
	}
	for _, v := range eps {
		if titleCheck(keywords, v.Title) { // skip eps with forbidden titles
			continue
		}
		newest++
	}
	if newest == 0 {
		log.Warn("AllEP newestNB SeasonID %d, After Filter it's empty", sid)
	}
	return
}
// titleCheck reports whether the title contains any of the forbidden keywords.
func titleCheck(keywords []string, title string) bool {
	matched := false
	for i := 0; i < len(keywords) && !matched; i++ {
		matched = strings.Contains(title, keywords[i])
	}
	return matched
}
// cmsShelve recomputes the on/off-shelve state of seasons and sends a
// notification with the operated IDs through Merak.
func (s *Service) cmsShelve() {
	var (
		ctx           = context.Background()
		cfg           = s.c.Cfg.Merak
		validMap      map[int64]int
		onIDs, offIDs []int64
		err           error
	)
	// pick the currently valid seasons (cfg.Onlyfree filters the selection)
	if validMap, err = s.cmsDao.ValidSns(ctx, cfg.Onlyfree); err != nil {
		log.Error("cmsShelve ValidSns Err %v", err)
		return
	}
	// compute which seasons must be put on / taken off the shelves
	if onIDs, offIDs, err = s.cmsDao.ShelveOp(ctx, validMap); err != nil {
		log.Error("cmsShelve ShelveOp err %v", err)
		return
	}
	// apply the two operations; failures are logged but do not stop the notify
	if len(onIDs) > 0 {
		if err = s.cmsDao.ActOps(ctx, onIDs, true); err != nil {
			log.Error("cmsShelve ActOps OnIDs %v, Err %v", onIDs, err)
		}
	}
	if len(offIDs) > 0 {
		if err = s.cmsDao.ActOps(ctx, offIDs, false); err != nil {
			log.Error("cmsShelve ActOps OffIDs %v, Err %v", offIDs, err)
		}
	}
	log.Info("cmsShelve OnIDs %v, OffIDs %v", onIDs, offIDs)
	// notify Merak with the operated IDs rendered into the configured template
	content := fmt.Sprintf(cfg.Template, xstr.JoinInts(onIDs), xstr.JoinInts(offIDs))
	if err = s.cmsDao.MerakNotify(ctx, cfg.Title, content); err != nil {
		log.Error("Merak Content %s, Err %v", content, err)
	}
}

View File

@@ -0,0 +1,68 @@
package pgc
import (
"fmt"
"strconv"
"go-common/app/job/main/tv/conf"
"go-common/app/job/main/tv/dao/lic"
model "go-common/app/job/main/tv/model/pgc"
)
// categories maps the season category ID to its display name
// (used as ProgramSetType when building the license XML).
var categories = map[int8]string{
	1: "番剧",
	2: "电影",
	3: "纪录片",
	4: "国漫",
	5: "电视剧",
}

// zones maps the legacy numeric area ID to its display name.
var zones = map[int64]string{
	1: "中国",
	2: "日本",
}

// _zoneNotFound is the fallback area name for unknown numeric area IDs.
const _zoneNotFound = "其他"
// newLic creates the skeleton of the license struct: one license struct per
// season, containing a single program set with an empty program list.
func newLic(Season *model.TVEpSeason, conf *conf.Sync) *model.License {
	var (
		ps   []*model.PS
		sign = conf.Sign
		area string
		ok   bool
	)
	// compatible with old version (area was an int ID); new data stores the
	// area name directly
	if areaInt, _ := strconv.ParseInt(Season.Area, 10, 64); areaInt != 0 {
		if area, ok = zones[areaInt]; !ok {
			area = _zoneNotFound
		}
	} else { // new logic, directly transform
		area = Season.Area
	}
	var programS = &model.PS{
		ProgramSetID:     conf.AuditPrefix + fmt.Sprintf("%d", Season.ID),
		ProgramSetName:   Season.Title,
		ProgramSetClass:  Season.Style,
		ProgramSetType:   categories[Season.Category],
		PublishDate:      Season.PlayTime.Time().Format("2006-01-02"),
		Copyright:        Season.Copyright,
		ProgramCount:     int(Season.TotalNum),
		CREndData:        "1970-01-01",
		DefinitionType:   "SD",
		CpCode:           conf.LConf.CPCode,
		PayStatus:        Season.Status,
		PrimitiveName:    Season.OriginName,
		Alias:            Season.Alias,
		Zone:             area,
		LeadingRole:      Season.Role,
		ProgramSetDesc:   Season.Desc,
		Staff:            Season.Staff,
		ProgramSetPoster: Season.Cover,
		ProgramList:      &model.ProgramList{},
		Producer:         Season.Producer,
		SubGenre:         Season.Version,
	}
	ps = append(ps, programS)
	return lic.BuildLic(sign, ps, 0)
}

View File

@@ -0,0 +1,88 @@
package pgc
import (
"encoding/json"
"os"
"time"
model "go-common/app/job/main/tv/model/pgc"
"go-common/library/log"
)
const (
//ContLimit is used for getting ugc value 50 records every time
_ContLimit = 50
)
// seaPgcContproc periodically regenerates and uploads the pgc search
// content file until the dao is logically closed.
func (s *Service) seaPgcContproc() {
	for !s.daoClosed {
		s.seaPgcCont()
		time.Sleep(time.Duration(s.c.Search.Cfg.UploadFre))
	}
	log.Info("seaPgcContproc DB closed!")
}
// seaPgcCont generates the pgc search content file (one JSON document per
// line) and uploads it, plus its md5 file, to the search FTP.
func (s *Service) seaPgcCont() {
	var (
		err     error
		seasons []*model.SearPgcCon
		str     []byte // the json string to write in file
		cnt     int
		cycle   int // number of batches; each batch holds _ContLimit records
		id      int // last seen ID, used as the paging cursor
	)
	if cnt, err = s.dao.PgcContCount(ctx); err != nil {
		log.Error(errFormat, "searchCont", "OnlineSeasonsC", err)
		return
	}
	cycle = cnt / _ContLimit
	if cnt%_ContLimit != 0 {
		cycle = cnt/_ContLimit + 1
	}
	// open the target file (truncated on every run)
	// BUGFIX: the original named this variable "error" (shadowing the builtin
	// type) and then logged the stale, always-nil "err" on failure
	file, err := os.OpenFile(s.c.Search.PgcContPath, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0766)
	if err != nil {
		log.Error(errFormat, "searchSug", "OpenFile", err)
		return
	}
	// BUGFIX: close on every path; the early returns below used to leak the file
	defer file.Close()
	// fetch batch by batch, following the increasing ID cursor
	for i := 0; i < cycle; i++ {
		if i == 0 {
			id = 0
		} else {
			id = seasons[len(seasons)-1].ID
		}
		if seasons, err = s.dao.PgcCont(ctx, id, _ContLimit); err != nil {
			log.Error(errFormat, "PgcCont", "PgcCont", err)
			return
		}
		if len(seasons) == 0 { // guard: an empty batch would panic on the cursor above
			break
		}
		for _, v := range seasons {
			if str, err = json.Marshal(v); err != nil {
				log.Error(errFormat, "searchSug", "JsonMarshal", err)
				return
			}
			file.WriteString(string(str) + "\n")
		}
	}
	// calculate file's md5 (os.File writes are unbuffered, so the content is
	// already on disk here)
	if err = s.ftpDao.FileMd5(s.c.Search.PgcContPath, s.c.Search.PgcContMd5Path); err != nil {
		log.Error(errFormat, "searPgcCont", "fileMd5", err)
		return
	}
	// upload original file
	if err = s.ftpDao.UploadFile(s.c.Search.PgcContPath, s.c.Search.FTP.RemotePgcCont, s.c.Search.FTP.RemotePgcURL); err != nil {
		log.Error(errFormat, "searPgcCont-File", "uploadFile", err)
		return
	}
	// upload md5 file
	if err = s.ftpDao.UploadFile(s.c.Search.PgcContMd5Path, s.c.Search.FTP.RemotePgcContMd5, s.c.Search.FTP.RemotePgcURL); err != nil {
		log.Error(errFormat, "searPgcCont-Md5", "uploadFile", err)
		return
	}
	log.Info("FTP Upload Success")
}

View File

@@ -0,0 +1,13 @@
package pgc
import (
"testing"
. "github.com/smartystreets/goconvey/convey"
)
// TestService_SearPgcCon smoke-tests the pgc search content generation.
func TestService_SearPgcCon(t *testing.T) {
	Convey("search season content count", t, WithService(func(s *Service) {
		s.seaPgcCont()
	}))
}

View File

@@ -0,0 +1,101 @@
package pgc
import (
"encoding/json"
"os"
"time"
model "go-common/app/job/main/tv/model/pgc"
"go-common/library/log"
)
const (
errFormat = "Func:[%s] - Step:[%s] - Error:[%v]"
)
// pgcSeaSug dumps the pgc search suggestions into f, one JSON document per line.
func (s *Service) pgcSeaSug(f *os.File) (err error) {
	var sug []*model.SearchSug
	if sug, err = s.dao.PgcSeaSug(ctx); err != nil {
		log.Error(errFormat, "searchSug", "PgcSeaSug", err)
		return
	}
	for _, item := range sug {
		var line []byte // the json string to write in file
		if line, err = json.Marshal(item); err != nil {
			log.Error(errFormat, "searchSug", "JsonMarshal", err)
			return
		}
		f.WriteString(string(line) + "\n")
	}
	return
}
// ugcSeaSug dumps the ugc search suggestions into f, one JSON document per line.
func (s *Service) ugcSeaSug(f *os.File) (err error) {
	var sug []*model.SearchSug
	if sug, err = s.dao.UgcSeaSug(ctx); err != nil {
		log.Error(errFormat, "ugcSeaSug", "UgcSeaSug", err)
		return
	}
	for _, item := range sug {
		var line []byte // the json string to write in file
		if line, err = json.Marshal(item); err != nil {
			log.Error(errFormat, "ugcSeaSug", "JsonMarshal", err)
			return
		}
		f.WriteString(string(line) + "\n")
	}
	return
}
// searchSugproc periodically regenerates and uploads the search suggestion
// file until the dao is logically closed.
func (s *Service) searchSugproc() {
	for !s.daoClosed {
		s.searchSug()
		time.Sleep(time.Duration(s.c.Search.Cfg.UploadFre))
	}
	log.Info("searchSugproc DB closed!")
}
// searchSug generates the valid seasons file for search suggestion and
// uploads it, plus its md5 file, to the search FTP.
func (s *Service) searchSug() {
	// open the target file (truncated on every run)
	file, err := os.OpenFile(s.c.Search.SugPath, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0766)
	if err != nil {
		log.Error(errFormat, "searchSug", "OpenFile", err)
		return
	}
	// BUGFIX: close on every path; the early returns below used to leak the file
	defer file.Close()
	if err := s.pgcSeaSug(file); err != nil {
		// BUGFIX: the step label wrongly said "OpenFile"
		log.Error(errFormat, "searchSug", "pgcSeaSug", err)
		return
	}
	// if switch is on then also dump the ugc search suggestions
	if s.c.Search.UgcSwitch == "on" {
		if err := s.ugcSeaSug(file); err != nil {
			// BUGFIX: the step label wrongly said "OpenFile"
			log.Error(errFormat, "searchSug", "ugcSeaSug", err)
			return
		}
	}
	// calculate file's md5 (os.File writes are unbuffered, so the content is
	// already on disk here)
	if err := s.ftpDao.FileMd5(s.c.Search.SugPath, s.c.Search.Md5Path); err != nil {
		log.Error(errFormat, "searchSug", "fileMd5", err)
		return
	}
	// upload original file
	if err := s.ftpDao.UploadFile(s.c.Search.SugPath, s.c.Search.FTP.RemoteFName, s.c.Search.FTP.URL); err != nil {
		log.Error(errFormat, "searchSug-File", "uploadFile", err)
		return
	}
	// upload md5 file
	if err := s.ftpDao.UploadFile(s.c.Search.Md5Path, s.c.Search.FTP.RemoteMd5, s.c.Search.FTP.URL); err != nil {
		log.Error(errFormat, "searchSug-Md5", "uploadFile", err)
		return
	}
	// BUGFIX: the success message was logged at Error level
	log.Info("FTP Upload Success")
}

View File

@@ -0,0 +1,109 @@
package pgc
import (
"context"
"math/rand"
"sync"
"time"
"go-common/app/job/main/tv/conf"
"go-common/app/job/main/tv/dao/app"
"go-common/app/job/main/tv/dao/cms"
"go-common/app/job/main/tv/dao/lic"
playdao "go-common/app/job/main/tv/dao/playurl"
model "go-common/app/job/main/tv/model/pgc"
"go-common/library/log"
"go-common/library/queue/databus"
"go-common/app/job/main/tv/dao/ftp"
"github.com/robfig/cron"
)
// ctx is the package-wide background context used by the producer loops.
var ctx = context.Background()

// Service struct of service.
type Service struct {
	dao            *app.Dao
	daoClosed      bool // logic close the dao's DB: producer loops poll this flag to stop
	playurlDao     *playdao.Dao
	licDao         *lic.Dao
	ftpDao         *ftp.Dao
	cmsDao         *cms.Dao
	c              *conf.Config
	waiter         *sync.WaitGroup  // general waiter for the producer goroutines
	waiterConsumer *sync.WaitGroup  // waiter for the databus consumer
	contentSub     *databus.Databus // consumer for state change
	cron           *cron.Cron
	ResuEps        []*model.Content    // eps queued for a sync retry
	ResuSns        []*model.TVEpSeason // seasons queued for a sync retry
	resuRetry      map[string]int      // NOTE(review): appears unused in this file — verify
}
// New create service instance and return.
// It wires the daos, registers the cron jobs, and launches the producer and
// consumer goroutines tracked by the two wait groups (see Close).
func New(c *conf.Config) (s *Service) {
	s = &Service{
		c:              c,
		dao:            app.New(c),
		playurlDao:     playdao.New(c),
		licDao:         lic.New(c),
		ftpDao:         ftp.New(c),
		cmsDao:         cms.New(c),
		daoClosed:      false,
		waiter:         new(sync.WaitGroup),
		waiterConsumer: new(sync.WaitGroup),
		contentSub:     databus.New(c.ContentSub),
		cron:           cron.New(),
		resuRetry:      make(map[string]int),
	}
	rand.Seed(time.Now().UnixNano())
	// flush Redis - zone list: once at startup, then on the cron schedule
	go s.ZoneIdx()
	if err := s.cron.AddFunc(s.c.Redis.CronPGC, s.ZoneIdx); err != nil {
		panic(err)
	}
	if err := s.cron.AddFunc(s.c.PlayControl.ProducerCron, s.refreshCache); err != nil {
		panic(err)
	}
	if err := s.cron.AddFunc(s.c.Cfg.Merak.Cron, s.cmsShelve); err != nil {
		panic(err)
	}
	s.cron.Start()
	go s.searchSugproc() // uploads the passed season's list to search sug's FTP
	go s.seaPgcContproc() // uploads pgc search content to sug's FTP
	// producers: each goroutine is paired with a waiter.Add so Close can drain them
	s.waiter.Add(1)
	go s.syncEPs()
	s.waiter.Add(1)
	go s.resubEps()
	s.waiter.Add(1)
	go s.resubSns()
	s.waiter.Add(1)
	go s.syncSeason()
	s.waiter.Add(1)
	go s.delSeason()
	s.waiter.Add(1)
	go s.delCont()
	// Databus
	s.waiterConsumer.Add(1)
	go s.consumeContent() // consume Databus Message to update MC
	return
}
// Close dao.
// Shutdown order matters: flip the logical flag so the producer loops exit,
// stop cron and the databus consumer, wait for both goroutine groups, and
// only then physically close the dao.
func (s *Service) Close() {
	if s.dao != nil {
		s.daoClosed = true // logical close: producers poll this flag
		log.Info("Dao Closed!")
	}
	log.Info("Crontab Closed!")
	s.cron.Stop()
	log.Info("Databus Closed!")
	s.contentSub.Close()
	log.Info("Wait Producer!")
	s.waiter.Wait()
	log.Info("Wait SyncMC Consumers")
	s.waiterConsumer.Wait()
	log.Info("Physical Dao Closed!")
	s.dao.Close()
	log.Info("tv-job has been closed.")
}

View File

@@ -0,0 +1,36 @@
package pgc
import (
"flag"
"path/filepath"
"testing"
"time"
"go-common/app/job/main/tv/conf"
. "github.com/smartystreets/goconvey/convey"
)
var (
srv *Service
)
// init loads the test configuration and builds the shared Service instance
// used by every test in this package.
func init() {
	dir, _ := filepath.Abs("../../cmd/tv-job-test.toml")
	flag.Set("conf", dir)
	conf.Init()
	srv = New(conf.Conf)
	time.Sleep(time.Second) // give the background goroutines time to start
}
// WithService wraps a test body so it runs against the shared Service instance.
func WithService(f func(s *Service)) func() {
	return func() {
		f(srv)
	}
}
// TestService_SearchSug smoke-tests the search suggestion file generation.
func TestService_SearchSug(t *testing.T) {
	Convey("TestService_SearchSug", t, WithService(func(s *Service) {
		s.searchSug()
	}))
}

View File

@@ -0,0 +1,98 @@
package pgc
import (
"context"
"time"
"go-common/app/job/main/tv/dao/lic"
model "go-common/app/job/main/tv/model/pgc"
"go-common/library/ecode"
"go-common/library/log"
)
// syncEPs picks the data from DB to audit and combines the XML for the
// license owner (producer: content data => channel).
// It loops until the dao is logically closed and signals s.waiter on exit.
func (s *Service) syncEPs() {
	defer s.waiter.Done()
	for {
		if s.daoClosed {
			log.Info("syncEPs DB closed!")
			return
		}
		// seasons whose contents are ready for audit
		readySids, err := s.dao.ReadySns(ctx)
		if err != nil || len(readySids) == 0 {
			time.Sleep(time.Duration(s.c.Sync.Frequency.ErrorWait))
			continue
		}
		for _, sid := range readySids {
			var contSlices [][]*model.Content
			if contSlices, err = s.dao.PickData(ctx, sid); err != nil || len(contSlices) == 0 {
				continue
			}
			for _, conts := range contSlices {
				if err = s.epsSync(sid, conts); err != nil {
					s.addRetryEps(conts) // queue the batch for resubEps
				}
				// NOTE(review): status is set to auditing even when epsSync
				// failed; the retry queue re-submits those — confirm intended
				s.dao.AuditingCont(ctx, conts) // update status to auditing
			}
		}
		time.Sleep(1 * time.Second)
	}
}
// epsSync builds the license document for one season's contents and submits
// it to the license owner for audit.
func (s *Service) epsSync(sid int64, conts []*model.Content) (err error) {
	req := &model.ReqEpLicCall{
		SID:   sid,
		Conts: conts,
	}
	if req.EpLic, err = s.epLicCreate(ctx, sid, conts); err != nil {
		return err
	}
	return s.epLicCall(ctx, req)
}
// epLicCreate picks the sid and conts to create the license model:
// the season skeleton from newLic plus one program per content, each
// carrying its playurl. Contents whose playurl fails are queued for retry.
func (s *Service) epLicCreate(ctx context.Context, sid int64, conts []*model.Content) (epLic *model.License, err error) {
	var (
		season   *model.TVEpSeason
		prefix   = s.c.Sync.AuditPrefix
		programs []*model.Program
	)
	if season, err = s.dao.Season(ctx, int(sid)); err != nil {
		log.Error("Season ID %d, Err %v", sid, err)
		return
	}
	epLic = newLic(season, s.c.Sync)
	epLic.XMLData.Service.Head.Count = len(conts)
	for _, v := range conts {
		s.dao.WaitCall(ctx, v.EPID) // avoid always selecting the same data, give time to the caller
		url, _, errPlay := s.playurlDao.Playurl(ctx, v.CID)
		if errPlay != nil {
			log.Error("syncEPs EP Playurl EPID = %d, Error: %v", v.EPID, errPlay)
			s.addRetryEp(v) // queue this ep for a later retry
			continue
		}
		ep, errEP := s.dao.EP(ctx, v.EPID)
		if errEP != nil {
			log.Error("EpContent EPID %d Can't found", v.EPID)
			continue
		}
		program := model.CreateProgram(prefix, ep)
		program.ProgramMediaList = &model.PMList{
			ProgramMedia: []*model.PMedia{model.CreatePMedia(s.c.Sync.AuditPrefix, v.EPID, url)},
		}
		programs = append(programs, program)
	}
	epLic.XMLData.Service.Body.ProgramSetList.ProgramSet[0].ProgramList.Program = programs
	return
}
// epLicCall submits the prepared license XML to the audit endpoint,
// translating an empty response into ecode.TvSyncErr.
func (s *Service) epLicCall(ctx context.Context, req *model.ReqEpLicCall) (err error) {
	cfg := s.c.Sync
	res, err := s.licDao.CallRetry(ctx, cfg.API.AddURL, lic.PrepareXML(req.EpLic))
	if res == nil {
		err = ecode.TvSyncErr
	}
	return
}

View File

@@ -0,0 +1,249 @@
package pgc
import (
"context"
"encoding/json"
"fmt"
"reflect"
"time"
appDao "go-common/app/job/main/tv/dao/app"
model "go-common/app/job/main/tv/model/pgc"
"go-common/library/ecode"
"go-common/library/log"
timex "go-common/library/time"
)
// cntFunc counts the total number of rows behind a cache producer.
type cntFunc func(ctx context.Context) (count int, err error)

// refreshFunc refreshes one page of cache starting after LastID,
// returning the max ID it treated.
type refreshFunc func(ctx context.Context, LastID int, nbData int) (myLast int, err error)

// reqCachePro bundles a cache producer: its counter, its name (for logs),
// its page refresher and the page size.
type reqCachePro struct {
	cnt     cntFunc
	proName string
	refresh refreshFunc
	ps      int
}
// cacheProducer counts the rows behind req.cnt, then refreshes the cache
// page by page through req.refresh, following a strictly increasing max-ID
// cursor, pausing between pages.
// NOTE(review): the "MaxID is not increasing" branch logs a fatal condition
// but returns a nil error — confirm whether the caller should be told.
func (s *Service) cacheProducer(ctx context.Context, req *reqCachePro) (err error) {
	var (
		count    int
		pagesize = req.ps
		maxID    = 0 // the max ID of the latest piece
		begin    = time.Now()
	)
	if count, err = req.cnt(ctx); err != nil {
		log.Error("[%s] CountEP error [%v]", req.proName, err)
		return
	}
	nbPiece := appDao.NumPce(count, pagesize)
	log.Info("[%s] NumPiece %d, Pagesize %d", req.proName, nbPiece, pagesize)
	for i := 0; i < nbPiece; i++ {
		newMaxID, errR := req.refresh(ctx, maxID, pagesize)
		if errR != nil {
			log.Error("[%s] Pick Piece %d Error, Ignore it", req.proName, i)
			continue
		}
		if newMaxID > maxID {
			maxID = newMaxID
		} else { // fatal error: the cursor would loop forever
			log.Error("[%s] MaxID is not increasing! [%d,%d]", req.proName, newMaxID, maxID)
			return
		}
		time.Sleep(time.Duration(s.c.UgcSync.Frequency.ProducerFre)) // pause after each piece produced
		log.Info("[%s] Pagesize %d, Num of piece %d, Time Already %v", req.proName, pagesize, i, time.Since(begin))
	}
	log.Info("[%s] Finish! Pagesize %d, Num of piece %d, Time %v", req.proName, pagesize, nbPiece, time.Since(begin))
	return
}
// refreshCache refreshes the MC cache of eps and seasons, running the ep
// producer first and the season producer second.
func (s *Service) refreshCache() {
	var (
		ctx   = context.Background()
		begin = time.Now()
		pgcPS = s.c.PlayControl.PieceSize
		reqEp = &reqCachePro{
			cnt:     s.dao.CountEP,
			proName: "epProducer",
			refresh: s.dao.RefreshEPMC,
			ps:      pgcPS,
		}
		reqSn = &reqCachePro{
			cnt:     s.dao.CountSeason,
			proName: "snProducer",
			refresh: s.dao.RefreshSnMC,
			ps:      pgcPS,
		}
	)
	if err := s.cacheProducer(ctx, reqEp); err != nil {
		log.Error("reqEp Err %v", err)
		return
	}
	if err := s.cacheProducer(ctx, reqSn); err != nil {
		log.Error("reqSn Err %v", err)
	}
	log.Info("refreshCache Finish, Time %v", time.Since(begin))
}
// stockContent stocks EP & Season auth info and intervention info in MC,
// dispatching on the databus table name. Unchanged payloads (old == new)
// are skipped.
func (s *Service) stockContent(jsonstr json.RawMessage, tableName string) (err error) {
	switch tableName {
	case "tv_ep_season": // season stock in MC
		sn := &model.DatabusSeason{}
		if err = json.Unmarshal(jsonstr, sn); err != nil {
			log.Error("json.Unmarshal(%s) error(%v)", jsonstr, err)
			return
		}
		if reflect.DeepEqual(sn.Old, sn.New) { // media fields untouched, nothing to update
			log.Info("SeasonID %d No need to update", sn.New.ID)
			return
		}
		return s.stockSeason(sn)
	case "tv_content": // ep stock in MC
		ep := &model.DatabusEP{}
		if err = json.Unmarshal(jsonstr, ep); err != nil {
			log.Error("json.Unmarshal(%s) error(%v)", jsonstr, err)
			return
		}
		if reflect.DeepEqual(ep.Old, ep.New) { // media fields untouched, nothing to update
			log.Info("Epid %d No need to update", ep.New.EPID)
			return
		}
		return s.stockEP(ep)
	default:
		return fmt.Errorf("Databus Msg (%s) - Incorrect Table (%s) ", jsonstr, tableName)
	}
}
// composeSnCMS maps a databus season payload to the SeasonCMS cache struct,
// resolving the season's newest ep/order and normalizing its playtime.
// Lookup failures are best effort: they are logged and leave zero values.
func (s *Service) composeSnCMS(sn *model.MediaSn) *model.SeasonCMS {
	var (
		epid, order int
		err         error
		playtime    int64
	)
	if epid, order, err = s.dao.NewestOrder(ctx, sn.ID); err != nil {
		log.Warn("stockSeason NewestOrder Sid: %d, Err %v", sn.ID, err)
	}
	if playtime, err = appDao.TimeTrans(sn.Playtime); err != nil {
		log.Warn("stockSeason Playtime Sid: %d, Err %v", sn.ID, err)
	}
	return &model.SeasonCMS{
		SeasonID:    int(sn.ID),
		Cover:       sn.Cover,
		Desc:        sn.Desc,
		Title:       sn.Title,
		UpInfo:      sn.UpInfo,
		Category:    sn.Category,
		Area:        sn.Area,
		Playtime:    timex.Time(playtime),
		Role:        sn.Role,
		Staff:       sn.Staff,
		TotalNum:    sn.TotalNum,
		Style:       sn.Style,
		NewestOrder: order,
		NewestEPID:  epid,
		PayStatus:   sn.Status, // databus sn logic
	}
}
// stockSeason treats the databus season msg, stocking the auth & media info
// in MC and maintaining the Redis zone index.
func (s *Service) stockSeason(sn *model.DatabusSeason) (err error) {
	var (
		snSub   *model.TVEpSeason
		snAuth  = sn.New.ToSimple()      // auth info in MC
		snMedia = s.composeSnCMS(sn.New) // media info in MC
	)
	s.batchFilter(ctx, []*model.SeasonCMS{snMedia}) // treat the newest NB logic
	if sn.New.Check == _seasonPassed && sn.Old.Check == _seasonPassed { // keep already passed logic
		if snSub, err = s.dao.Season(ctx, int(sn.New.ID)); err != nil {
			return
		}
		s.addRetrySn(snSub) // re-sync the passed season to the license owner
	}
	// BUGFIX: the three log calls below passed two arguments to format
	// strings that only had one verb (go vet printf failure)
	if err = s.dao.SetSeason(ctx, snAuth); err != nil { // auth
		log.Error("SetSeason %v error(%v)", snAuth, err)
		return
	}
	if err = s.dao.SetSnCMSCache(ctx, snMedia); err != nil { // media
		log.Error("SetSnCMSCache %v error(%v)", snMedia, err)
		return
	}
	if err = s.listMtn(sn.Old, sn.New); err != nil { // maintenance of the zone list in Redis
		log.Error("stockContent listMtn %v error(%v)", sn.New, err)
	}
	return
}
// stockEP treats the databus ep msg, stocking the auth & media info in MC,
// then refreshing the enclosing season's CMS cache.
func (s *Service) stockEP(ep *model.DatabusEP) (err error) {
	var (
		epAuth  = ep.New.ToSimple() // auth info in MC
		epMedia = ep.New.ToCMS()    // media info in MC
		epSub   *model.Content
	)
	if ep.New.State == _epPassed && ep.Old.State == _epPassed { // keep already passed logic
		if epSub, err = s.dao.Cont(ctx, ep.New.EPID); err != nil {
			return
		}
		s.addRetryEp(epSub) // re-sync the passed ep to the license owner
	}
	if err = s.dao.SetEP(ctx, epAuth); err != nil { // set ep auth MC
		return
	}
	if err = s.dao.SetEpCMSCache(ctx, epMedia); err != nil { // set ep media MC
		return
	}
	// an ep update also refreshes its season's "latest" info
	err = s.updateSnCMS(epAuth.SeasonID)
	return
}
// updateSnCMS picks the season info from DB and updates the CMS cache, so
// that an ep update also refreshes the season's "latest" info.
func (s *Service) updateSnCMS(sid int) (err error) {
	var snMedia *model.SeasonCMS
	if snMedia, err = s.dao.PickSeason(ctx, sid); err != nil { // pick season cms info
		log.Error("stockEP PickSeason Sid: %d, Err: %v", sid, err)
		return
	}
	if snMedia == nil { // season info not found
		err = ecode.NothingFound
		log.Error("stockEP PickSeason Sid: %d, Err: %v", sid, err)
		return
	}
	s.batchFilter(ctx, []*model.SeasonCMS{snMedia}) // resolve the newest NB
	if err = s.dao.SetSnCMSCache(ctx, snMedia); err != nil {
		// BUGFIX: the format string had one verb but two arguments
		log.Error("SetSnCMSCache %v error(%v)", snMedia, err)
	}
	return
}
// consumeContent consumes Databus messages; because daily modification is
// not many, a simple loop is enough. It exits when the databus channel is
// closed (by contentSub.Close in Close) and signals waiterConsumer.
func (s *Service) consumeContent() {
	defer s.waiterConsumer.Done()
	for {
		msg, ok := <-s.contentSub.Messages()
		if !ok { // channel closed: shutdown in progress
			log.Info("databus: tv-job ep/season consumer exit!")
			return
		}
		msg.Commit() // commit before treating (at-most-once handling)
		s.treatMsg(msg.Value)
		time.Sleep(1 * time.Millisecond)
	}
}
// treatMsg decodes one databus message and routes it to stockContent;
// deletion actions are logged and ignored.
func (s *Service) treatMsg(msg json.RawMessage) {
	log.Info("[ConsumeContent] New Message: %s", msg)
	res := &model.DatabusRes{}
	if err := json.Unmarshal(msg, res); err != nil {
		log.Error("json.Unmarshal(%s) error(%v)", msg, err)
		return
	}
	if res.Action == "delete" {
		log.Info("[ConsumeContent] Content Deletion, We ignore:<%v>,<%v>", res, msg)
		return
	}
	if err := s.stockContent(msg, res.Table); err != nil {
		log.Error("stockContent.(%s,%s), error(%v)", msg, res.Table, err)
	}
}

View File

@@ -0,0 +1,13 @@
package pgc
import (
"testing"
. "github.com/smartystreets/goconvey/convey"
)
// TestService_FullRefresh smoke-tests the full MC cache refresh.
func TestService_FullRefresh(t *testing.T) {
	Convey("No redundant data", t, WithService(func(s *Service) {
		s.refreshCache()
	}))
}

View File

@@ -0,0 +1,165 @@
package pgc
import (
"time"
"go-common/app/job/main/tv/model/common"
"go-common/app/job/main/tv/model/pgc"
"go-common/library/log"
)
// addRetryEp adds a single ep into the retry list.
func (s *Service) addRetryEp(in *pgc.Content) {
	s.addRetryEps([]*pgc.Content{in})
}
// addRetryEps adds eps into the retry list, skipping those that already
// exceeded the retry limit.
func (s *Service) addRetryEps(in []*pgc.Content) {
	var (
		epids    []int
		newConts []*pgc.Content
	)
	for _, v := range in {
		if !s.retryLimit(false, int64(v.EPID)) { // filter retried too many times ep
			continue
		}
		newConts = append(newConts, v)
		epids = append(epids, v.EPID)
	}
	if len(newConts) == 0 { // everything filtered out, nothing to queue
		return
	}
	log.Warn("addRetryEps Add IDs %v", epids)
	s.ResuEps = append(s.ResuEps, newConts...)
}
// pickRetryEp drains the in-memory list of eps waiting for a retry.
func (s *Service) pickRetryEp() (res []*pgc.Content) {
	if len(s.ResuEps) > 0 {
		res = append(res, s.ResuEps...)
		log.Info("pickRetry EP Len %d", len(res))
		s.ResuEps = make([]*pgc.Content, 0)
	}
	return
}
// resubEps re-submits failed eps: it drains the retry list, retries each ep,
// re-queues the ones that fail again, and clears the retry counter of the
// ones that succeed. It loops until the dao is logically closed.
func (s *Service) resubEps() {
	defer s.waiter.Done()
	for {
		if s.daoClosed {
			log.Info("resubEps DB closed!")
			return
		}
		readyEps := s.pickRetryEp() // pick to-retry eps from memory
		if len(readyEps) == 0 {
			log.Info("resubEps Empty")
			time.Sleep(time.Duration(s.c.Cfg.SyncRetry.RetryFre))
			continue
		}
		againEps := make([]*pgc.Content, 0)
		for _, ep := range readyEps { // retry them
			if err := s.epsSync(int64(ep.SeasonID), []*pgc.Content{ep}); err != nil { // if error, re-add this item into re-sub list
				log.Error("resubEps Sid %d, Epid %v, Err %v", ep.SeasonID, ep.EPID, err)
				againEps = append(againEps, ep)
				continue
			}
			retry := &common.SyncRetry{}
			retry.FromEp(0, int64(ep.EPID))
			s.dao.DelRetry(ctx, retry) // after succ, del it from MC
		}
		if len(againEps) > 0 {
			s.addRetryEps(againEps)
		}
		time.Sleep(time.Duration(s.c.Cfg.SyncRetry.RetryFre))
	}
}
// addRetrySn adds a single season into the retry list.
func (s *Service) addRetrySn(in *pgc.TVEpSeason) {
	s.addRetrySns([]*pgc.TVEpSeason{in})
}
// addRetrySns adds seasons into the retry list, skipping those that already
// exceeded the retry limit.
func (s *Service) addRetrySns(in []*pgc.TVEpSeason) {
	var (
		sids     []int64
		newConts []*pgc.TVEpSeason
	)
	for _, v := range in {
		if !s.retryLimit(true, v.ID) { // filter seasons retried too many times
			continue
		}
		newConts = append(newConts, v)
		sids = append(sids, v.ID)
	}
	// BUGFIX: mirror addRetryEps and bail out when everything was filtered,
	// avoiding a misleading "Add IDs []" log line
	if len(newConts) == 0 {
		return
	}
	log.Warn("addRetrySns Add IDs %v", sids)
	s.ResuSns = append(s.ResuSns, newConts...)
}
// pickRetrySn drains the in-memory list of seasons waiting for a retry.
func (s *Service) pickRetrySn() (res []*pgc.TVEpSeason) {
	if len(s.ResuSns) > 0 {
		res = append(res, s.ResuSns...)
		log.Info("pickRetry Sn Len %d", len(res))
		s.ResuSns = make([]*pgc.TVEpSeason, 0)
	}
	return
}
// resubSns re-submits failed seasons: it drains the retry list, retries each
// season, re-queues the ones that fail again, and clears the retry counter
// of the ones that succeed. It loops until the dao is logically closed.
func (s *Service) resubSns() {
	defer s.waiter.Done()
	for {
		if s.daoClosed {
			log.Info("resubSns DB closed!")
			return
		}
		readySns := s.pickRetrySn()
		if len(readySns) == 0 {
			log.Info("resubSns Empty")
			time.Sleep(time.Duration(s.c.Cfg.SyncRetry.RetryFre))
			continue
		}
		againSns := make([]*pgc.TVEpSeason, 0)
		for _, sn := range readySns {
			if err := s.snSync(sn); err != nil { // if error, re-add this item into re-sub list
				log.Error("resubSns Sid %d, Err %v", sn.ID, err)
				againSns = append(againSns, sn)
				continue
			}
			retry := &common.SyncRetry{}
			retry.FromSn(0, sn.ID)
			s.dao.DelRetry(ctx, retry) // after succ, del it from MC
		}
		if len(againSns) > 0 {
			s.addRetrySns(againSns)
		}
		time.Sleep(time.Duration(s.c.Cfg.SyncRetry.RetryFre))
	}
}
// retryLimit reports whether the given season/ep may be retried again.
// It reads the retry counter from MC, refuses once MaxRetry is exceeded,
// and otherwise increments the counter. On a read error the retry is allowed.
func (s *Service) retryLimit(isSn bool, id int64) bool {
	var req = &common.SyncRetry{}
	if isSn {
		req.FromSn(0, id)
	} else {
		req.FromEp(0, id)
	}
	retryTms, err := s.dao.GetRetry(ctx, req)
	if err != nil {
		log.Error("GetRetry Req %s, Err %v", req.MCKey(), err)
		return true // fail open: allow the retry when the counter is unreadable
	}
	if retryTms > s.c.Cfg.SyncRetry.MaxRetry {
		log.Error("retryLimit Req %s, Retry Already %d times, stop here", req.MCKey(), retryTms)
		return false
	}
	// count this retry
	s.dao.SetRetry(ctx, &common.SyncRetry{
		Ctype: req.Ctype,
		CID:   req.CID,
		Retry: retryTms + 1,
	})
	return true
}

View File

@@ -0,0 +1,46 @@
package pgc
import (
"database/sql"
"time"
"go-common/app/job/main/tv/dao/lic"
model "go-common/app/job/main/tv/model/pgc"
"go-common/library/ecode"
"go-common/library/log"
)
// syncSeason syncs modified season data to the license owner.
// It loops until the dao is logically closed and signals s.waiter on exit.
func (s *Service) syncSeason() {
	defer s.waiter.Done()
	for {
		if s.daoClosed {
			log.Info("syncSeason DB closed!")
			return
		}
		// pick the batch of modified seasons waiting for audit
		modSeason, err := s.dao.ModSeason(ctx)
		if err == sql.ErrNoRows || len(modSeason) == 0 {
			log.Info("No modified data to pick from Season to audit")
			time.Sleep(time.Duration(s.c.Sync.Frequency.FreModSeason))
			continue
		}
		for _, v := range modSeason {
			if err = s.snSync(v); err != nil {
				s.addRetrySn(v) // queue for the resubSns producer
			}
			// NOTE(review): the comment says "after succ" but the status is
			// updated even when snSync failed — confirm intended
			s.dao.AuditSeason(ctx, int(v.ID)) // update season status after succ
		}
		time.Sleep(1 * time.Second) // pause after each loop
	}
}
// snSync pushes one season's license update to the license owner,
// translating an empty response into ecode.TvSyncErr.
func (s *Service) snSync(sn *model.TVEpSeason) (err error) {
	data := newLic(sn, s.c.Sync)
	data.XMLData.Service.Head.Count = 1
	res, err := s.licDao.CallRetry(ctx, s.c.Sync.API.UpdateURL, lic.PrepareXML(data))
	if res == nil {
		err = ecode.TvSyncErr
	}
	return
}

View File

@@ -0,0 +1,60 @@
package pgc
import (
"context"
model "go-common/app/job/main/tv/model/pgc"
"go-common/library/log"
)
const (
	_seasonPassed = 1 // season check state: passed
	_epPassed     = 3 // ep state: passed
	_cmsValid     = 1 // cms valid flag: valid
	_notDeleted   = 0 // is_deleted flag: not deleted
)
// ZoneIdx finds out all the passed seasons in DB per configured PGC zone
// and flushes each zone into its sorted set in Redis.
func (s *Service) ZoneIdx() {
	ctx := context.Background()
	for _, zoneID := range s.c.Cfg.PGCZonesID {
		zoneSns, err := s.dao.PassedSn(ctx, zoneID)
		if err != nil {
			log.Error("ZoneIdx - PassedSn %d Error %v", zoneID, err)
			continue
		}
		if err = s.dao.Flush(ctx, zoneID, zoneSns); err != nil {
			log.Error("ZoneIdx - Flush %d Error %v", zoneID, err)
		}
	}
}
// listMtn maintains the zone index list in Redis according to the season's
// old and new state: remove the season when it loses its "passed" status,
// add it when it newly gains it; otherwise do nothing.
func (s *Service) listMtn(oldSn *model.MediaSn, newSn *model.MediaSn) (err error) {
	if oldSn == nil {
		log.Info("ListMtn OldSn is Nil, NewSn is %v", newSn)
		oldSn = &model.MediaSn{}
	}
	oldOk := oldSn.Check == _seasonPassed && oldSn.IsDeleted == _notDeleted && oldSn.Valid == _cmsValid
	newOk := newSn.Check == _seasonPassed && newSn.IsDeleted == _notDeleted && newSn.Valid == _cmsValid
	if oldOk && !newOk { // previously passed, not passed now: remove from the index
		// BUGFIX: %s was applied to a struct pointer; use %v
		if err = s.dao.ZRemIdx(ctx, newSn.Category, newSn.ID); err != nil {
			log.Error("listMtn - ZRemIdx - Category: %d, Sn: %v, Error: %v", newSn.Category, newSn, err)
			return
		}
		log.Info("Remove Sid %d From Zone %d", newSn.ID, newSn.Category)
	} else if !oldOk && newOk { // previously not passed or absent, passed now: add to the index
		if err = s.dao.ZAddIdx(ctx, newSn.Category, newSn.Ctime, newSn.ID); err != nil {
			log.Error("listMtn - ZAddIdx - Category: %d, Sn: %v, Error: %v", newSn.Category, newSn, err)
			return
		}
		log.Info("Add Sid %d Into Zone %d", newSn.ID, newSn.Category)
	}
	return
}

View File

@@ -0,0 +1,13 @@
package pgc
import (
"testing"
. "github.com/smartystreets/goconvey/convey"
)
// TestService_ZoneIdx exercises the full zone-index rebuild against the
// test configuration (smoke test, no assertions).
func TestService_ZoneIdx(t *testing.T) {
	Convey("ZoneIdx", t, WithService(func(s *Service) {
		s.ZoneIdx()
	}))
}

View File

@@ -0,0 +1,50 @@
# Bazel build rules for //app/job/main/tv/service/report.
# NOTE: every rule is tagged "automanaged" -- regenerated by tooling, so
# manual edits may be overwritten on the next sync.
package(default_visibility = ["//visibility:public"])
load(
    "@io_bazel_rules_go//go:def.bzl",
    "go_test",
    "go_library",
)
# Unit tests of the report service package.
go_test(
    name = "go_default_test",
    srcs = ["service_test.go"],
    embed = [":go_default_library"],
    rundir = ".",
    tags = ["automanaged"],
    deps = ["//app/job/main/tv/conf:go_default_library"],
)
# The report service library (cron-driven data reporting + label styles).
go_library(
    name = "go_default_library",
    srcs = [
        "service.go",
        "style.go",
    ],
    importpath = "go-common/app/job/main/tv/service/report",
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
    deps = [
        "//app/job/main/tv/conf:go_default_library",
        "//app/job/main/tv/dao/report:go_default_library",
        "//app/job/main/tv/model/pgc:go_default_library",
        "//app/job/main/tv/model/report:go_default_library",
        "//library/log:go_default_library",
        "//library/sync/pipeline/fanout:go_default_library",
        "//vendor/github.com/robfig/cron:go_default_library",
    ],
)
# Every file in this package.
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)
# Source aggregation target referenced by the parent package.
filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,293 @@
package report
import (
"bytes"
"context"
"encoding/json"
"io"
xhttp "net/http"
"strings"
"sync"
"time"
"go-common/app/job/main/tv/conf"
"go-common/app/job/main/tv/dao/report"
mdlrep "go-common/app/job/main/tv/model/report"
"go-common/library/log"
"go-common/library/sync/pipeline/fanout"
"github.com/robfig/cron"
)
const (
	_retry      = 3    // max attempts for data-platform HTTP calls
	_jobRunning = 3    // data-platform job status: still running
	_startJob   = 4    // data-platform job status: just started
	_readSize   = 1024 // byte multiplier applied to conf Report.ReadSize
)
// Service is the report-job service: it schedules data-platform report
// jobs via cron and maintains the label/style caches.
type Service struct {
	c   *conf.Config
	ch  chan bool // semaphore bounding concurrent sendOnce goroutines
	dao *report.Dao
	// pending data-platform job-result URLs, guarded by lock
	respURL map[string]interface{}
	cache   *fanout.Fanout
	lock    sync.Mutex
	// category -> label name -> label id mapping used by showStyle
	labelRes map[int]map[string]int
	readSize int // chunk-flush threshold in bytes (see readFile)
	// cron
	cron *cron.Cron
}
// New creates a Service instance.
// It wires the cron jobs for the four report tables, restores cached state,
// and launches the background report and label/style loops.
func New(c *conf.Config) (s *Service) {
	s = &Service{
		c:        c,
		dao:      report.New(c),
		respURL:  make(map[string]interface{}),
		cache:    fanout.New("cache", fanout.Worker(1), fanout.Buffer(1024)),
		labelRes: make(map[int]map[string]int),
		readSize: c.Report.ReadSize * _readSize, // conf value scaled to bytes
		cron:     cron.New(),
		ch:       make(chan bool, c.Report.RoutineCount), // caps sendOnce goroutines
	}
	if err := s.cron.AddFunc(s.c.Report.CronAc, s.oneWork(mdlrep.ArchiveClick)); err != nil { // corn report run
		panic(err)
	}
	if err := s.cron.AddFunc(s.c.Report.CronAd, s.oneWork(mdlrep.ActiveDuration)); err != nil { // corn report run
		panic(err)
	}
	if err := s.cron.AddFunc(s.c.Report.CronPd, s.oneWork(mdlrep.PlayDuration)); err != nil { // corn report run
		panic(err)
	}
	if err := s.cron.AddFunc(s.c.Report.CronVe, s.oneWork(mdlrep.VisitEvent)); err != nil { // corn report run
		panic(err)
	}
	s.cron.Start()
	s.readCache()      // data report
	s.readLabelCache() // label style
	go s.reportCon()   // data report
	go s.showStyle()   // label style
	go s.showLabel()   // label style
	return
}
// oneWork builds the cron callback for one report table: it asks the data
// platform to launch a job for that table and queues the returned result
// URL for reportCon to poll. No-op outside the prod environment.
func (s *Service) oneWork(table string) func() {
	return func() {
		if s.c.Report.Env != "prod" {
			return
		}
		jobURL, err := s.requestURL(table)
		if err != nil {
			log.Error("reportPro s.requestURL() error(%v)", err)
			return
		}
		if jobURL == "" {
			return
		}
		s.lock.Lock()
		s.respURL[jobURL] = struct{}{}
		s.lock.Unlock()
		s.setCache()
	}
}
// reportCon polls the queued data-platform job URLs every 3 seconds: jobs
// whose result files are ready get downloaded and re-reported; jobs still
// running or starting are re-queued; everything else is dropped.
// Prod-only background loop.
func (s *Service) reportCon() {
	if s.c.Report.Env != "prod" {
		return
	}
	var (
		err  error
		info *mdlrep.DpCheckJobResult
	)
	for {
		var (
			flags, failStr []string
		)
		// snapshot and clear the pending URL set under the lock
		s.lock.Lock()
		for k := range s.respURL {
			flags = append(flags, k)
		}
		s.respURL = make(map[string]interface{})
		s.lock.Unlock()
		for _, v := range flags {
			if v == "" {
				continue
			}
			// loop send http request and return result
			if info, err = s.check(v); err == nil && len(info.Files) > 0 {
				now := time.Now()
				s.upReport(info)
				log.Warn("report success fileNum(%d) url(%s) 本次上报数据耗时: %s", len(info.Files), v, time.Since(now))
				continue
			}
			// BUGFIX: info is nil when every retry of s.check failed; the
			// previous code dereferenced it unconditionally and could panic.
			if info != nil && (info.StatusID == _jobRunning || info.StatusID == _startJob) {
				failStr = append(failStr, v)
			}
		}
		// re-queue the jobs that are still in progress
		s.lock.Lock()
		for _, v := range failStr {
			s.respURL[v] = struct{}{}
		}
		s.lock.Unlock()
		s.setCache()
		time.Sleep(3 * time.Second)
	}
}
// readFile streams one data-platform result file over HTTP and forwards
// its rows to postData in chunks of roughly s.readSize bytes. Rows are
// newline-separated; columns inside a row are \u0001-separated.
func (s *Service) readFile(path string) {
	var (
		n       int
		err     error
		resdata []map[string]interface{}
		resp    *xhttp.Response
		buf     = make([]byte, 1024) // per-read scratch buffer
		chunks  []byte               // accumulated, not-yet-parsed bytes
		req     *xhttp.Request
		fileCnt = 0 // number of flushed chunks
	)
	client := &xhttp.Client{
		Transport: &xhttp.Transport{
			DisableKeepAlives: true, // one-shot download; don't pool the conn
		},
	}
	// NOTE(review): the client has no Timeout; a stalled download blocks here.
	req, err = xhttp.NewRequest("GET", path, strings.NewReader(""))
	if err != nil {
		log.Error("[url(%s)] xhttp.NewRequest error(%v)", path, err)
		return
	}
	resp, err = client.Do(req)
	if err != nil {
		log.Error("[url(%s)] client.Do error(%v)", path, err)
		return
	}
	defer resp.Body.Close()
	// NOTE(review): resp.StatusCode is never checked; a non-200 body would
	// be parsed as data -- confirm the data platform always returns 200.
	for {
		n, err = resp.Body.Read(buf)
		if err != nil {
			if err == io.EOF {
				break
			}
			log.Error("resp.Body.Read error(%v)", err)
			return
		}
		if 0 == n {
			break
		}
		chunks = append(chunks, buf[:n]...)
		if len(chunks) > s.readSize { // 500K
			// flush everything up to the last complete line
			lastPos := bytes.LastIndex(chunks, []byte("\n"))
			if lastPos < 0 {
				continue
			}
			fileCnt = fileCnt + 1
			results := append([]byte{}, chunks[:lastPos]...)
			// NOTE(review): the remainder keeps the leading "\n", so the next
			// split starts with an empty row -- confirm ArcClickParam tolerates it.
			chunks = append([]byte{}, chunks[lastPos:]...)
			bsdata := bytes.Split(results, []byte("\n"))
			for _, bs := range bsdata {
				n := bytes.Split(bs, []byte("\u0001")) // column separator
				m := mdlrep.ArcClickParam(n)
				resdata = append(resdata, m)
			}
			if err = s.postData(resdata); err != nil {
				log.Error("[url(%s)] s.postData error(%v)", path, err)
			}
			resdata = make([]map[string]interface{}, 0)
		}
	}
	// flush whatever remained after EOF
	if len(chunks) > 0 {
		bsdata := bytes.Split(chunks, []byte("\n"))
		for _, bs := range bsdata {
			n := bytes.Split(bs, []byte("\u0001"))
			m := mdlrep.ArcClickParam(n)
			resdata = append(resdata, m)
		}
		if err = s.postData(resdata); err != nil {
			log.Error("[url(%s)] s.postData error(%v)", path, err)
		}
	}
}
// requestURL asks the data platform (up to _retry times) to launch the
// report job for the given table, returning the job-result URL.
func (s *Service) requestURL(table string) (res string, err error) {
	for attempt := 0; attempt < _retry; attempt++ {
		res, err = s.dao.Report(context.Background(), table)
		if err == nil {
			return
		}
	}
	return
}
// check queries (up to _retry times) the status of a previously launched
// data-platform job identified by its result URL.
func (s *Service) check(res string) (info *mdlrep.DpCheckJobResult, err error) {
	for attempt := 0; attempt < _retry; attempt++ {
		info, err = s.dao.CheckJob(context.Background(), res)
		if err == nil {
			return
		}
	}
	return
}
// upReport downloads and re-reports every result file of a finished job.
func (s *Service) upReport(info *mdlrep.DpCheckJobResult) {
	for _, file := range info.Files {
		s.readFile(file)
	}
}
// postData fans every row out to a sendOnce goroutine, bounded by the
// s.ch semaphore; errors from the goroutines are not collected.
func (s *Service) postData(param []map[string]interface{}) (err error) {
	for _, row := range param {
		s.ch <- true // acquire a slot; released inside sendOnce
		go s.sendOnce(row)
	}
	return
}
// sendOnce marshals one row as JSON and forwards it via dealBody,
// releasing its semaphore slot when done.
func (s *Service) sendOnce(v map[string]interface{}) (err error) {
	defer func() {
		<-s.ch // release the slot acquired by postData
	}()
	data, err := json.Marshal(v)
	if err != nil {
		log.Error("Service postData json.Marshal error(%v)", err)
		return
	}
	s.dealBody(string(data) + ",")
	return
}
// readCache restores the pending job-URL set from cache at startup;
// prod-only, and a cache failure is fatal.
func (s *Service) readCache() {
	if s.c.Report.Env != "prod" {
		return
	}
	btRes, err := s.dao.GetReportCache(context.Background())
	if err != nil {
		log.Error("s.dao.GetReportCache error(%v)", err)
		panic(err)
	}
	s.respURL = btRes
}
// dealBody strips `\\N` placeholders, drops the trailing comma, wraps the
// accumulated rows into the expected JSON envelope and posts them.
func (s *Service) dealBody(body string) {
	cleaned := strings.Replace(body, `\\N`, "", -1)
	cleaned = strings.TrimSuffix(cleaned, ",")
	payload := `{"code": 0,"message": "0","ttl": 1,"data":[` + cleaned + `]}`
	if err := s.dao.PostRequest(context.Background(), payload); err != nil {
		log.Error("s.dao.PostRequest error(%v)", err)
	}
}
// setCache asynchronously persists the pending job-URL set via the fanout worker.
// NOTE(review): s.respURL is read inside the worker without holding s.lock,
// while reportCon swaps the map under the lock -- confirm/guard this race.
func (s *Service) setCache() {
	s.cache.Do(context.Background(), func(c context.Context) {
		s.dao.SetReportCache(c, s.respURL)
	})
}

View File

@@ -0,0 +1,21 @@
package report
import (
"flag"
"path/filepath"
"time"
"go-common/app/job/main/tv/conf"
)
var (
	// shared service instance for the package tests
	srv *Service
)

// init loads the test configuration and builds the Service once for the
// whole test package.
func init() {
	dir, _ := filepath.Abs("../../cmd/tv-job-test.toml")
	flag.Set("conf", dir)
	conf.Init()
	srv = New(conf.Conf)
	time.Sleep(time.Second) // let background goroutines warm up
}

View File

@@ -0,0 +1,89 @@
package report
import (
"context"
"strings"
"time"
mdlpgc "go-common/app/job/main/tv/model/pgc"
"go-common/library/log"
)
// showStyle periodically maps every season's comma-separated style string
// to {name, style id} pairs using the label table (s.labelRes) and caches
// the result per season ID.
// NOTE(review): s.labelRes is read here while showLabel writes it from
// another goroutine without synchronization -- confirm/guard this race.
func (s *Service) showStyle() {
	var (
		err      error
		res      []*mdlpgc.StyleRes
		styleStr []*mdlpgc.ParamStyle
		styleRes = make(map[int][]*mdlpgc.ParamStyle)
		ctx      = context.Background()
	)
	for {
		if res, err = s.dao.FindStyle(ctx); err != nil {
			log.Error("s.dao.FindStyle error(%v)", err)
			time.Sleep(time.Second * 5)
			continue
		}
		if len(res) != 0 {
			for _, v := range res {
				styleStr = make([]*mdlpgc.ParamStyle, 0)
				if m, ok := s.labelRes[v.Category]; ok {
					a := strings.Split(v.Style, ",")
					for _, v1 := range a {
						r := &mdlpgc.ParamStyle{}
						// keep only style names known to the label table
						if m1, ok1 := m[v1]; ok1 {
							r.Name = v1
							r.StyleID = m1
							styleStr = append(styleStr, r)
						}
					}
					if len(styleStr) != 0 {
						styleRes[v.ID] = styleStr
					}
				}
			}
		}
		if len(styleRes) > 0 {
			s.cache.Do(ctx, func(ctx context.Context) {
				// set style data to mc
				s.dao.SetStyleCache(ctx, styleRes)
			})
		}
		// NOTE(review): styleRes is never reset between iterations, so stale
		// season entries persist across refreshes -- confirm this is intended.
		time.Sleep(time.Duration(s.c.Style.StyleSpan))
	}
}
// showLabel periodically reloads the category -> label-name -> label-id
// mapping from DB into memory and pushes it to memcache.
// NOTE(review): s.labelRes is written here while showStyle reads it from
// another goroutine without synchronization -- confirm/guard this race.
func (s *Service) showLabel() {
	var (
		err error
		res map[int]map[string]int
		ctx = context.Background()
	)
	for {
		if res, err = s.dao.FindLabelID(ctx); err != nil {
			log.Error("s.dao.FindLabelID error(%v)", err)
			time.Sleep(time.Second * 5)
			continue
		}
		if len(res) != 0 {
			s.labelRes = res
			s.cache.Do(ctx, func(ctx context.Context) {
				// set label data to mc
				s.dao.SetLabelCache(ctx, s.labelRes)
			})
		}
		time.Sleep(time.Duration(s.c.Style.LabelSpan))
	}
}
// readLabelCache loads the category->label->id mapping from cache at
// startup; failure is fatal because showStyle depends on the mapping.
func (s *Service) readLabelCache() {
	labels, err := s.dao.GetLabelCache(context.Background())
	if err != nil {
		log.Error("s.dao.GetLabelCache error(%v)", err)
		panic(err)
	}
	s.labelRes = labels
}

View File

@@ -0,0 +1,86 @@
# Bazel build rules for //app/job/main/tv/service/ugc.
# NOTE: every rule is tagged "automanaged" -- regenerated by tooling, so
# manual edits may be overwritten on the next sync.
load(
    "@io_bazel_rules_go//go:def.bzl",
    "go_test",
    "go_library",
)
# Unit tests of the ugc service package.
go_test(
    name = "go_default_test",
    srcs = [
        "import_test.go",
        "sea_con_test.go",
        "sync_video_test.go",
        "ugc_test.go",
    ],
    embed = [":go_default_library"],
    rundir = ".",
    tags = ["automanaged"],
    deps = [
        "//app/job/main/tv/conf:go_default_library",
        "//app/job/main/tv/model/ugc:go_default_library",
        "//vendor/github.com/smartystreets/goconvey/convey:go_default_library",
    ],
)
# The ugc service library (archive import/sync/refresh pipelines).
go_library(
    name = "go_default_library",
    srcs = [
        "databus.go",
        "del_arc.go",
        "del_video.go",
        "full_refresh.go",
        "import.go",
        "manual.go",
        "media_cache.go",
        "pick.go",
        "report_cid.go",
        "sea_con.go",
        "sync_arc.go",
        "sync_video.go",
        "tool.go",
        "ugc.go",
        "upper.go",
        "view.go",
        "zone_index.go",
    ],
    importpath = "go-common/app/job/main/tv/service/ugc",
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
    deps = [
        "//app/job/main/tv/conf:go_default_library",
        "//app/job/main/tv/dao/app:go_default_library",
        "//app/job/main/tv/dao/archive:go_default_library",
        "//app/job/main/tv/dao/cms:go_default_library",
        "//app/job/main/tv/dao/ftp:go_default_library",
        "//app/job/main/tv/dao/lic:go_default_library",
        "//app/job/main/tv/dao/playurl:go_default_library",
        "//app/job/main/tv/dao/ugc:go_default_library",
        "//app/job/main/tv/dao/upper:go_default_library",
        "//app/job/main/tv/model/pgc:go_default_library",
        "//app/job/main/tv/model/ugc:go_default_library",
        "//app/service/main/account/model:go_default_library",
        "//app/service/main/archive/api:go_default_library",
        "//app/service/main/archive/api/gorpc:go_default_library",
        "//app/service/main/archive/model/archive:go_default_library",
        "//library/database/sql:go_default_library",
        "//library/ecode:go_default_library",
        "//library/log:go_default_library",
        "//library/queue/databus:go_default_library",
        "//library/time:go_default_library",
        "//vendor/github.com/robfig/cron:go_default_library",
    ],
)
# Every file in this package.
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)
# Source aggregation target referenced by the parent package.
filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,294 @@
package ugc
import (
"context"
"encoding/json"
appDao "go-common/app/job/main/tv/dao/app"
ugcmdl "go-common/app/job/main/tv/model/ugc"
arcmdl "go-common/app/service/main/archive/api"
"go-common/library/database/sql"
"go-common/library/log"
)
const (
	_tableArchive = "archive" // databus table name we consume
	_updateAction = "update"  // databus row-update action
	_insertAction = "insert"  // databus row-insert action
	_deleted      = 1         // soft-delete flag value in our DB
)
// arcConsumeproc consumes the archive databus stream until the channel is
// closed, dispatching every archive-table message to ArcHandle.
// Runs as a background goroutine tracked by s.waiter.
func (s *Service) arcConsumeproc() {
	var err error
	defer s.waiter.Done()
	for {
		msg, ok := <-s.archiveNotifySub.Messages()
		if !ok {
			log.Info("arc databus Consumer exit")
			break
		}
		var ms = &ugcmdl.ArcMsg{}
		log.Info("arcConsumeproc New message: %s", msg)
		if err = json.Unmarshal(msg.Value, ms); err != nil {
			msg.Commit() // ack and drop undecodable messages
			log.Error("json.Unmarshal(%s) error(%v)", msg.Value, err)
			continue
		}
		switch ms.Table {
		case _tableArchive:
			s.ArcHandle(ms)
		}
		msg.Commit()
	}
}
// ArcHandle treats the archive notify-T message to update the DB if there is any change.
// Messages from non-trusted uppers are ignored, except updates to archives
// that already exist in our DB (i.e. were imported manually).
func (s *Service) ArcHandle(arcMsg *ugcmdl.ArcMsg) {
	var (
		msgMID  = arcMsg.New.Mid
		trustUp = false
	)
	if len(s.activeUps) > 0 { // check whether it's our trust upper
		// NOTE(review): s.activeUps is read here without a lock; confirm the
		// map is read-only (or its writer stopped) while this consumer runs.
		if _, ok := s.activeUps[msgMID]; ok {
			trustUp = true
		}
	} else { // when the memory is not ready, check upper from DB
		mid, _ := s.dao.UpInList(ctx, arcMsg.New.Mid)
		trustUp = mid > 0
	}
	if !trustUp { // if it's not our trust upper, ignore the message except the archive was imported manually
		if arcMsg.Action == _updateAction && s.arcExist(arcMsg.New.Aid) { // if it's update and the archive exists ( added manually ), we allow it
			s.arcUpdate(arcMsg.Old, arcMsg.New)
			return
		}
		log.Info("Message Aid %d, Mid %d, Not in List, Ignore", arcMsg.New.Aid, arcMsg.New.Mid)
		appDao.PromInfo("DsInsert:Ignore")
		return
	}
	// arc update
	if arcMsg.Action == _updateAction {
		s.arcUpdate(arcMsg.Old, arcMsg.New)
	}
	// arc insert
	if arcMsg.Action == _insertAction {
		s.arcInsert(arcMsg.New)
	}
}
// arcExist reports whether the archive is present in our DB and not soft-deleted.
func (s *Service) arcExist(aid int64) bool {
	res, err := s.dao.ParseArc(ctx, aid)
	if err != nil || res == nil {
		return false
	}
	return res.Deleted != _deleted
}
// arcInsert imports a databus-notified new archive, skipping aids we already hold.
func (s *Service) arcInsert(arc *ugcmdl.ArchDatabus) (err error) {
	if s.arcExist(arc.Aid) {
		appDao.PromError("DsInsert:Exist")
		log.Error("Databus Insert Data Aid %d Exist", arc.Aid)
		return
	}
	if err = s.importArc(context.Background(), arc.Aid, false); err != nil {
		appDao.PromError("DsInsert:Err")
		log.Error("Databus Import Arc %d Error %v", arc.Aid, err)
		return
	}
	appDao.PromInfo("DsInsert:Succ")
	return
}
// arcUpdate updates a databus notified archive: missing archives are
// inserted, recovered ones re-inserted, newly banned ones deleted, and
// otherwise archive/video fields are diffed and synced.
func (s *Service) arcUpdate(old *ugcmdl.ArchDatabus, new *ugcmdl.ArchDatabus) (err error) {
	if !s.arcExist(new.Aid) { // if an archive is not existing yet in our DB, we insert it
		return s.arcInsert(new)
	}
	// normalize the cover with the configured BFS prefix before any write
	new.Cover = s.coverURL(new.Cover, s.c.UgcSync.Cfg.BFSPrefix)
	var (
		oldAllow = &ugcmdl.ArcAllow{}
		newAllow = &ugcmdl.ArcAllow{}
	)
	oldAllow.FromDatabus(old)
	newAllow.FromDatabus(new)
	if !oldAllow.CanPlay() && newAllow.CanPlay() { // if an archive is recovered, re-insert it
		log.Info("Aid %d is recovered, add it", new.Aid)
		return s.arcInsert(new)
	}
	if oldAllow.CanPlay() && !newAllow.CanPlay() { // if an archive is banned, delete it
		log.Info("Aid %d can't play, delete it", new.Aid)
		if err = s.dao.UpdateArc(ctx, new); err != nil {
			return
		}
		return s.delArc(new.Aid)
	}
	// if arc level changed or video level changed, treat and import data
	return s.arcCompare(old, new)
}
// arcCompare compares the archive & the videos of the old and the new, to update if needed.
// Archives whose type hits the PGC zones are deleted instead of synced.
func (s *Service) arcCompare(old *ugcmdl.ArchDatabus, new *ugcmdl.ArchDatabus) (err error) {
	var (
		diff   *ugcmdl.VideoDiff
		hitPGC bool
	)
	if hitPGC, err = s.delPGC(new.TypeID, new.Aid); err != nil {
		return
	}
	if hitPGC { // if the archive hits PGC types, delete it
		log.Warn("arcCompare Del Aid %d, Because of its typeID %d", new.Aid, new.TypeID)
		return
	}
	if s.diffArc(old, new) { // archive level info update if different
		if err = s.dao.UpdateArc(ctx, new); err != nil {
			appDao.PromError("DsUpdArc:Err")
			return
		}
		s.modArcCh <- []int64{new.Aid} // add one archive to submit
		appDao.PromInfo("DsUpdArc:Succ")
	}
	// video level info update if different
	if diff, err = s.diffVideos(new.Aid); err != nil {
		appDao.PromError("DsUpdVideo:Err")
		return
	}
	log.Info("Diff Result For Aid %d, Equal %v, Updated %v, Removed %v, New %v", new.Aid, diff.Equal, diff.Updated, diff.Removed, diff.New)
	if err = s.treatDiffV(diff); err != nil {
		appDao.PromError("DsUpdVideo:Err")
		return
	}
	appDao.PromInfo("DsUpdVideo:Succ")
	return
}
// getPTypeName resolves the first-level (parent) type name of the given
// type ID via the in-memory type tree; returns "" when either level is missing.
func (s *Service) getPTypeName(typeID int32) (name string) {
	second, ok := s.arcTypes[typeID]
	if !ok {
		log.Error("can't find type for ID: %d ", typeID)
		return
	}
	first, ok := s.arcTypes[second.Pid]
	if !ok {
		// BUGFIX: the arguments were previously swapped (%d received the
		// struct, %v received the pid); the pid now fills %d as intended.
		log.Error("can't find type for ID: %d, second Info: %v", second.Pid, second)
		return
	}
	return first.Name
}
// getPType resolves the first-level (parent) type ID of the given type ID;
// returns 0 when the type is unknown.
func (s *Service) getPType(typeID int) (pid int) {
	if second, ok := s.arcTypes[int32(typeID)]; ok {
		return int(second.Pid)
	}
	log.Error("can't find type for ID: %d ", typeID)
	return
}
// diffArc reports whether any archive-level field we care about differs
// between the old and the new databus messages (title, description,
// publish time, type, cover, or parent type name).
func (s *Service) diffArc(old *ugcmdl.ArchDatabus, new *ugcmdl.ArchDatabus) (diff bool) {
	switch {
	case old.Title != new.Title,
		old.Content != new.Content,
		old.PubTime != new.PubTime,
		old.TypeID != new.TypeID,
		old.Cover != new.Cover,
		s.getPTypeName(old.TypeID) != s.getPTypeName(new.TypeID):
		return true
	}
	return false
}
// diffVideos compares one archive's videos between the archive RPC (source
// of truth) and our DB, classifying every cid as new, equal, updated or removed.
func (s *Service) diffVideos(aid int64) (diff *ugcmdl.VideoDiff, err error) {
	var (
		rpcRes *arcmdl.ViewReply
		dbRes  map[int64]*ugcmdl.SimpleVideo
		video  *ugcmdl.SimpleVideo
		ok     bool
	)
	diff = &ugcmdl.VideoDiff{
		Aid: aid,
	}
	if rpcRes, err = s.videoPick(ctx, aid); err != nil {
		log.Error("rpc video pick %d, error %v", aid, err)
		return
	}
	if dbRes, err = s.dao.PickVideos(ctx, aid); err != nil {
		log.Error("db video pick %d, error %v", aid, err)
		return
	}
	for _, page := range rpcRes.Pages {
		if video, ok = dbRes[page.Cid]; !ok { // not found in DB, means it's new
			diff.New = append(diff.New, page.Cid)
			continue
		}
		if video.IndexOrder == int64(page.Page) && video.Eptitle == page.Part { // if title & index_order equal
			diff.Equal = append(diff.Equal, page.Cid)
		} else { // otherwise it's updated
			diff.Updated = append(diff.Updated, page)
		}
		delete(dbRes, page.Cid) // consumed; the leftovers are the removed ones
	}
	for _, v := range dbRes {
		diff.Removed = append(diff.Removed, v.CID)
	}
	return
}
// treatDiffV treats the result of diffVideos, like we add new ones, we deleted removed ones, and we updated the modified ones.
// All DB writes for the archive happen inside a single transaction.
func (s *Service) treatDiffV(diff *ugcmdl.VideoDiff) (err error) {
	var (
		newPages []*arcmdl.Page
		page     *arcmdl.Page
		aid      = diff.Aid
		tx       *sql.Tx
	)
	// all the operations about this archive's videos, will be in one transaction
	if tx, err = s.dao.BeginTran(ctx); err != nil {
		log.Error("BeginTran Error %v", err)
		return
	}
	// add new videos
	if len(diff.New) > 0 {
		for _, v := range diff.New {
			// NOTE(review): a pagePick failure only skips the cid; the err it
			// sets is overwritten by the TxAddVideos call below.
			if page, err = s.pagePick(ctx, v, aid, ""); err != nil {
				continue
			}
			newPages = append(newPages, page)
		}
		if err = s.dao.TxAddVideos(tx, newPages, aid); err != nil {
			tx.Rollback()
			return
		}
	}
	// delete the removed videos
	if len(diff.Removed) > 0 {
		for _, v := range diff.Removed {
			if err = s.dao.TxDelVideo(tx, v); err != nil {
				tx.Rollback()
				return
			}
		}
	}
	// update the modified videos
	if len(diff.Updated) > 0 {
		for _, v := range diff.Updated {
			if err = s.dao.TxUpdateVideo(tx, v); err != nil {
				tx.Rollback()
				return
			}
		}
	}
	tx.Commit()
	return
}

View File

@@ -0,0 +1,93 @@
package ugc
import (
"time"
appDao "go-common/app/job/main/tv/dao/app"
"go-common/app/job/main/tv/dao/lic"
"go-common/library/database/sql"
"go-common/library/log"
)
// delArcproc loops forever, picking soft-deleted archives one at a time
// and syncing their removal to the license owner; it stops once the dao
// has been closed. Runs as a background goroutine tracked by s.waiter.
func (s *Service) delArcproc() {
	defer s.waiter.Done()
	for {
		if s.daoClosed {
			log.Info("delArcproc DB closed!")
			return
		}
		// build the skeleton, arc + video data
		cAid, err := s.dao.DeletedArc(ctx)
		if err != nil && err != sql.ErrNoRows {
			log.Error("DeletedArc Error %v", err)
			appDao.PromError("SyncDelAid:Err")
			time.Sleep(time.Duration(s.c.UgcSync.Frequency.SyncFre))
			continue
		}
		if err == sql.ErrNoRows || cAid == 0 {
			log.Info("SyncDelAid No Data to Sync")
			time.Sleep(time.Duration(s.c.UgcSync.Frequency.SyncFre))
			continue
		}
		if err = s.delLic(cAid); err != nil {
			appDao.PromError("SyncDelAid:Err")
			log.Error("delLic error %v, aid %d", err, cAid)
			time.Sleep(time.Duration(s.c.UgcSync.Frequency.SyncFre))
			continue
		}
		appDao.PromInfo("SyncDelAid:Succ")
	}
}
// delArcErr logs the failure with the caller-supplied format and postpones
// the aid so the deletion gets submitted again on the next round.
func (s *Service) delArcErr(aid int64, fmt string, err error) {
	log.Error(fmt, aid, err)
	s.dao.PpDelArc(ctx, aid)
}
// delLic syncs the deletion of one archive to the license owner and, on
// success, marks the archive and its videos as submitted; failures
// postpone the aid for the next round via delArcErr.
func (s *Service) delLic(cAid int64) (err error) {
	var (
		xmlBody string
		sign    = s.c.Sync.Sign
		prefix  = s.c.Sync.UGCPrefix
	)
	xmlBody = lic.PrepareXML(lic.DelLic(sign, prefix, cAid))
	// call api
	if _, err = s.licDao.CallRetry(ctx, s.c.Sync.API.DelSeasonURL, xmlBody); err != nil {
		s.delArcErr(cAid, "xml call %d error %v", err)
		return
	}
	// update the arc & videos' submit status to finish
	if err = s.dao.FinishDelArc(ctx, cAid); err != nil {
		s.delArcErr(cAid, "FinishDelArc %d error %v", err)
	}
	return
}
// delArc soft-deletes an existing archive and its videos inside one
// transaction, flagging both for submission to the license owner.
func (s *Service) delArc(aid int64) (err error) {
	var tx *sql.Tx
	// check whether the arc exist in our DB
	if !s.arcExist(aid) {
		log.Warn("Del Arc %d, it doesn't exist", aid)
		return
	}
	// delete the arc, put submit to 1
	if tx, err = s.dao.BeginTran(ctx); err != nil { // begin transaction
		return
	}
	if err = s.dao.TxDelArc(tx, aid); err != nil {
		appDao.PromError("DelArc:Err")
		tx.Rollback()
		return
	}
	// delete the videos put submit to 1
	if err = s.dao.TxDelVideos(tx, aid); err != nil {
		appDao.PromError("DelArc:Err")
		tx.Rollback()
		return
	}
	appDao.PromInfo("DelArc:Succ")
	tx.Commit()
	return
}

View File

@@ -0,0 +1,66 @@
package ugc
import (
appDao "go-common/app/job/main/tv/dao/app"
"go-common/app/job/main/tv/dao/lic"
"go-common/library/database/sql"
"go-common/library/log"
"time"
)
// delVideoproc loops forever, picking batches of soft-deleted videos and
// syncing their removal to the license owner; it stops once the dao has
// been closed. Runs as a background goroutine tracked by s.waiter.
func (s *Service) delVideoproc() {
	defer s.waiter.Done()
	for {
		if s.daoClosed {
			log.Info("delVideoproc DB closed!")
			return
		}
		// pick deleted videos
		videoIDs, err := s.dao.DeletedVideos(ctx)
		if err != nil && err != sql.ErrNoRows {
			log.Error("videoIDs Error %v", err)
			appDao.PromError("SyncDelVid:Err")
			time.Sleep(time.Duration(s.c.UgcSync.Frequency.SyncFre))
			continue
		}
		if err == sql.ErrNoRows || len(videoIDs) == 0 {
			log.Info("No SyncDelVid Data to Sync")
			time.Sleep(time.Duration(s.c.UgcSync.Frequency.SyncFre))
			continue
		}
		if err = s.delVideoLic(videoIDs); err != nil {
			appDao.PromError("SyncDelVid:Err")
			// BUGFIX: %s on []int rendered as [%!s(int=...)...]; %v is correct.
			log.Error("delLic error %v, cids %v", err, videoIDs)
			time.Sleep(time.Duration(s.c.UgcSync.Frequency.SyncFre))
			continue
		}
		appDao.PromInfo("SyncDelVid:Succ")
	}
}
// delVideoErr logs the failure with the caller-supplied format and
// postpones the cids so the deletion gets submitted again next round.
func (s *Service) delVideoErr(cids []int, fmt string, err error) {
	log.Error(fmt, cids, err)
	s.dao.PpDelVideos(ctx, cids)
}
// delVideoLic syncs the deleted videos to the license owner and, on
// success, marks their submit status as finished; failures postpone the
// cids for the next round via delVideoErr.
func (s *Service) delVideoLic(videoIDs []int) (err error) {
	var (
		sign   = s.c.Sync.Sign
		prefix = s.c.Sync.UGCPrefix
	)
	xmlBody := lic.DelEpLic(prefix, sign, videoIDs)
	// call api
	if _, err = s.licDao.CallRetry(ctx, s.c.Sync.API.DelEPURL, xmlBody); err != nil {
		s.delVideoErr(videoIDs, "xml call %v error %v", err)
		return
	}
	// update the videos' submit status to finish
	if err = s.dao.FinishDelVideos(ctx, videoIDs); err != nil {
		s.delVideoErr(videoIDs, "FinishDelVideos %v error %v", err)
		return
	}
	// BUGFIX: this success log used to sit inside the FinishDelVideos error
	// branch, so it only fired on failure; it now marks actual success.
	log.Info("Del Video Finish, Sync For Vids: %v", videoIDs)
	return
}

View File

@@ -0,0 +1,215 @@
package ugc
import (
"context"
"fmt"
"time"
"go-common/app/job/main/tv/dao/app"
"go-common/app/job/main/tv/model/ugc"
"go-common/library/log"
)
const (
	_errSleep  = 500 * time.Millisecond // back-off after a failed step
	_succSleep = 10 * time.Millisecond  // pacing between successful steps
)
// errMid logs a per-upper failure of the full-refresh job.
func errMid(funcName string, mid int64, err error) {
	log.Error("Func:[%s], Step:[%s], Mid:[%d], Err:[%v]", "fullRefresh", funcName, mid, err)
}

// errArcPce logs a failure while treating one piece (page) of an upper's archives.
func errArcPce(funcName string, mid int64, numPce int, err error) {
	log.Error("Func:[%s], Step:[%s], Mid:[%d], NumPce:[%d], Err:[%v]", "fullRefresh-ArcPce", funcName, mid, numPce, err)
}

// infoArc logs a notable non-error event for one archive, then pauses.
// NOTE(review): it sleeps _errSleep rather than _succSleep -- confirm intended.
func infoArc(funcName string, aid int64, msg string) {
	log.Info("Func:[%s], Step:[%s], Aid:[%d], Msg:[%s]", "fullRefresh-ArcPce-Arc", funcName, aid, msg)
	time.Sleep(_errSleep)
}

// errArc logs a failure for one archive, then backs off.
func errArc(funcName string, aid int64, err error) {
	log.Error("Func:[%s], Step:[%s], Aid:[%d], Err:[%v]", "fullRefresh-ArcPce-Arc", funcName, aid, err)
	time.Sleep(_errSleep)
}

// errArcVideos logs a failure for one archive's videos, then backs off.
func errArcVideos(funcName string, aid int64, cids []int64, err error) {
	log.Error("Func:[%s], Step:[%s], Aid:[%d], Cids: [%v], Err:[%v]", "fullRefresh-ArcPce-Arc-Videos", funcName, aid, cids, err)
	time.Sleep(_errSleep)
}

// infoArcVideos logs a success for one archive's videos, then pauses briefly.
func infoArcVideos(funcName string, aid int64, cids []int64, msg string) {
	log.Info("Func:[%s], Step:[%s], Aid:[%d], Cids: [%v], Msg:[%s]", "fullRefresh-ArcPce-Arc-Videos", funcName, aid, cids, msg)
	time.Sleep(_succSleep)
}
// fullRefreshproc periodically runs the full refresh of every active
// upper's archives and videos.
func (s *Service) fullRefreshproc() {
	for {
		s.fullRefresh()
		time.Sleep(time.Duration(s.c.UgcSync.Frequency.FullRefreshFre))
	}
}
// fullRefresh walks every active upper and refreshes the CMS data for all
// of their archives and videos, page by page, logging progress per upper.
func (s *Service) fullRefresh() {
	var (
		fullName  = "fullRefresh"
		pagesize  = s.c.UgcSync.Batch.ProducerPS
		begin     = time.Now()
		totalArcs = 0
		treatedUp = 0
		totalUp   = len(s.activeUps)
	)
	if totalUp == 0 {
		log.Error("[%s] ActiveUps Empty", fullName)
		return
	}
	log.Info("fullRefresh Total Uppers Len %d", totalUp)
	for mid := range s.activeUps {
		var (
			upArcCnt int
			err      error
		)
		if upArcCnt, err = s.dao.UpArcsCnt(ctx, int64(mid)); err != nil {
			errMid("CountUpArcs", mid, err)
			continue
		}
		if upArcCnt == 0 {
			errMid("CountUpArcs", mid, fmt.Errorf("Empty Arcs"))
			continue
		}
		for arcPce := 0; arcPce < app.NumPce(int(upArcCnt), pagesize); arcPce++ { // travel the upper's archive by piece
			var upArcs []*ugc.ArcFull
			if upArcs, err = s.dao.PickUpArcs(ctx, int(mid), arcPce, pagesize); err != nil {
				errArcPce("PickUpArcs", mid, arcPce, err)
				continue
			}
			if len(upArcs) == 0 { // ran out of rows early: stop paging this upper
				errArcPce("PickUpArcs", mid, arcPce, fmt.Errorf("Empty Arcs, Stop Picking"))
				break
			}
			if err = s.fullArcs(ctx, upArcs); err != nil {
				errArcPce("FullArcs", mid, arcPce, err)
			}
			time.Sleep(time.Duration(s.c.UgcSync.Frequency.FullRefArcFre)) // pause between each archives pce treatment
		}
		treatedUp = treatedUp + 1
		totalArcs = totalArcs + upArcCnt
		log.Info("fullRefresh Total Up %d, Treated Up %d, Treated Arcs %d, Time Used %v", totalUp, treatedUp, totalArcs, time.Since(begin))
	}
	log.Info("fullRefresh Ends! Len Uppers %d, Time Used %v", len(s.activeUps), time.Since(begin))
}
// fullArcs refreshes the CMS cache of every archive in the batch, cleans up
// deleted/forbidden/transcode-failed content, and queues archives that
// still need auditing.
func (s *Service) fullArcs(ctx context.Context, arcs []*ugc.ArcFull) (err error) {
	for _, arc := range arcs {
		var (
			arcOk, actVideos, shouldAudit bool
			aid                           = arc.AID
			transCids                     []int64
			arcAllow                      = &ugc.ArcAllow{}
		)
		if err = s.dao.SetArcCMS(ctx, &arc.ArcCMS); err != nil { // set cache
			errArc("SetArcCMS", aid, err) // cache error, ignore
		}
		if arc.Deleted == 1 {
			if actVideos, err = s.dao.ActVideos(ctx, aid); err != nil {
				errArc("actVideos", aid, err) // db error
				continue
			}
			if !actVideos {
				infoArc("actVideos", aid, "Arc Deleted && No Active Videos, Jump to the next")
				continue
			} else {
				if err = s.dao.DelVideos(ctx, aid); err != nil { // delete also the videos
					errArc("actVideos", aid, err)
					continue
				}
				infoArc("actVideos", aid, "Arc Deleted, So we delete the rest videos")
			}
		}
		arcAllow.FromArcFull(arc)
		if arcOk = s.arcAllowImport(arcAllow); !arcOk {
			// NOTE(review): this warn carries no aid or reason -- hard to debug.
			log.Warn("[fullRefresh-ArcPce-Arc]")
			continue
		}
		if arcOk, transCids, err = s.transFailTreat(ctx, aid); err != nil {
			errArcVideos("TransFailVideos-DelVideos", aid, transCids, err) // db error
			continue
		}
		if !arcOk { // per transFailTreat, the archive itself is gone -- skip it
			continue
		}
		if shouldAudit, err = s.dao.ShouldAudit(ctx, aid); err != nil {
			errArc("ShouldAudit", aid, err)
			continue
		}
		if shouldAudit {
			log.Info("fullRefresh addAudCid cAid %d", aid)
			s.audAidCh <- []int64{aid} // add aid into channel to treat
		}
		if err = s.refArcVideo(ctx, aid); err != nil {
			errArc("refArcVideo", aid, err)
			continue
		}
		time.Sleep(10 * time.Millisecond) // pacing between archives
	}
	return
}
// transFailTreat deletes an archive's transcode-failed videos (and, via
// DelVideoArc, possibly the archive itself); arcOk reports whether the
// archive is still alive afterwards.
// NOTE(review): the two success messages below look swapped relative to
// the arcOk value -- confirm DelVideoArc's return semantics.
func (s *Service) transFailTreat(ctx context.Context, aid int64) (arcOk bool, failCids []int64, err error) {
	arcOk = true
	if failCids, err = s.dao.TransFailVideos(ctx, aid); err != nil { // delete transcoding failed cids
		errArc("TransFailVideos", aid, err) // db error, stop this archive here
		return
	}
	if len(failCids) == 0 {
		// infoArcVideos("TransFailVideos", aid, failCids, "No Fail Cids")
		return
	}
	if arcOk, err = s.dao.DelVideoArc(ctx, &ugc.DelVideos{
		AID:  aid,
		CIDs: failCids,
	}); err != nil {
		return
	}
	if !arcOk {
		infoArcVideos("TransFailVideos", aid, failCids, " Delete Videos & Arc succ")
		return
	}
	infoArcVideos("TransFailVideos", aid, failCids, " Delete Videos succ")
	return
}
// refArcVideo refreshes the CMS cache of every video of one archive,
// paging through them by ascending row ID.
func (s *Service) refArcVideo(ctx context.Context, cAid int64) (err error) {
	var (
		proName  = "videoProducer-video"
		pagesize = s.c.UgcSync.Batch.ProducerPS
		videoCnt int
		maxID    = 0 // highest row ID seen so far; pages are picked above it
	)
	if videoCnt, err = s.dao.ArcVideoCnt(ctx, cAid); err != nil {
		log.Error("[%s] CountArcs Aid %d, error [%v]", proName, cAid, err)
		return
	}
	if videoCnt == 0 {
		return
	}
	nbPiece := app.NumPce(videoCnt, pagesize)
	log.Info("[%s] NumPiece %d, Pagesize %d", proName, nbPiece, pagesize)
	for i := 0; i < nbPiece; i++ {
		videos, newMaxID, errR := s.dao.PickArcVideo(ctx, cAid, maxID, pagesize)
		if errR != nil {
			log.Error("[%s] Pick Piece %d Error, Ignore it", proName, i)
			continue
		}
		if newMaxID <= maxID { // guard against an endless paging loop
			log.Error("[%s] MaxID is not increasing! [%d,%d]", proName, newMaxID, maxID)
			return
		}
		maxID = newMaxID
		for _, v := range videos {
			s.dao.SetVideoCMS(ctx, v) // cache errors are ignored here
		}
		time.Sleep(500 * time.Millisecond) // pacing between pages
	}
	return
}

View File

@@ -0,0 +1,219 @@
package ugc
import (
"fmt"
"time"
appDao "go-common/app/job/main/tv/dao/app"
ugcmdl "go-common/app/job/main/tv/model/ugc"
arccli "go-common/app/service/main/archive/api"
arcmdl "go-common/app/service/main/archive/model/archive"
"go-common/library/database/sql"
"go-common/library/log"
)
const (
	_arcRetry = 3                      // max attempts for archive RPC calls
	_apiRetry = 5                      // max attempts for API calls
	_sleep    = 100 * time.Millisecond // pacing between retries
)
// upImportproc always runs to init the uppers: it repeatedly loads the
// batch of uppers pending import and initializes them, until the dao is
// closed. Runs as a background goroutine tracked by s.waiter.
func (s *Service) upImportproc() {
	var (
		err    error
		uppers []*ugcmdl.Upper
	)
	defer s.waiter.Done()
	for {
		if s.daoClosed {
			log.Info("upImportproc DB closed!")
			return
		}
		// if no more data, we scan per 30s
		if uppers, err = s.dao.Import(ctx); err != nil && err != sql.ErrNoRows {
			log.Error("upperImport error %v", err)
			appDao.PromError("ImportMid:Err")
			time.Sleep(time.Duration(s.c.UgcSync.Frequency.ImportFre))
			continue
		}
		if len(uppers) == 0 && err == sql.ErrNoRows {
			log.Info("No Import Data")
			time.Sleep(time.Duration(s.c.UgcSync.Frequency.ImportFre))
			continue
		}
		if err = s.upImport(uppers); err != nil {
			log.Error("upImport Error %v", err)
			appDao.PromError("ImportMid:Err")
			time.Sleep(time.Duration(s.c.UgcSync.Frequency.ImportFre))
			continue
		}
		appDao.PromInfo("ImportMid:Succ")
		time.Sleep(1 * time.Second)
	}
}
// upImport initializes the given batch of uppers one by one: on import
// failure the mid is postponed for retry; on success its status is marked
// finished. A FinishUpper failure aborts the whole batch.
func (s *Service) upImport(uppers []*ugcmdl.Upper) (err error) {
	for _, up := range uppers {
		// import data
		if err = s.InitUpper(up.MID); err != nil {
			log.Error("initUpper MID: %v, Err: %v, Postpone the MID", up.MID, err)
			s.dao.PpUpper(ctx, up.MID)
			continue
		}
		// update the status
		if err = s.dao.FinishUpper(ctx, up.MID); err != nil {
			log.Error("FinishUpper Mid: %d, Err: %v", up.MID, err)
			return
		}
		time.Sleep(time.Duration(s.c.UgcSync.Frequency.UpperPause)) // pause after import each upper
	}
	return
}
// InitUpper takes the upper's archive & videos, load them into our DB.
// It pages through the upper's archives (page size from config), filters
// out disallowed and already-existing ones, then bulk-inserts the rest
// page by page, pausing between pages.
func (s *Service) InitUpper(mid int64) (err error) {
	var (
		arcCount int
		ps       = s.c.UgcSync.Batch.ArcPS // page size to pick archives
		ptn      int                       // total page number
		pMatch   map[int64]*arccli.Arc     // the mapping of aid to archive model of one page
		pAids    []int64                   // the aids of one page
		videoNum int64
		begin    = time.Now()
	)
	// count upper's archive and get the total number of pages to get
	if arcCount, err = s.arcCount(mid); err != nil {
		return
	}
	log.Info("InitUpper mid %d, Count: %d", mid, arcCount)
	if arcCount == 0 {
		log.Error("Upper %d Arc Count is 0", mid)
		return
	}
	// ceiling division: number of pages needed to cover arcCount archives
	if arcCount%ps == 0 {
		ptn = arcCount / ps
	} else {
		ptn = arcCount/ps + 1
	}
	// get the upper's archives page by page
	for i := 1; i <= ptn; i++ {
		if pMatch, pAids, err = s.UpArchives(mid, i, ps); err != nil {
			log.Error("Mid %d, Page %d Error %v", mid, i, err)
			return
		}
		if len(pMatch) == 0 { // which means this page is all existing
			log.Error("Mid %d, Page %d, no need to import Due to Types Hit", mid, i)
			continue
		}
		if err = s.dao.FilterExist(ctx, &pMatch, pAids); err != nil { // filter the existing ones
			log.Error("Mid %d, Page %d Error %v", mid, i, err)
			return
		}
		if len(pMatch) == 0 { // which means this page is all existing
			log.Error("Mid %d, Page %d, no need to impot Due to Existing", mid, i)
			continue
		}
		if err = s.arcsIn(pMatch); err != nil { // insert this page's arc & views data into our DB
			log.Error("Mid %d, Page %d Error %v", mid, i, err)
			return
		}
		videoNum = videoNum + int64(len(pMatch))
		time.Sleep(time.Duration(s.c.UgcSync.Frequency.UpInitFre)) // pause after import each page of upper's archive
	}
	log.Info("ImportUpper Mid %d, Page Number %d, Page Size %d, "+
		"Video Number %d, Time %v", mid, ptn, ps, videoNum, time.Since(begin)) // record init upper time
	return
}
// mapKeys returns the key set (aids) of the given aid->archive map;
// an empty map yields a nil slice.
func mapKeys(myMap map[int64]*arccli.Arc) (keys []int64) {
	if len(myMap) == 0 {
		return
	}
	keys = make([]int64, 0, len(myMap))
	for aid := range myMap {
		keys = append(keys, aid)
	}
	return
}
// UpArchives picks one page of the up's archives.
// It returns both the aid->archive map and the aid list of the archives
// that pass the import filter (arcAllowImport). An empty RPC result is
// treated as an error.
func (s *Service) UpArchives(mid int64, pn int, ps int) (match map[int64]*arccli.Arc, aids []int64, err error) {
	var res []*arccli.Arc
	match = make(map[int64]*arccli.Arc)
	if err = Retry(func() (err error) {
		if res, err = s.arcRPC.UpArcs3(ctx, &arcmdl.ArgUpArcs2{
			Mid: mid,
			Pn:  pn,
			Ps:  ps,
		}); err != nil {
			log.Error("%+v", err)
		}
		return
	}, _arcRetry, _sleep); err != nil {
		log.Error("upArchives Error %+v", err)
		return
	} else if len(res) == 0 {
		err = fmt.Errorf("result empty")
		return
	}
	for _, v := range res {
		arcAllow := &ugcmdl.ArcAllow{}
		arcAllow.FromArcmdl(v)
		if allow := s.arcAllowImport(arcAllow); !allow { // check whether the archive is allowed to import into TV db
			continue
		}
		match[v.Aid] = v
		aids = append(aids, v.Aid)
	}
	return
}
// Retry invokes callback up to retry times, sleeping between failed
// attempts, and stops as soon as a call succeeds. The error of the last
// attempt (nil on success) is returned.
func Retry(callback func() error, retry int, sleep time.Duration) (err error) {
	for attempt := 0; attempt < retry; attempt++ {
		if err = callback(); err == nil {
			break
		}
		time.Sleep(sleep)
	}
	return
}
// arcsIn picks one page of archive data and their views data, to import them into the DB one by one.
// Each archive and its videos are inserted inside one transaction, so an
// archive whose view data is missing or fails to insert is rolled back as
// a whole and aborts the page import.
func (s *Service) arcsIn(pMatch map[int64]*arccli.Arc) (err error) {
	var (
		tx     *sql.Tx
		pViews map[int64]*arccli.ViewReply
		pAids  []int64
	)
	// get the filtered aids to get the views
	pAids = mapKeys(pMatch)
	if pViews, err = s.arcViews(pAids); err != nil {
		log.Error("arcsIn Error %v", err)
		return
	}
	// import the arc & its video one by one
	for aid, arc := range pMatch {
		// begin the transaction and insert the archive data
		if tx, err = s.dao.BeginTran(ctx); err != nil { // begin transaction
			return
		}
		arc.Pic = s.coverURL(arc.Pic, s.c.UgcSync.Cfg.BFSPrefix) // complete relative bfs cover url
		if err = s.dao.TxImportArc(tx, arc); err != nil {
			tx.Rollback()
			return
		}
		cViews, ok := pViews[arc.Aid]
		if !ok { // no view data for this aid: abort the whole import
			log.Error("arcIn View Data for %d not found", arc.Aid)
			tx.Rollback()
			return
		}
		if err = s.dao.TxMnlVideos(tx, cViews); err != nil {
			tx.Rollback()
			return
		}
		tx.Commit()
		log.Info("Succ Add Arc & View for Aid: %d", aid)
	}
	return
}

View File

@@ -0,0 +1,27 @@
package ugc
import (
"encoding/json"
"fmt"
"testing"
. "github.com/smartystreets/goconvey/convey"
)
// TestService_InitUpper is an integration test: it imports a fixed upper's
// history through the live daos (requires the full service environment).
func TestService_InitUpper(t *testing.T) {
	Convey("TestService_InitUpper", t, WithService(func(s *Service) {
		err := s.InitUpper(10920044)
		So(err, ShouldBeNil)
	}))
}
// TestService_UpArchives is an integration test: it fetches one page of a
// fixed upper's archives and prints the filtered result for inspection.
func TestService_UpArchives(t *testing.T) {
	Convey("TestService_UpArchives", t, WithService(func(s *Service) {
		pMatch, pAids, err := s.UpArchives(10920044, 1, 100)
		So(err, ShouldBeNil)
		data, _ := json.Marshal(pMatch)
		data2, _ := json.Marshal(pAids)
		fmt.Println(string(data))
		fmt.Println(string(data2))
	}))
}

View File

@@ -0,0 +1,138 @@
package ugc
import (
"context"
"strings"
"time"
appDao "go-common/app/job/main/tv/dao/app"
ugcmdl "go-common/app/job/main/tv/model/ugc"
arccli "go-common/app/service/main/archive/api"
"go-common/library/database/sql"
"go-common/library/log"
)
// manualproc is a long-running worker (registered on s.waiter, stopped via
// s.daoClosed) that polls the DB for manually-flagged archives and imports
// them batch by batch.
func (s *Service) manualproc() {
	var (
		err  error
		arcs []*ugcmdl.Archive
	)
	defer s.waiter.Done() // signal Close() that this worker has exited
	for {
		if s.daoClosed { // service is shutting down; stop polling
			log.Info("manualproc DB closed!")
			return
		}
		if arcs, err = s.dao.Manual(ctx); err != nil && err != sql.ErrNoRows {
			log.Error("manualproc Error %v", err)
			appDao.PromError("Manual:Err")
			time.Sleep(time.Duration(s.c.UgcSync.Frequency.ManualFre))
			continue
		}
		if err == sql.ErrNoRows || len(arcs) == 0 { // nothing to treat; wait for the next scan
			log.Info("No Manual Data")
			time.Sleep(time.Duration(s.c.UgcSync.Frequency.ManualFre))
			continue
		}
		if err = s.manual(arcs); err != nil {
			log.Error("manualproc Error %v", err)
			appDao.PromError("Manual:Err")
			time.Sleep(time.Duration(s.c.UgcSync.Frequency.ManualFre))
			continue
		}
		appDao.PromInfo("Manual:Succ")
		time.Sleep(1 * time.Second) // short pause between successful batches
	}
}
// manual walks the manually-flagged archives and imports each one.
// A failing archive is logged and postponed for a later retry instead of
// aborting the rest of the batch.
func (s *Service) manual(arcs []*ugcmdl.Archive) (err error) {
	for _, arc := range arcs { // treat each archive independently
		if err = s.importArc(context.Background(), arc.AID, true); err == nil {
			continue
		}
		log.Error("importArc Error %v", err)
		s.dao.Ppmnl(ctx, arc.AID) // postpone the next retry
	}
	return
}
// coverURL completes a relative bfs uri (example: /bfs/archive/diuren.png)
// with the given prefix; any other uri is returned untouched.
func (s *Service) coverURL(uri string, prefix string) string {
	if !strings.HasPrefix(uri, "/bfs") {
		return uri
	}
	return prefix + uri
}
// importArc imports an brand new archive data, isManual means whether we need update the arc's status.
// The flow: pick the archive via RPC, check it is allowed into the TV db
// (a disallowed manual archive is deleted instead), pick its videos, then
// write archive + videos (+ manual status reset) in one transaction.
func (s *Service) importArc(ctx context.Context, aid int64, isManual bool) (err error) {
	var (
		tx       *sql.Tx
		arcGrpc  *arccli.Arc
		view     *arccli.ViewReply
		arcAllow = &ugcmdl.ArcAllow{}
		arc      = &ugcmdl.Archive{}
	)
	if arcGrpc, err = s.arcPick(ctx, aid); err != nil { // pick archive api data
		return
	}
	arcAllow.FromArcReply(arcGrpc)
	if allow := s.arcAllowImport(arcAllow); !allow { // check whether the archive is allowed to import into TV db
		if isManual {
			// a manually flagged but disallowed archive is removed from our side
			err = s.delArc(aid)
		}
		return
	}
	arc.FromArcReply(arcGrpc)
	arc.Cover = s.coverURL(arc.Cover, s.c.UgcSync.Cfg.BFSPrefix) // complete relative bfs cover url
	if view, err = s.videoPick(ctx, aid); err != nil { // pick video api data
		return
	}
	if tx, err = s.dao.BeginTran(ctx); err != nil { // begin transaction
		return
	}
	if isManual {
		if err = s.dao.TxMnlArc(tx, arc); err != nil { // manual import archive data, update
			tx.Rollback()
			return
		}
		if arc.MID != 0 {
			// best effort: make sure the archive's upper exists in our DB/cache
			s.manualUp(ctx, arc.MID)
		}
	} else {
		if err = s.dao.TxAutoArc(tx, arc); err != nil { // databus import archive data, insert
			tx.Rollback()
			return
		}
	}
	if err = s.dao.TxMnlVideos(tx, view); err != nil { // import video data
		tx.Rollback()
		return
	}
	if isManual {
		if err = s.dao.TxMnlStatus(tx, aid); err != nil { // update the manual to 0, finish the operation
			tx.Rollback()
			return
		}
	}
	log.Info("ImportArc Aid %d Succ", aid)
	tx.Commit()
	return
}
// manualUp imports the manual submit archive's upper.
// It is best effort: when the upper already exists or the remote card
// lookup fails, the function simply returns after logging.
func (s *Service) manualUp(ctx context.Context, mid int64) {
	if cmid, _ := s.dao.UpInList(ctx, mid); cmid != 0 { // mid already in list, no need to import
		return
	}
	upRPC, err := s.upDao.Card3(ctx, mid)
	if err != nil { // load remote upper data
		// fix: err was formatted with %d; %v is the correct verb for an error
		log.Warn("[manualUp] Card3 Mid %d, Err %v", mid, err)
		return
	}
	s.upDao.ImportUp(ctx, &ugcmdl.EasyUp{ // import this upper into DB and cache
		MID:  mid,
		Face: upRPC.Face,
		Name: upRPC.Name,
	})
}

View File

@@ -0,0 +1,84 @@
package ugc
import (
"encoding/json"
appDao "go-common/app/job/main/tv/dao/app"
ugcmdl "go-common/app/job/main/tv/model/ugc"
"go-common/library/log"
xtime "go-common/library/time"
)
// arcDatabus refreshes the mc cache for archive media info.
// It rebuilds the cms cache entry from the databus payload, warms the rpc
// view cache for passing archives, and triggers the zone list maintenance.
func (s *Service) arcDatabus(jsonstr json.RawMessage) (err error) {
	var (
		arc     = &ugcmdl.DatabusArc{}
		pubtime int64
	)
	if err = json.Unmarshal(jsonstr, arc); err != nil {
		log.Error("json.Unmarshal(%s) error(%v)", jsonstr, err)
		return
	}
	arcMark := arc.New
	// a broken pubtime is only warned; the zero value is cached
	if pubtime, err = appDao.TimeTrans(arcMark.Pubtime); err != nil {
		log.Warn("arcDatabus Pubtime AVID: %d, Err %v", arcMark.AID, err)
	}
	// we prepare the cms cache
	if err = s.dao.SetArcCMS(ctx, &ugcmdl.ArcCMS{
		// Media Info
		Title:   arcMark.Title,
		AID:     arcMark.AID,
		Content: arcMark.Content,
		Cover:   arcMark.Cover,
		TypeID:  arcMark.TypeID,
		Pubtime: xtime.Time(pubtime),
		Videos:  arcMark.Videos,
		Valid:   arcMark.Valid,
		Deleted: arcMark.Deleted,
		Result:  arcMark.Result,
	}); err != nil {
		log.Error("arcDatabus setArcCMS AVID: %d, Err %v", arcMark.AID, err)
	}
	// we prepare the rpc cache for the ugc view page if the archive is able to play
	if arcMark.IsPass() {
		s.viewCache(int64(arcMark.AID))
		appDao.PromInfo("ArcRPC-AddCache")
	}
	// maintain the zone index lists with the old/new states
	s.listMtn(arc.Old, arc.New)
	return
}
// videoDatabus refreshes the mc cache for video media info.
// It also feeds the report / audit / reshelf channels according to the
// video's state transition (old -> new) carried by the databus message.
func (s *Service) videoDatabus(jsonstr json.RawMessage) (err error) {
	var (
		video  = &ugcmdl.DatabusVideo{}
		criCID = s.c.UgcSync.Cfg.CriticalCid
	)
	if err = json.Unmarshal(jsonstr, video); err != nil {
		log.Error("json.Unmarshal(%s) error(%v)", jsonstr, err)
		return
	}
	vm := video.New
	if vm.ToReport(criCID) { // if the video has not been reported yet, we do it and update the mark field from 0 to 1
		s.repCidCh <- vm.CID
	}
	if vm.ToAudit(criCID) {
		log.Info("videoDatabus addAudCid cAid %d", vm.AID)
		s.audAidCh <- []int64{vm.AID} // add aid into channel to treat
	}
	if video.Old == nil { // if the brand new episode can play
		if vm.CanPlay() {
			log.Info("videoDatabus reshelfAid cAid %d", vm.AID)
			s.reshelfAidCh <- vm.AID
		}
	} else { // or it couldn't play and it passes now
		if !video.Old.CanPlay() && vm.CanPlay() {
			log.Info("videoDatabus reshelfAid cAid %d", vm.AID)
			s.reshelfAidCh <- vm.AID
		}
	}
	if err = s.dao.SetVideoCMS(ctx, vm.ToCMS()); err != nil { // we prepare the cms cache
		log.Warn("videoDatabus setVideoCMS CID: %d, Err %v", vm.CID, err)
	}
	return
}

View File

@@ -0,0 +1,125 @@
package ugc
import (
"context"
"fmt"
ugcmdl "go-common/app/job/main/tv/model/ugc"
arccli "go-common/app/service/main/archive/api"
arcmdl "go-common/app/service/main/archive/model/archive"
"go-common/library/ecode"
"go-common/library/log"
)
// videoPick calls the archive View RPC with retry, picking the videos of
// one archive.
func (s *Service) videoPick(c context.Context, aid int64) (resp *arccli.ViewReply, err error) {
	if err = Retry(func() (err error) {
		if resp, err = s.arcClient.View(c, &arccli.ViewRequest{
			Aid: aid,
		}); err != nil {
			log.Error("ArcRPC For Aid: %d, Error: %v", aid, err)
		}
		return
	}, _arcRetry, _sleep); err != nil {
		// fix: the log tag said "upArchives" (copy-paste from UpArchives)
		log.Error("videoPick Error %+v", err)
		return
	}
	return
}
// arcAllowImport tells whether the archive is allowed to import into TV database.
// An archive is rejected when it can't play, requires ugc pay, or its type
// hits the pgc zones; non-original archives are only warned about.
func (s *Service) arcAllowImport(arc *ugcmdl.ArcAllow) (allowed bool) {
	if !arc.CanPlay() {
		log.Warn("arcAllowImport Aid %d Not allowed Due to State %d", arc.Aid, arc.State)
		return
	}
	if arc.Ugcpay == arcmdl.AttrYes {
		log.Warn("arcAllowImport Aid %d Not allowed Due to Ugcpay %d", arc.Aid, arc.Ugcpay)
		return
	}
	if s.hitPGC(arc.Typeid) {
		log.Warn("arcAllowImport Aid %d Not allowed Due to HitPGC %d", arc.Aid, arc.Typeid)
		return
	}
	if !arc.IsOrigin() {
		// fix: the format string was missing a verb for arc.Copyright (go vet: extra arg).
		// NOTE(review): unlike the branches above, this one does NOT return, so
		// non-original archives are still imported — confirm this is intended.
		log.Warn("arcAllowImport Aid %d Not Origin, Copyright %d", arc.Aid, arc.Copyright)
	}
	allowed = true
	return
}
// arcPick calls the Arc API with retry, picking the archive data.
// An empty reply (nil Arc) is mapped to ecode.NothingFound.
func (s *Service) arcPick(c context.Context, aid int64) (arc *arccli.Arc, err error) {
	var arcReply *arccli.ArcReply
	for i := 0; i < _arcRetry; i++ {
		if arcReply, err = s.arcClient.Arc(c, &arccli.ArcRequest{Aid: aid}); err == nil {
			break
		}
	}
	if err != nil {
		// fix: the log tag said "upArchives" (copy-paste from UpArchives)
		log.Error("arcPick Aid %d Error %v", aid, err)
		return
	}
	if arcReply == nil || arcReply.Arc == nil { // defensive: empty reply counts as not found
		err = ecode.NothingFound
		return
	}
	arc = arcReply.Arc
	return
}
// arcCount counts the mid's archives via the UpCount2 RPC (with retry),
// returning the total number.
func (s *Service) arcCount(mid int64) (count int, err error) {
	if err = Retry(func() (err error) {
		if count, err = s.arcRPC.UpCount2(ctx, &arcmdl.ArgUpCount2{
			Mid: mid,
		}); err != nil {
			log.Error("ArcCount For Mid: %d, Error: %v", mid, err)
		}
		return
	}, _arcRetry, _sleep); err != nil {
		// fix: the log tag said "upArchives" (copy-paste from UpArchives)
		log.Error("arcCount Error %+v", err)
	}
	return
}
// arcViews picks the views of the given page of aids (with retry).
// An empty Views map in the reply is treated as an error.
func (s *Service) arcViews(aids []int64) (res map[int64]*arccli.ViewReply, err error) {
	var resp *arccli.ViewsReply
	if err = Retry(func() (err error) {
		if resp, err = s.arcClient.Views(ctx, &arccli.ViewsRequest{
			Aids: aids,
		}); err != nil {
			log.Error("%+v", err)
		}
		return
	}, _arcRetry, _sleep); err != nil {
		// fix: the log tag said "upArchives" (copy-paste from UpArchives)
		log.Error("arcViews Error %+v", err)
		return
	} else if len(resp.Views) == 0 {
		err = fmt.Errorf("result empty")
		return
	}
	res = resp.Views
	return
}
// pagePick calls the Video3 RPC with retry, picking one page (cid) of an
// archive; a nil result is treated as an error.
func (s *Service) pagePick(c context.Context, cid int64, aid int64, ip string) (res *arccli.Page, err error) {
	if err = Retry(func() (err error) {
		if res, err = s.arcRPC.Video3(c, &arcmdl.ArgVideo2{
			Aid:    aid,
			Cid:    cid,
			RealIP: ip,
		}); err != nil {
			log.Error("ArcRPC For Aid: %d, Cid: %d, Error: %v", aid, cid, err)
		}
		return
	}, _arcRetry, _sleep); err != nil {
		// fix: the log tag said "upArchives" (copy-paste from UpArchives)
		log.Error("pagePick Error %+v", err)
		return
	} else if res == nil {
		err = fmt.Errorf("result empty")
	}
	return
}

View File

@@ -0,0 +1,167 @@
package ugc
import (
"encoding/json"
"time"
model "go-common/app/job/main/tv/model/pgc"
ugcmdl "go-common/app/job/main/tv/model/ugc"
"go-common/library/log"
"go-common/library/queue/databus"
)
// repCidproc batches cids arriving on repCidCh and reports them to playurl
// once a full batch (ReportCidPS) is collected. Long-running worker,
// registered on s.waiter; exits when the channel is closed.
// NOTE(review): on reportCids failure the batch is only logged here —
// confirm the postpone logic inside reportCids/dao covers the retry.
func (s *Service) repCidproc() {
	defer s.waiter.Done()
	var toRepCids []int64
	for {
		cid, ok := <-s.repCidCh
		if !ok { // channel closed: service shutdown
			log.Warn("[repCidproc] channel quit")
			return
		}
		toRepCids = append(toRepCids, cid)
		if len(toRepCids) < s.c.UgcSync.Batch.ReportCidPS { // not enough cid, stay waiting
			time.Sleep(5 * time.Second)
			continue
		}
		// copy the batch so the accumulator can be reset before reporting
		goCids := make([]int64, len(toRepCids))
		copy(goCids, toRepCids)
		toRepCids = []int64{}
		if err := s.reportCids(goCids); err != nil {
			log.Error("reportCids Cids %v, Err %v", goCids, err)
			continue
		}
	}
}
// audCidproc batches aids arriving on audAidCh (deduplicated via a map)
// and, once a full batch is collected, submits them to the license audit
// sync. Long-running worker, registered on s.waiter; exits when the
// channel is closed.
func (s *Service) audCidproc() {
	defer s.waiter.Done()
	var (
		toAudAids = make(map[int64]int)
		ps        = s.c.UgcSync.Batch.ReportCidPS
	)
	for {
		aids, ok := <-s.audAidCh
		if !ok { // channel closed: service shutdown
			log.Warn("[audCidproc] channel quit")
			return
		}
		for _, aid := range aids {
			toAudAids[aid] = 1 // use map to remove duplicated aids
		}
		if len(toAudAids) < ps { // not enough aids, stay waiting
			time.Sleep(3 * time.Second)
			continue
		}
		distinctAIDs := pickKeys(toAudAids)
		toAudAids = make(map[int64]int)
		if err := s.wrapSyncLic(ctx, distinctAIDs); err != nil {
			log.Error("audCidproc Aids %v, Err %v", distinctAIDs, err)
			continue
		}
		// fix: log the size of the submitted batch; it previously logged
		// len(aids), the size of the last received message only
		log.Info("audCidproc Apply %d Aids: %v", len(distinctAIDs), distinctAIDs)
	}
}
// reshelfArcproc batches aids arriving on reshelfAidCh (deduplicated via a
// map); once a full batch is collected, the currently off-shelf ones among
// them are put back on shelf. Long-running worker, registered on s.waiter;
// exits when the channel is closed.
func (s *Service) reshelfArcproc() {
	defer s.waiter.Done()
	var (
		reshelfAids = make(map[int64]int)
		ps          = s.c.UgcSync.Batch.ReshelfPS
	)
	for {
		aid, ok := <-s.reshelfAidCh
		if !ok { // channel closed: service shutdown
			log.Warn("[reshelfAid] channel quit")
			return
		}
		reshelfAids[aid] = 1 // use map to remove duplicated aids
		if len(reshelfAids) < ps { // not enough cid, stay waiting
			time.Sleep(3 * time.Second)
			continue
		}
		distinctAIDs := pickKeys(reshelfAids)
		reshelfAids = make(map[int64]int)
		// keep only the aids that are currently off shelf, then reshelf them
		if offAids, err := s.cmsDao.OffArcs(ctx, distinctAIDs); err != nil {
			log.Error("reshelfAid OffArcs Aids %v, Err %v", distinctAIDs, err)
			continue
		} else if len(offAids) == 0 {
			log.Warn("reshelfAid OffArcs Origin Aids %v, after filter it's empty", distinctAIDs)
			continue
		} else {
			if err = s.cmsDao.ReshelfArcs(ctx, offAids); err != nil {
				log.Error("reshelfAid OffAids %v, ReshelfArcs Err %v", offAids, err)
				continue
			}
			log.Info("reshelfAid Apply %d Aids: %v", len(offAids), offAids)
		}
	}
}
// reportCids submits the given cids to the playurl report API (retrying up
// to _apiRetry times) and, on success, marks them reported in the DB.
func (s *Service) reportCids(cids []int64) (err error) {
	var cidReq []*ugcmdl.CidReq
	for _, v := range cids {
		cidReq = append(cidReq, &ugcmdl.CidReq{CID: v})
	}
	for i := 0; i < _apiRetry; i++ {
		if err = s.dao.RepCidBatch(ctx, cidReq); err == nil {
			break
		}
	}
	if err != nil { // still failing after _apiRetry attempts
		log.Error("ReportCid Cids %v Err %v", cids, err)
		return
	}
	err = s.dao.FinishReport(ctx, cids)
	log.Info("ReportCids %v, Len %d, Succ!", cids, len(cids))
	return
}
// consume Databus message; beause daily modification is not many, so use simple loop
// consumeVideo drains the ugc databus: each message is committed first and
// then handed to a UgcDbus goroutine, bounded by the consumerLimit
// semaphore. Long-running worker, registered on s.waiter; exits when the
// subscription channel closes.
func (s *Service) consumeVideo() {
	defer s.waiter.Done()
	for {
		msg, ok := <-s.ugcSub.Messages()
		if !ok { // subscription closed: service shutdown
			log.Info("databus: tv-job video consumer exit!")
			return
		}
		msg.Commit()
		time.Sleep(1 * time.Millisecond)
	Loop:
		// spin until a semaphore slot frees up, then dispatch the message
		for {
			select {
			case s.consumerLimit <- struct{}{}: // would block if already 2 goroutines:
				go s.UgcDbus(msg)
				break Loop
			default:
				log.Warn("consumeVideo thread Full!!!")
				time.Sleep(1 * time.Second)
			}
		}
	}
}
// UgcDbus consumes one ugc databus message: it dispatches ugc_video /
// ugc_archive table updates to the matching cache-refresh handler and
// ignores deletions. It always frees one consumerLimit slot on return.
func (s *Service) UgcDbus(msg *databus.Message) {
	// clean the space for new consumer to begin; a defer replaces the three
	// duplicated channel reads of the original on every exit path
	defer func() {
		<-s.consumerLimit
	}()
	m := &model.DatabusRes{}
	log.Info("[consumeVideo] New Message: %s", msg)
	if err := json.Unmarshal(msg.Value, m); err != nil {
		log.Error("json.Unmarshal(%s) error(%v)", msg.Value, err)
		return
	}
	if m.Action == "delete" {
		log.Info("[consumeVideo] Video Deletion, We ignore:<%v>,<%v>", m, msg.Value)
		return
	}
	switch m.Table {
	case "ugc_video":
		s.videoDatabus(msg.Value)
	case "ugc_archive":
		s.arcDatabus(msg.Value)
	default:
		// fix: the original format string had no verb for m.Table (go vet)
		log.Error("[consumeVideo] Wrong Table Name: %s", m.Table)
	}
}

View File

@@ -0,0 +1,85 @@
package ugc
import (
"encoding/json"
"os"
"time"
model "go-common/app/job/main/tv/model/pgc"
"go-common/library/log"
)
const (
	// errFormat is the shared "func - step - error" log format of this file.
	errFormat = "Func:[%s] - Step:[%s] - Error:[%v]"
	//ContLimit is used for getting pgc season value 50 records every time
	_ContLimit = 50
)
// seaUgcContproc periodically regenerates and uploads the ugc search
// content file until the dao is flagged closed.
func (s *Service) seaUgcContproc() {
	for !s.daoClosed {
		s.seaUgcCont()
		time.Sleep(time.Duration(s.c.Search.Cfg.UploadFre))
	}
	log.Info("seaUgcContproc DB closed!")
}
// seaUgcCont is used for generate search content content: it dumps all ugc
// archives as json lines into a local file, then uploads the file and its
// md5 companion to the search FTP.
func (s *Service) seaUgcCont() {
	var (
		cycle int //cycle count and every cycle is 50 records
		cnt   int
		file  *os.File
		err   error
	)
	if cnt, err = s.dao.UgcCnt(ctx); err != nil {
		log.Error(errFormat, "searUgcCont", "UgcCnt", err)
		return
	}
	// ceiling division: number of 50-record pages needed
	cycle = cnt / _ContLimit
	if cnt%_ContLimit != 0 {
		cycle = cnt/_ContLimit + 1
	}
	// write into the file
	if file, err = os.OpenFile(s.c.Search.UgcContPath, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0766); err != nil {
		log.Error(errFormat, "searUgcCont", "OpenFile", err)
		return
	}
	werr := s.writeUgcCont(file, cycle)
	// fix: the original leaked the handle on the write-loop error returns;
	// close unconditionally before computing md5 / uploading
	file.Close()
	if werr != nil {
		return
	}
	//calculate file's md5
	if err = s.ftpDao.FileMd5(s.c.Search.UgcContPath, s.c.Search.UgcContMd5Path); err != nil {
		log.Error(errFormat, "searUgcCont", "fileMd5", err)
		return
	}
	// upload original file
	if err = s.ftpDao.UploadFile(s.c.Search.UgcContPath, s.c.Search.FTP.RemoteUgcCont, s.c.Search.FTP.RemoteUgcURL); err != nil {
		log.Error(errFormat, "searUgcCont-File", "uploadFile", err)
		return
	}
	// upload md5 file
	if err = s.ftpDao.UploadFile(s.c.Search.UgcContMd5Path, s.c.Search.FTP.RemoteUgcContMd5, s.c.Search.FTP.RemoteUgcURL); err != nil {
		log.Error(errFormat, "searUgcCont-Md5", "uploadFile", err)
		return
	}
	log.Info("FTP Upload Success")
}

// writeUgcCont pages through the ugc archives (cycle pages of _ContLimit)
// and writes each one as a json line into file.
func (s *Service) writeUgcCont(file *os.File, cycle int) (err error) {
	var (
		archives []*model.SearUgcCon
		str      []byte // the json string to write in file
		maxID    int
	)
	//cycle get sql value
	for i := 0; i < cycle; i++ {
		if archives, maxID, err = s.dao.UgcCont(ctx, maxID, _ContLimit); err != nil {
			log.Error(errFormat, "searUgcCont", "UgcCont", err)
			return
		}
		for _, v := range archives {
			v.Typeid = s.getPType(v.Typeid) // map to first-level type id
			if str, err = json.Marshal(v); err != nil {
				log.Error(errFormat, "searUgcCont", "JsonMarshal", err)
				return
			}
			file.WriteString(string(str) + "\n")
		}
	}
	return
}

View File

@@ -0,0 +1,13 @@
package ugc
import (
"testing"
. "github.com/smartystreets/goconvey/convey"
)
// TestService_SearUgcCon is an integration smoke test: it runs one full
// generate-and-upload cycle against the live daos and FTP.
func TestService_SearUgcCon(t *testing.T) {
	Convey("search season content count", t, WithService(func(s *Service) {
		s.seaUgcCont()
	}))
}

View File

@@ -0,0 +1,92 @@
package ugc
import (
"context"
"time"
"go-common/app/job/main/tv/dao/lic"
model "go-common/app/job/main/tv/model/pgc"
ugcmdl "go-common/app/job/main/tv/model/ugc"
"go-common/library/log"
)
// modArcproc consumes modArcCh: for every modified aid whose cid has
// already been submitted, it re-syncs the archive data to the license
// owner. Long-running worker, registered on s.waiter; exits when the
// channel is closed.
func (s *Service) modArcproc() (err error) {
	defer s.waiter.Done()
	var cid int64
	for {
		cAids, ok := <-s.modArcCh
		if !ok { // channel closed: service shutdown
			log.Warn("[modLic] channel quit")
			return
		}
		for _, cAid := range cAids {
			// check the aid is eligible (one of its cids was already submitted)
			if cid, err = s.dao.VideoSubmit(ctx, cAid); err != nil {
				log.Warn("modArc Aid %d, Err %v, Jump", cAid, err)
				time.Sleep(time.Duration(s.c.UgcSync.Frequency.ErrorWait))
				continue
			}
			log.Info("modArc Aid %d, Can submit because CID %d already submitted", cAid, cid)
			if err = s.modArc(ctx, cAid); err != nil {
				log.Warn("modArc Aid %d Err %v", cAid, err)
				continue
			}
		}
	}
}
// modArc syncs one modified archive (with an empty video list) to the
// license owner and marks it finished on success.
func (s *Service) modArc(ctx context.Context, cAid int64) (err error) {
	var (
		skeleton = &ugcmdl.LicSke{}
		licData  *model.License
		xmlBody  string
		arc      *ugcmdl.Archive
	)
	if arc, err = s.dao.ParseArc(ctx, cAid); err != nil {
		log.Warn("ParseArc Aid %d not found", cAid)
		// fix: the original fell through with a nil arc and panicked on
		// arc.ToSimple(); abort like wrapSyncLic does
		return
	}
	skeleton.Arc = arc.ToSimple()
	skeleton.Videos = []*ugcmdl.SimpleVideo{} // empty videos
	// build the license data and transform to xml
	if licData, err = s.auditMsg(skeleton); err != nil {
		log.Error("build lic msg %d error %v", cAid, err)
		return
	}
	xmlBody = lic.PrepareXML(licData)
	// call api
	if _, err = s.licDao.CallRetry(ctx, s.c.Sync.API.AddURL, xmlBody); err != nil {
		log.Error("xml call %d error %v", cAid, err)
		return
	}
	// update the arc & videos' submit status to finish
	if err = s.dao.FinishArc(ctx, cAid); err != nil {
		log.Error("finishArc %d Error %v", cAid, err)
	}
	return
}
// wrapSyncLic warps the syncLic method with aidMap.
// For each aid it loads the archive, skips the ones that are missing,
// disallowed or deleted, and syncs the remainder to the license owner;
// individual failures never abort the batch.
func (s *Service) wrapSyncLic(ctx context.Context, aids []int64) (err error) {
	var arc *ugcmdl.Archive
	for _, cAid := range aids {
		if arc, err = s.dao.ParseArc(ctx, cAid); err != nil {
			log.Warn("wrapSyncLic ParseArc Aid %d not found", cAid)
			continue
		}
		arcAllow := &ugcmdl.ArcAllow{}
		arcAllow.FromArchive(arc)
		if !s.arcAllowImport(arcAllow) { // not eligible for the TV db
			log.Warn("wrapSyncLic cAid %d Can't play", cAid)
			continue
		}
		if arc.Deleted == 1 { // already removed on our side
			log.Warn("wrapSyncLic cAid %d Deleted", cAid)
			continue
		}
		if err = s.syncLic(cAid, arc.ToSimple()); err != nil {
			log.Error("wrapSyncLic cAid %d Err %v", cAid, err)
			continue
		}
	}
	return
}

View File

@@ -0,0 +1,190 @@
package ugc
import (
"fmt"
"go-common/app/job/main/tv/dao/lic"
model "go-common/app/job/main/tv/model/pgc"
ugcmdl "go-common/app/job/main/tv/model/ugc"
"go-common/library/ecode"
"go-common/library/log"
)
const (
	_crEnd      = "1970-01-01" // copyright end date
	_definition = "SD"         // default definition reported to the license owner
)
// syncVideoErr logs a sync failure and postpones the given videos so the
// next submit round picks them up again.
func (s *Service) syncVideoErr(funcName string, cids []int64, aid int64, err error) {
	step := "syncLic:" + funcName
	s.dao.PpVideos(ctx, cids) // postpone for the next submit
	errArcVideos(step, aid, cids, err)
}
// syncLic: sync our arc data to License owner.
// The archive's to-audit videos are submitted in pieces of SyncPS; a
// failing piece is postponed (syncVideoErr) without aborting the others.
func (s *Service) syncLic(cAid int64, arc *ugcmdl.SimpleArc) (err error) {
	var (
		skeleton  = &ugcmdl.LicSke{}
		videoPces [][]*ugcmdl.SimpleVideo
		ps        = s.c.UgcSync.Batch.SyncPS // sync page size
		licData   *model.License
		xmlBody   string
		errCall   error
	)
	skeleton.Arc = arc
	if videoPces, err = s.dao.ParseVideos(ctx, cAid, ps); err != nil {
		log.Error("ParseVideos %d Error %v", cAid, err)
		return
	}
	if len(videoPces) == 0 { // no to audit cids
		return
	}
	for _, videos := range videoPces {
		skeleton.Videos = videos
		var cids = []int64{}
		for _, v := range videos {
			cids = append(cids, v.CID)
		}
		if licData, errCall = s.auditMsg(skeleton); errCall != nil { // build the license data and transform to xml
			s.syncVideoErr("AuditMsg ", cids, cAid, errCall)
			continue
		}
		xmlBody = lic.PrepareXML(licData)
		if _, errCall = s.licDao.CallRetry(ctx, s.c.Sync.API.AddURL, xmlBody); errCall != nil { // call api
			s.syncVideoErr("XmlCall ", cids, cAid, errCall)
			continue
		}
		if errCall = s.dao.FinishVideos(ctx, skeleton.Videos, cAid); errCall != nil { // update the arc & videos' submit status to finish
			s.syncVideoErr("FinishVideos ", cids, cAid, errCall)
			continue
		}
		infoArcVideos("syncLic", cAid, cids, "Succ Apply For Audit")
	}
	return
}
// auditMsg transforms a skeleton to license audit message struct for UGC.
// It converts the skeleton's videos into programs (when any), wraps them
// in a program set built from the archive, and signs the final message.
func (s *Service) auditMsg(skeleton *ugcmdl.LicSke) (licData *model.License, err error) {
	var (
		programSets []*model.PS
		programs    []*model.Program
		sign        = s.c.Sync.Sign
	)
	if len(skeleton.Videos) > 0 {
		if programs, err = s.videoProgram(skeleton.Arc.AID, skeleton.Videos); err != nil {
			log.Error("auditMsg videoProgram Aid %d, Err %v", skeleton.Arc.AID, err)
			return
		}
	}
	if programSets, err = s.arcPSet(skeleton.Arc, programs); err != nil {
		log.Error("arcPSet Error %v", err)
		return
	}
	licData = lic.BuildLic(sign, programSets, len(programs))
	return
}
// videoProgram transforms the videos to license defined program models.
// Videos whose playurl hits the dead codes are deleted from our side; when
// that empties the archive, or kills the whole batch, ecode.NothingFound
// is returned so the caller drops the archive too.
func (s *Service) videoProgram(aid int64, videos []*ugcmdl.SimpleVideo) (programs []*model.Program, err error) {
	var (
		ugcPrefix = s.c.Sync.UGCPrefix
		deadCIDs  = []int64{}
		arcValid  bool
	)
	for _, v := range videos {
		playurl, hitDead, errCall := s.playurlDao.Playurl(ctx, int(v.CID))
		if errCall != nil { // best effort: skip this cid on playurl failure
			log.Error("Playurl CID %d, Error %v", v.CID, errCall)
			continue
		}
		if hitDead { // hit playurl dead codes
			deadCIDs = append(deadCIDs, v.CID)
			continue
		}
		media := model.MakePMedia(ugcPrefix, playurl, v.CID)
		program := &model.Program{
			ProgramID:      fmt.Sprintf("%s%d", ugcPrefix, v.CID),
			ProgramName:    v.Eptitle,
			ProgramLength:  int(v.Duration),
			ProgramDesc:    v.Description,
			PublishDate:    _crEnd,
			Number:         fmt.Sprintf("%d", v.IndexOrder),
			DefinitionType: _definition, // fix: use the shared const instead of a duplicated "SD" literal (kept in sync with arcPSet)
			ProgramMediaList: &model.PMList{
				ProgramMedia: []*model.PMedia{
					media,
				},
			},
		}
		programs = append(programs, program)
	}
	if len(deadCIDs) > 0 { // treat deadCIDs, delete them
		if arcValid, err = s.dao.DelVideoArc(ctx, &ugcmdl.DelVideos{
			AID:  aid,
			CIDs: deadCIDs,
		}); err != nil {
			log.Error("VideoProgram DelVideos Aid %d, Cids %v, Err %v", aid, deadCIDs, err)
			return
		}
		if !arcValid { // the deletion emptied the archive
			log.Info("VideoProgram DelVideos Aid %d is empty, delete it also", aid)
			err = ecode.NothingFound
			return
		}
		if len(deadCIDs) == len(videos) { // every video of this batch is dead
			log.Info("VideoProgram Passed Videos Aid %d are dead, Cids %v", aid, deadCIDs)
			err = ecode.NothingFound
			return
		}
		log.Info("VideoProgram DelVideos Aid %d, Playurl DeadCids %v", aid, deadCIDs)
	}
	return
}
// arcPSet transforms an archive model to a license programSet model.
// Type names are resolved from the in-memory arcTypes map; the upper info
// is best effort — a LoadUpMeta failure is logged and cleared so the
// programSet is still produced without producer/portrait.
func (s *Service) arcPSet(arc *ugcmdl.SimpleArc, programs []*model.Program) (ps []*model.PS, err error) {
	var (
		secondType string
		firstType  string
		copyright  = s.c.UgcSync.Cfg.Copyright
		upper      *ugcmdl.Upper
	)
	// get second type name
	if tp, ok := s.arcTypes[arc.TypeID]; !ok {
		log.Error("For Aid %d, Can't find Second TypeID %d Name", arc.AID, arc.TypeID)
	} else {
		secondType = tp.Name
	}
	// get first type name
	firstType = s.getPTypeName(arc.TypeID)
	// build the programSet structure
	var program = &model.PS{
		ProgramSetID:     fmt.Sprintf("%s%d", s.c.Sync.UGCPrefix, arc.AID),
		ProgramSetName:   arc.Title,
		ProgramSetClass:  secondType,
		ProgramSetType:   firstType,
		PublishDate:      arc.Pubtime,
		Copyright:        copyright,
		ProgramCount:     int(arc.Videos),
		CREndData:        _crEnd,
		DefinitionType:   _definition,
		CpCode:           s.c.Sync.LConf.CPCode,
		PayStatus:        0,
		ProgramSetDesc:   arc.Content,
		ProgramSetPoster: arc.Cover,
		ProgramList: &model.ProgramList{
			Program: programs,
		},
	}
	// upper info
	if upper, err = s.upDao.LoadUpMeta(ctx, arc.MID); err != nil { // get upper meta info
		log.Error("modLic LoadUpMeta Aid %d, Mid %d, Err %v", arc.AID, arc.MID, err)
		err = nil // best effort: missing upper info does not fail the build
	}
	if upper != nil {
		program.Producer = upper.OriName
		program.Portrait = upper.OriFace
	}
	ps = append(ps, program)
	return
}

View File

@@ -0,0 +1,21 @@
package ugc
import (
"testing"
"go-common/app/job/main/tv/model/ugc"
. "github.com/smartystreets/goconvey/convey"
)
// TestService_SyncLic is an integration test: it syncs a fixed archive to
// the license owner through the live daos.
func TestService_SyncLic(t *testing.T) {
	Convey("TestService_SyncLic", t, WithService(func(s *Service) {
		err := s.syncLic(10099174, &ugc.SimpleArc{
			AID:      10099174,
			Title:    "test",
			Duration: 400,
			Cover:    "testCover",
		})
		So(err, ShouldBeNil)
	}))
}

View File

@@ -0,0 +1,46 @@
package ugc
import (
appDao "go-common/app/job/main/tv/dao/app"
arccli "go-common/app/service/main/archive/api"
"go-common/library/log"
)
// loadTypes refreshes the in-memory archive type map from the Types RPC;
// on failure the previous map is kept.
func (s *Service) loadTypes() {
	resp, err := s.arcClient.Types(ctx, &arccli.NoArgRequest{})
	if err != nil {
		log.Error("arcRPC loadType Error %v", err)
		return
	}
	s.arcTypes = resp.Types
}
// hitPGC reports whether the first-level type name of tid belongs to the
// configured pgc zones.
func (s *Service) hitPGC(tid int32) (hit bool) {
	name := s.getPTypeName(tid)
	_, hit = s.pgcTypes[name]
	return
}
// delPGC deletes the archive when its type hits the pgc zones; when the
// type does not hit, it is a no-op (only a metric is recorded).
// NOTE(review): PromInfo("HitPGC:DelErr") on the failure path looks like it
// should be PromError — confirm the intended metric.
func (s *Service) delPGC(tid int32, aid int64) (hit bool, err error) {
	if hit = s.hitPGC(tid); !hit { // if not hit, do nothing
		appDao.PromInfo("HitPGC:FdSucc")
		return
	}
	log.Info("delPGC Aid %d, Tid %d", aid, tid)
	appDao.PromInfo("HitPGC:DelSucc")
	if err = s.delArc(aid); err != nil { // if hit, delete it if exist
		appDao.PromInfo("HitPGC:DelErr")
		log.Error("HitPGC DelArc %d, Err %v", aid, err)
	}
	return
}
// pickKeys returns the keys of the given set-like map in unspecified
// order; a nil or empty map yields a nil slice.
func pickKeys(q map[int64]int) (res []int64) {
	if len(q) == 0 {
		return
	}
	res = make([]int64, 0, len(q))
	for key := range q {
		res = append(res, key)
	}
	return
}

View File

@@ -0,0 +1,164 @@
package ugc
import (
"context"
"strconv"
"sync"
"go-common/app/job/main/tv/conf"
appDao "go-common/app/job/main/tv/dao/app"
arcdao "go-common/app/job/main/tv/dao/archive"
"go-common/app/job/main/tv/dao/cms"
"go-common/app/job/main/tv/dao/ftp"
"go-common/app/job/main/tv/dao/lic"
playdao "go-common/app/job/main/tv/dao/playurl"
ugcdao "go-common/app/job/main/tv/dao/ugc"
updao "go-common/app/job/main/tv/dao/upper"
arccli "go-common/app/service/main/archive/api"
archive "go-common/app/service/main/archive/api/gorpc"
"go-common/library/log"
"go-common/library/queue/databus"
"github.com/robfig/cron"
)
// _chanSize is the buffer size of the internal work channels.
const _chanSize = 10240

// ctx is the package-wide background context used by the workers.
var ctx = context.TODO()
// Service is show service.
// It bundles all daos, the archive RPC clients, the databus subscriptions,
// the in-memory caches and the worker channels of the ugc job.
type Service struct {
	c *conf.Config
	// dao
	dao        *ugcdao.Dao
	playurlDao *playdao.Dao
	licDao     *lic.Dao
	ftpDao     *ftp.Dao
	appDao     *appDao.Dao
	arcDao     *arcdao.Dao
	upDao      *updao.Dao
	cmsDao     *cms.Dao
	// logic
	daoClosed bool // set by Close(); polling workers watch it and exit
	// waiter
	waiter        *sync.WaitGroup
	consumerLimit chan struct{} // semaphore bounding concurrent UgcDbus goroutines
	// rpc
	arcClient arccli.ArchiveClient
	arcRPC    *archive.Service2
	// databus
	archiveNotifySub *databus.Databus
	ugcSub           *databus.Databus
	// memory data
	ugcTypesRel map[int32]*conf.UgcType
	ugcTypesCat map[int32]int32
	arcTypes    map[int32]*arccli.Tp // map for arc types
	pgcTypes    map[string]int       // filter pgc types data
	activeUps   map[int64]int        // store all the trusted uppers
	// cron
	cron *cron.Cron
	// channels
	modArcCh, audAidCh     chan []int64
	repCidCh, reshelfAidCh chan int64
}
// New inits the ugc service: it wires all daos, databus subscriptions and
// cron jobs, warms the in-memory type/upper caches, and spawns the
// background workers.
func New(c *conf.Config) (s *Service) {
	s = &Service{
		c:                c,
		dao:              ugcdao.New(c),
		playurlDao:       playdao.New(c),
		licDao:           lic.New(c),
		ftpDao:           ftp.New(c),
		arcDao:           arcdao.New(c),
		appDao:           appDao.New(c),
		upDao:            updao.New(c),
		cmsDao:           cms.New(c),
		waiter:           new(sync.WaitGroup),
		archiveNotifySub: databus.New(c.ArchiveNotifySub),
		ugcSub:           databus.New(c.UgcSub),
		cron:             cron.New(),
		arcTypes:         make(map[int32]*arccli.Tp),
		ugcTypesRel:      make(map[int32]*conf.UgcType),
		pgcTypes:         make(map[string]int),
		activeUps:        make(map[int64]int),
		consumerLimit:    make(chan struct{}, c.UgcSync.Cfg.ThreadLimit),
		modArcCh:         make(chan []int64, _chanSize),
		audAidCh:         make(chan []int64, _chanSize),
		reshelfAidCh:     make(chan int64, _chanSize),
		repCidCh:         make(chan int64, _chanSize),
		arcRPC:           archive.New2(c.ArchiveRPC),
	}
	// transform cfg to map, in order to filter pgc types archive
	var err error
	if s.arcClient, err = arccli.NewClient(c.ArcClient); err != nil {
		panic(err)
	}
	for _, v := range s.c.Cfg.PgcTypes {
		s.pgcTypes[v] = 1
	}
	for k, v := range s.c.Cfg.UgcZones { // transform cfg map
		s.ugcTypesRel[atoi(k)] = v
	}
	if err := s.cron.AddFunc(s.c.Redis.CronUGC, s.ZoneIdx); err != nil { // load Zone Idx & types
		panic(err)
	}
	if err := s.cron.AddFunc(s.c.UgcSync.Frequency.TypesCron, s.loadTypes); err != nil {
		panic(err)
	}
	s.cron.Start()
	s.loadTypes()            // load types
	s.loadTids()             // load ugc idx relationship
	s.refreshUp(ctx, false)  // init upper list
	go s.ZoneIdx()
	// NOTE(review): refreshUpproc, fullRefreshproc, seaUgcContproc and the
	// other goroutines started without a matching waiter.Add(1) are NOT
	// waited for by Close() — confirm this is intended.
	s.waiter.Add(1)
	go s.syncUpproc() // sync modified uppers' info to license owner
	go s.refreshUpproc() // refresh upper info
	s.waiter.Add(1)
	go s.manualproc() // manual import videos
	s.waiter.Add(1)
	go s.upImportproc() // import upper history data
	go s.fullRefreshproc() // full refrsh video data
	s.waiter.Add(1)
	go s.modArcproc() // sync modified archive data
	s.waiter.Add(1)
	go s.delArcproc() // sync deleted archive data
	s.waiter.Add(1)
	go s.delVideoproc() // sync deleted video data
	s.waiter.Add(1)
	go s.delUpproc() // treat deleted uppers
	go s.seaUgcContproc() // uploads ugc search content to sug's FTP
	s.waiter.Add(1)
	go s.arcConsumeproc() // archive Notify-T databus
	s.waiter.Add(1)
	go s.consumeVideo() // consume video databus, report cid info
	s.waiter.Add(1)
	go s.repCidproc() // consume channel and report cid to playurl
	s.waiter.Add(1)
	go s.audCidproc() // consume audit aid data
	s.waiter.Add(1)
	go s.reshelfArcproc() // reshelf the cms invalid arcs when they have at least one video that can play now
	return
}
// Close shuts the service down gracefully: it flags the dao as closed so the
// polling goroutines stop picking up new work, stops the cron scheduler and
// the databus consumers, waits for every waiter-tracked goroutine to drain,
// and finally closes the DB connection.
func (s *Service) Close() {
	if s.dao != nil {
		s.daoClosed = true // signal the *proc loops to exit
		log.Info("Dao Closed!")
	}
	s.cron.Stop() // fix: stop scheduled jobs so they don't fire during teardown
	log.Info("Close ArcNotifySub!")
	s.archiveNotifySub.Close()
	log.Info("Close ugcSub!")
	s.ugcSub.Close()
	log.Info("Wait Sync!")
	s.waiter.Wait()
	if s.dao != nil { // fix: guard against a nil dao (the flag above already assumed it may be nil)
		log.Info("DB Closed Physically!")
		s.dao.DB.Close()
	}
}
// atoi converts a decimal string into an int32; input that strconv.Atoi
// rejects as non-numeric (including the empty string) yields 0.
func atoi(number string) int32 {
	parsed, _ := strconv.Atoi(number) // parse error intentionally ignored: fall back to zero
	return int32(parsed)
}

View File

@@ -0,0 +1,44 @@
package ugc
import (
"flag"
"path/filepath"
"testing"
"time"
"encoding/json"
"go-common/app/job/main/tv/conf"
"go-common/app/job/main/tv/model/ugc"
. "github.com/smartystreets/goconvey/convey"
)
var (
	// srv is the package-level Service instance shared by every test in this file.
	srv *Service
)
// init loads the test configuration and builds one real Service instance for
// the whole test package; the sleep gives the service's async init goroutines
// a moment to warm up before tests run.
func init() {
	dir, _ := filepath.Abs("../../cmd/tv-job-test.toml")
	flag.Set("conf", dir)
	conf.Init()
	srv = New(conf.Conf)
	time.Sleep(time.Second)
}
// WithService wraps a test body so it receives the shared Service instance
// built by init, in the shape goconvey expects for a Convey action.
func WithService(f func(s *Service)) func() {
	return func() { f(srv) }
}
// TestService_ArcHandle feeds a canned databus archive-update message through
// ArcHandle, verifying the payload unmarshals cleanly first.
func TestService_ArcHandle(t *testing.T) {
	Convey("TestService_ArcHandle", t, WithService(func(s *Service) {
		msg := []byte(`{"action":"update","table":"archive","new":{"id":0,"aid":10110186,"mid":27515615,"typeid":76,"videos":1,"title":"xxx","cover":"http://i0.hdslb.com/bfs/archive/b5beb958f94f5deb6d3ba8775c4e81d2cf0f4bc1.jpg","content":"万年不变小电视","duration":10,"attribute":2113536,"copyright":1,"access":0,"pubtime":"2018-06-12 19:40:23","ctime":"2018-06-12 19:40:28","mtime":"",
"state":0,"mission_id":0,"order_id":0,"redirect_url":"","forward":0,"dynamic":""},"old":{"id":0,"aid":10110186,"mid":27515615,"typeid":76,"videos":1,"title":"XXXX","cover":"http://i0.hdslb.com/bfs/archive/b5beb958f94f5deb6d3ba8775c4e81d2cf0f4bc1.jpg","content":"万年不变小电视","duration":10,"attribute":2113536,"copyright":1,"access":0,"pubtime":"2018-06-12 19:40:23","ctime":"2018-06-12 19:40:28","mtime":"",
"state":0,"mission_id":0,"order_id":0,"redirect_url":"","forward":0,"dynamic":""}}`)
		var ms = &ugc.ArcMsg{}
		err := json.Unmarshal(msg, ms)
		// fix: assert the decode succeeded BEFORE handing the message to
		// ArcHandle; the original called ArcHandle with a possibly
		// half-decoded message and only checked the error afterwards.
		So(err, ShouldBeNil)
		s.ArcHandle(ms)
	}))
}

View File

@@ -0,0 +1,252 @@
package ugc
import (
"context"
"time"
appDao "go-common/app/job/main/tv/dao/app"
ugcMdl "go-common/app/job/main/tv/model/ugc"
account "go-common/app/service/main/account/model"
"go-common/library/database/sql"
"go-common/library/log"
)
const (
	// _arcPiece is the page size used when scanning an upper's archives.
	_arcPiece = 20
	// _upName / _upFace select which upper field a ReqSetUp request updates.
	_upName = 1
	_upFace = 2
)
// refreshUpproc refreshes the upper info regularly, sleeping for the
// configured UpperRefresh interval between full sweeps of refreshUp.
// NOTE(review): this loop has no stop condition or ctx cancellation and is
// started without waiter.Add in New, so it runs until the process exits.
func (s *Service) refreshUpproc() {
	var c = context.Background()
	for {
		time.Sleep(time.Duration(s.c.UgcSync.Frequency.UpperRefresh))
		s.refreshUp(c, true)
		log.Info("RefreshUpper Succ")
	}
}
// refreshUp picks all the upper in our DB and compare them with RPC result to refresh if different.
// It pages through the upper table _arcPiece rows at a time using maxID as a
// cursor, accumulating the set of active mids; when refresh is true each
// upper is also re-checked against the account RPC via upCheck. The
// in-memory activeUps map is only swapped in when non-empty, so a failed
// sweep cannot wipe the live set.
func (s *Service) refreshUp(ctx context.Context, refresh bool) {
	var (
		count     int64
		nbPiece   int
		err       error
		maxID     int64 // cursor: highest upper-table row ID seen so far
		activeUps = make(map[int64]int)
	)
	if count, err = s.upDao.CountUP(ctx); err != nil {
		log.Error("[refreshUpper] CountUP error [%v]", err)
		return
	}
	nbPiece = appDao.NumPce(int(count), _arcPiece) // number of pages to scan
	log.Info("[refreshUpper] Numero Piece %d", nbPiece)
	for i := 0; i < nbPiece; i++ {
		res, newMaxID, err := s.upDao.PickUppers(ctx, maxID, _arcPiece)
		if err != nil {
			// NOTE(review): maxID is not advanced here, so the next iteration
			// retries the same page; bounded overall by nbPiece iterations.
			log.Error("[refreshUpper] Pick Piece %d Error, Ignore it", i)
			continue
		}
		if newMaxID <= maxID { // cursor must strictly increase or we would re-read one page forever
			log.Error("MaxID is not increasing! [%d,%d]", newMaxID, maxID)
			return
		}
		maxID = newMaxID
		for _, v := range res { // travel the mids of this page
			activeUps[v] = 1
			if refresh {
				s.upCheck(ctx, v)
			}
		}
	}
	if len(activeUps) > 0 { // swap only when non-empty to keep a usable fallback
		s.activeUps = activeUps
		log.Info("[refreshUpper] ActiveUps Len %d", len(activeUps))
	}
}
// upCheck checks the upper remote and local, modify if different.
// It loads the locally stored metadata and the account service's card for
// the same mid, then delegates the comparison/update to upModify.
func (s *Service) upCheck(ctx context.Context, v int64) (err error) {
	var (
		upCMS *ugcMdl.Upper
		upRPC *account.Card
	)
	// fix: the three log calls below used the %d verb for error values,
	// which prints a meaningless struct dump — use %v.
	if upCMS, err = s.upDao.LoadUpMeta(ctx, v); err != nil { // load local upper data
		log.Warn("[refreshUpper] LoadUpMeta Mid %d, Err %v", v, err)
		return
	}
	if upRPC, err = s.upDao.Card3(ctx, v); err != nil { // load remote upper data
		log.Warn("[refreshUpper] Card3 Mid %d, Err %v", v, err)
		return
	}
	if err = s.upModify(ctx, upRPC, upCMS); err != nil {
		log.Warn("[refreshUpper] upModify Mid %d, Err %v", v, err)
	}
	return
}
// upModify refreshes the upper's face/name in DB and cache when the RPC data
// differs from what is stored locally, then notifies downstream exactly once
// regardless of how many fields changed. It is a no-op when nothing differs.
func (s *Service) upModify(ctx context.Context, upRPC *account.Card, upCMS *ugcMdl.Upper) (err error) {
	sameFace, sameName := upCMS.IsSame(upRPC.Name, upRPC.Face)
	if sameFace && sameName { // nothing changed for this upper
		return
	}
	if !sameFace {
		log.Info("Mid %d Face Modified, Old: %s, New %s", upRPC.Mid, upCMS.OriFace, upRPC.Face)
		faceReq := &ugcMdl.ReqSetUp{
			Value:  upRPC.Face,
			MID:    upRPC.Mid,
			UpType: _upFace,
		}
		if err = s.upDao.RefreshUp(ctx, faceReq); err != nil {
			log.Error("RefreshUp Req %v, Err %v", faceReq, err)
			return
		}
	}
	if !sameName {
		log.Info("Mid %d Name Modified, Old: %s, New %s", upRPC.Mid, upCMS.OriName, upRPC.Name)
		nameReq := &ugcMdl.ReqSetUp{
			Value:  upRPC.Name,
			MID:    upRPC.Mid,
			UpType: _upName,
		}
		if err = s.upDao.RefreshUp(ctx, nameReq); err != nil {
			log.Error("RefreshUp Req %v, Err %v", nameReq, err)
			return
		}
	}
	// only update once if both face and name are modified
	err = s.upDao.SendUpper(ctx, upRPC.Mid)
	return
}
// delUpproc is the deleted-upper worker loop: it repeatedly polls for one mid
// flagged as deleted, removes all of that upper's archives via delUp, and
// sleeps between empty/failed polls. It exits once Close flags the dao closed.
func (s *Service) delUpproc() {
	defer s.waiter.Done()
	for {
		if s.daoClosed {
			log.Info("delUpproc DB closed!")
			return
		}
		// pick one mid flagged for deletion
		cMid, err := s.dao.DeletedUp(ctx)
		if err != nil && err != sql.ErrNoRows {
			log.Error("DeletedUp Error %v", err)
			appDao.PromError("DelUp:Err")
			time.Sleep(time.Duration(s.c.UgcSync.Frequency.SyncFre))
			continue
		}
		if err == sql.ErrNoRows || cMid == 0 { // nothing to do: back off before re-polling
			log.Info("SyncDelAid No Data to Sync")
			time.Sleep(time.Duration(s.c.UgcSync.Frequency.SyncFre))
			continue
		}
		if err = s.delUp(cMid); err != nil { // delUp postpones the job itself on failure
			appDao.PromError("DelUp:Err")
			log.Error("delUp error %v, mid %d", err, cMid)
			time.Sleep(time.Duration(s.c.UgcSync.Frequency.SyncFre))
			continue
		}
		appDao.PromInfo("DelUp:Succ")
	}
}
// delUp removes every archive belonging to a deleted upper, page by page,
// then flags the upper's deletion job as finished. On any failure the job is
// postponed via delUpErr so the next sweep retries it.
func (s *Service) delUp(mid int64) (err error) {
	var (
		aids  []int64
		count int64
		page  int
	)
	if count, err = s.dao.CountUpArcs(ctx, mid); err != nil {
		s.delUpErr(mid, "CountUpArcs Mid %d, Err %v", err)
		return
	}
	page = appDao.NumPce(int(count), _arcPiece)
	log.Info("Ready to Remove Mid %d Arcs, Count: %d, Page: %d", mid, count, page)
	for i := 0; i < page; i++ {
		log.Info("Operating DelUp Mid %d Page %d", mid, i+1)
		// no offset passed: presumably delArc removes rows so each fetch
		// returns the remaining aids — TODO confirm against dao.UpArcs
		if aids, err = s.dao.UpArcs(ctx, mid); err != nil && err != sql.ErrNoRows {
			return
		}
		if err == sql.ErrNoRows || len(aids) == 0 { // no rows left: end of the loop
			break
		}
		for _, v := range aids {
			if err = s.delArc(v); err != nil {
				s.delUpErr(mid, "delArc Mid %d, Err %v", err)
				return
			}
		}
	}
	// change the upper's status to tell it's finished
	if err = s.dao.FinishDelUp(ctx, mid); err != nil {
		// fix: this failure comes from FinishDelUp, not delArc — log it accurately
		s.delUpErr(mid, "FinishDelUp Mid %d, Err %v", err)
		return
	}
	log.Info("DelUp Mid %d Succ, Count %d, Page %d", mid, count, page)
	return
}
// delUpErr postpones the deleted-upper job for a retry on the next sweep and
// logs the failure. format must contain a %d verb for the mid followed by a
// %v verb for the error.
func (s *Service) delUpErr(mid int64, format string, err error) {
	// fix: parameter renamed from `fmt` — it shadowed the stdlib package name
	s.dao.PpDelUp(ctx, mid) // postpone so the next sweep retries this upper
	log.Error(format, mid, err)
}
// syncUpproc picks all the submit=1 uppers and updates their archives' submit to 1.
// It loops until Close flags the dao closed, sleeping between empty or failed
// polls; per-upper failures are logged and skipped so one bad mid cannot
// stall the rest of the batch.
func (s *Service) syncUpproc() {
	defer s.waiter.Done()
	for {
		if s.daoClosed {
			log.Info("syncUpproc DB closed!")
			return
		}
		mids, err := s.upDao.TosyncUps(ctx) // pick to sync mids
		if err != nil {
			log.Error("syncUpproc Error %v", err)
			time.Sleep(time.Duration(s.c.UgcSync.Frequency.SyncFre))
			continue
		}
		if len(mids) == 0 { // nothing pending: back off before re-polling
			log.Info("No Upper to Sync")
			time.Sleep(time.Duration(s.c.UgcSync.Frequency.SyncFre))
			continue
		}
		log.Info("syncUpproc treats %d uppers", len(mids))
		for _, v := range mids { // updates these uppers' archives' submit to 1
			if err = s.submitUps(ctx, v); err != nil {
				log.Error("syncUpproc updates Archive error %v, mid %d", err, v)
				continue // leave the upper pending so it is retried next round
			}
			if err = s.upDao.FinsyncUps(ctx, v); err != nil {
				log.Error("syncUpproc finish upper error %v, mid %d", err, v)
			}
		}
	}
}
// submitUps pushes all of one upper's archive aids into the modification
// channel, page by page, so modArcproc syncs their submit flag.
func (s *Service) submitUps(ctx context.Context, mid int64) (err error) {
	var (
		count int64
	)
	if count, err = s.dao.CountUpArcs(ctx, mid); err != nil {
		// NOTE(review): this postpones the *del-up* job on a submit failure — confirm intended
		s.delUpErr(mid, "CountUpArcs Mid %d, Err %v", err)
		return
	}
	page := appDao.NumPce(int(count), _arcPiece)
	log.Info("Ready to Submit Mid %d Arcs, Count: %d, Page: %d", mid, count, page)
	for i := 0; i < page; i++ {
		// fix: the log label said "Operating DelUp" — copy-pasted from delUp;
		// this is the submit flow
		log.Info("Operating SubmitUps Mid %d Page %d", mid, i+1)
		var aids []int64
		if aids, err = s.dao.UpArcs(ctx, mid); err != nil && err != sql.ErrNoRows {
			return
		}
		if err == sql.ErrNoRows || len(aids) == 0 { // no rows left: end of the loop
			break
		}
		s.modArcCh <- aids                // hand the batch to modArcproc
		time.Sleep(50 * time.Millisecond) // throttle so the consumer keeps up
	}
	return
}

View File

@@ -0,0 +1,39 @@
package ugc
import (
"context"
arccli "go-common/app/service/main/archive/api"
"go-common/library/ecode"
"go-common/library/log"
)
// viewCache loads an archive's full View data through the archive gRPC
// client and warms both the arc cache and the view cache, so the view page
// can be served without hitting the RPC. A NothingFound reply is logged as a
// warning and treated as benign; aid 0 is ignored.
func (s *Service) viewCache(aid int64) {
	if aid == 0 {
		return
	}
	var (
		c   = context.Background()
		err error
		arg = &arccli.ViewRequest{Aid: aid}
		v   *arccli.ViewReply
	)
	// fix: log messages named arcRPC.View3 while the code calls arcClient.View
	if v, err = s.arcClient.View(c, arg); err != nil {
		if ecode.Cause(err) == ecode.NothingFound { // archive gone: benign, don't alert
			log.Warn("s.arcClient.View(%v) error(%v)", arg, err)
			return
		}
		log.Error("s.arcClient.View(%v) error(%v)", arg, err)
		return
	}
	if err = s.arcDao.UpArcCache(c, v.Arc); err != nil {
		log.Error("viewCache %+v", err)
		return
	}
	// fix: the original wrote `if s.arcDao.UpViewCache(c, v); err != nil`,
	// discarding the call's error and re-checking the stale (nil) err from
	// above — UpViewCache failures were silently ignored.
	if err = s.arcDao.UpViewCache(c, v); err != nil {
		log.Error("viewCache %+v", err)
	}
}

View File

@@ -0,0 +1,86 @@
package ugc
import (
"context"
"go-common/app/job/main/tv/model/ugc"
"go-common/library/log"
)
// ZoneIdx rebuilds the UGC zone indexes: for each configured category it
// collects the first-level type plus all of its second-level children, pulls
// the passed archives for those types from the DB, and flushes the resulting
// ranking. Failures for one category are logged and do not stop the others.
func (s *Service) ZoneIdx() {
	background := context.TODO()
	for catID, rel := range s.ugcTypesRel {
		parentTid := rel.TID
		tids := []int64{}
		for _, tp := range s.arcTypes {
			if tp.Pid == parentTid { // a second-level type under this zone
				tids = append(tids, int64(tp.ID))
			}
		}
		tids = append(tids, int64(parentTid)) // the first-level type itself
		ranks, err := s.dao.PassedArcs(background, tids)
		if err != nil {
			log.Error("UgcZoneIdx - PassedArc TID %d Error %v", parentTid, err)
			continue
		}
		if err = s.appDao.Flush(background, int(catID), ranks); err != nil {
			log.Error("UgcZoneIdx - Flush CatID %d Error %v", catID, err)
		}
	}
}
// loadTids rebuilds the typeID -> category mapping from the configured zones
// and the current archive type tree. The live map is only replaced when the
// rebuilt one is non-empty, so a transient empty result cannot wipe it.
func (s *Service) loadTids() {
	fresh := make(map[int32]int32)
	for catID, rel := range s.ugcTypesRel {
		parent := rel.TID
		for _, tp := range s.arcTypes {
			if tp.Pid == parent { // second-level type inherits the zone's category
				fresh[tp.ID] = catID
			}
		}
		fresh[parent] = catID // the first-level type maps to the category too
	}
	if len(fresh) > 0 {
		s.ugcTypesCat = fresh
	}
}
// listMtn maintains the list of zone index.
// It reconciles one archive's membership in the zone indexes after a change:
// when nothing relevant changed it returns early; otherwise the old entry is
// removed (if its type is indexed) and the new entry is added when the
// archive is now in a passed state. Order matters: removal happens before
// the add so a type change moves the archive rather than duplicating it.
func (s *Service) listMtn(old *ugc.MarkArc, new *ugc.MarkArc) (err error) {
	if old == nil { // treat a missing old record as the zero value
		log.Info("ListMtn Old is Nil, NewSn is %v", new)
		old = &ugc.MarkArc{}
	}
	if old.IsPass() && new.IsPass() && old.TypeID == new.TypeID { // no need to take action
		return
	}
	if !old.IsPass() && !new.IsPass() { // no need to take action
		return
	}
	if old.TypeID != 0 { // means old is not null
		if oldCat, ok := s.ugcTypesCat[old.TypeID]; ok { // if old one is in our list, remove it firstly
			if err = s.appDao.ZRemIdx(ctx, int(oldCat), old.AID); err != nil {
				log.Error("listMtn - ZRemIdx - Category: %d, Arc: %d, Error: %v", oldCat, old.AID, err)
				return
			}
		}
	}
	catID, ok := s.ugcTypesCat[new.TypeID]
	if !ok {
		log.Warn("TypeID %d Is Not our target, ignore", new.TypeID)
		return
	}
	if new.IsPass() { // passed now
		// new.Ctime is passed as the rank value — presumably keeping the
		// zone index time-ordered; confirm against appDao.ZAddIdx
		if err = s.appDao.ZAddIdx(ctx, int(catID), new.Ctime, new.AID); err != nil {
			log.Error("listMtn - ZAddIdx - Category: %d, Arc: %d, Error: %v", catID, new.AID, err)
			return
		}
		log.Info("Add Aid %d Into Zone %d", new.AID, catID)
	}
	return
}