Create & Init Project...

This commit is contained in:
2019-04-22 18:49:16 +08:00
commit fc4fa37393
25440 changed files with 4054998 additions and 0 deletions

View File

@@ -0,0 +1,75 @@
# Bazel BUILD rules for the tv-job pgc service package.
# NOTE: every rule is tagged "automanaged" — this file is maintained by
# tooling (gazelle-style); manual edits may be overwritten on regeneration.
package(default_visibility = ["//visibility:public"])
load(
    "@io_bazel_rules_go//go:def.bzl",
    "go_test",
    "go_library",
)
# Unit tests for the pgc service, embedded into the library target.
go_test(
    name = "go_default_test",
    srcs = [
        "sea_con_test.go",
        "service_test.go",
        "sync_mc_test.go",
        "zone_index_test.go",
    ],
    embed = [":go_default_library"],
    rundir = ".",
    tags = ["automanaged"],
    deps = [
        "//app/job/main/tv/conf:go_default_library",
        "//vendor/github.com/smartystreets/goconvey/convey:go_default_library",
    ],
)
# The pgc service library: sync loops, search upload and cache refresh.
go_library(
    name = "go_default_library",
    srcs = [
        "del_cont.go",
        "del_season.go",
        "filter.go",
        "license.go",
        "sea_con.go",
        "search_sug.go",
        "service.go",
        "sync_ep.go",
        "sync_mc.go",
        "sync_retry.go",
        "sync_season.go",
        "zone_index.go",
    ],
    importpath = "go-common/app/job/main/tv/service/pgc",
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
    deps = [
        "//app/job/main/tv/conf:go_default_library",
        "//app/job/main/tv/dao/app:go_default_library",
        "//app/job/main/tv/dao/cms:go_default_library",
        "//app/job/main/tv/dao/ftp:go_default_library",
        "//app/job/main/tv/dao/lic:go_default_library",
        "//app/job/main/tv/dao/playurl:go_default_library",
        "//app/job/main/tv/model/common:go_default_library",
        "//app/job/main/tv/model/pgc:go_default_library",
        "//library/ecode:go_default_library",
        "//library/log:go_default_library",
        "//library/queue/databus:go_default_library",
        "//library/time:go_default_library",
        "//library/xstr:go_default_library",
        "//vendor/github.com/robfig/cron:go_default_library",
    ],
)
# Source globs used by the repository-wide srcs aggregation.
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)
filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,59 @@
package pgc
import (
"database/sql"
"time"
"go-common/app/job/main/tv/dao/lic"
model "go-common/app/job/main/tv/model/pgc"
"go-common/library/log"
)
// delCont syncs the deleted EP data to the license owner.
// It loops until the dao is logically closed: each round it picks the
// deleted contents from DB, calls the license owner's delete API and,
// on success, marks every EP row as synced.
func (s *Service) delCont() {
	var (
		sign   = s.c.Sync.Sign
		prefix = s.c.Sync.AuditPrefix
	)
	defer s.waiter.Done()
	for {
		if s.daoClosed {
			log.Info("delCont DB closed!")
			return
		}
		// pick data; any error leaves delCont empty and we just wait
		delCont, err := s.dao.DelCont(ctx)
		if err == sql.ErrNoRows || len(delCont) == 0 {
			log.Info("No deleted data to pick from Cont to sync")
			time.Sleep(time.Duration(s.c.Sync.Frequency.FreModSeason))
			continue
		}
		delEpids := make([]int, 0, len(delCont))
		for _, v := range delCont {
			delEpids = append(delEpids, v.EPID)
		}
		s.dao.DelaySync(ctx, delCont) // avoid always being stuck by one error data
		body := lic.DelEpLic(prefix, sign, delEpids)
		// call API; CallRetry retries internally, so an error here means
		// the interface stayed unavailable after all retries
		res, err := s.licDao.CallRetry(ctx, s.c.Sync.API.DelEPURL, body)
		if err != nil {
			log.Error("DelEPURL interface not available! %v", err)
			time.Sleep(time.Duration(s.c.Sync.Frequency.ErrorWait))
			continue
		}
		// update the state of every synced EP
		if res != nil {
			for _, v := range delCont {
				// FIX: the original logged v.ID although the EP being
				// synced is identified by v.EPID
				if _, err = s.dao.SyncCont(ctx, v.EPID); err != nil {
					log.Error("SyncCont EP %v to auditing fail!", v.EPID)
					continue
				}
			}
		}
		// pause after each loop
		time.Sleep(1 * time.Second)
	}
}

View File

@@ -0,0 +1,54 @@
package pgc
import (
"database/sql"
"time"
"go-common/app/job/main/tv/dao/lic"
"go-common/library/log"
)
// delSeason syncs the deleted season data to the license owner.
// It loops until the dao is logically closed: each round it picks the
// deleted seasons from DB, calls the license owner's delete API per
// season and marks the season rejected on success.
func (s *Service) delSeason() {
	var (
		sign   = s.c.Sync.Sign
		prefix = s.c.Sync.AuditPrefix
	)
	defer s.waiter.Done()
	for {
		if s.daoClosed {
			log.Info("delSeason DB closed!")
			return
		}
		delSeason, err := s.dao.DelSeason(ctx)
		if err == sql.ErrNoRows || len(delSeason) == 0 {
			log.Info("No deleted data to pick from Season to sync")
			time.Sleep(time.Duration(s.c.Sync.Frequency.FreModSeason))
			continue
		}
		for _, v := range delSeason {
			data := lic.DelLic(sign, prefix, v.ID)
			// ignore the program part during modified season sync
			body := lic.PrepareXML(data)
			// CallRetry retries internally; an error here means the API
			// stayed unavailable after 3 attempts
			res, err := s.licDao.CallRetry(ctx, s.c.Sync.API.DelSeasonURL, body)
			if err != nil {
				log.Error("DelSeasonURL interface not available!Sid: %v, Err: %v", v.ID, err)
				// push the season back for a later retry, then abandon
				// the rest of this batch: avoid always being stuck by
				// one error data
				s.dao.DelaySeason(ctx, v.ID)
				time.Sleep(time.Duration(s.c.Sync.Frequency.ErrorWait))
				break
			}
			if err == nil && res != nil {
				_, err := s.dao.RejectSeason(ctx, int(v.ID))
				if err != nil {
					log.Error("DelSeasonSync season %v to rejected fail!", v.ID)
					// sync next one
					continue
				}
			}
		}
		// pause after each batch
		time.Sleep(1 * time.Second)
	}
}

View File

@@ -0,0 +1,91 @@
package pgc
import (
"context"
"fmt"
"strings"
model "go-common/app/job/main/tv/model/pgc"
"go-common/library/log"
"go-common/library/xstr"
)
// batchFilter walks a batch of seasonCMS items and fills in each one's
// newest-episode number as computed by newestNB. Items whose newest
// number cannot be determined (error or zero) are left untouched.
func (s *Service) batchFilter(ctx context.Context, snCMS []*model.SeasonCMS) {
	if len(snCMS) == 0 {
		return
	}
	for _, sn := range snCMS {
		newest, err := s.newestNB(sn.SeasonID)
		if err != nil || newest == 0 {
			continue
		}
		sn.NewestNb = newest
	}
}
// newestNB loads all the eps of the season and counts those whose title
// does not contain any configured forbidden keyword; that count is the
// season's newest episode number.
func (s *Service) newestNB(sid int) (newest int, err error) {
	keywords := s.c.Cfg.TitleFilter
	strategy := s.c.Cfg.LessStrategy
	eps, err := s.dao.AllEP(ctx, sid, strategy)
	if err != nil {
		log.Warn("AllEP newestNB SeasonID %d, Err %v", sid, err)
		return
	}
	for _, ep := range eps {
		if !titleCheck(keywords, ep.Title) {
			newest++
		}
	}
	if newest == 0 {
		log.Warn("AllEP newestNB SeasonID %d, After Filter it's empty", sid)
	}
	return
}
// titleCheck reports whether the title contains any of the forbidden
// keywords. An empty keyword list never matches.
func titleCheck(keywords []string, title string) bool {
	for i := range keywords {
		if strings.Contains(title, keywords[i]) {
			return true
		}
	}
	return false
}
// cmsShelve computes which seasons must be put on or off the shelf,
// applies both operations and notifies Merak with a summary message.
func (s *Service) cmsShelve() {
	ctx := context.Background()
	cfg := s.c.Cfg.Merak
	validMap, err := s.cmsDao.ValidSns(ctx, cfg.Onlyfree)
	if err != nil {
		log.Error("cmsShelve ValidSns Err %v", err)
		return
	}
	onIDs, offIDs, err := s.cmsDao.ShelveOp(ctx, validMap)
	if err != nil {
		log.Error("cmsShelve ShelveOp err %v", err)
		return
	}
	// apply the two shelf operations; failures are logged but do not
	// abort the notification below
	if len(onIDs) > 0 {
		if err = s.cmsDao.ActOps(ctx, onIDs, true); err != nil {
			log.Error("cmsShelve ActOps OnIDs %v, Err %v", onIDs, err)
		}
	}
	if len(offIDs) > 0 {
		if err = s.cmsDao.ActOps(ctx, offIDs, false); err != nil {
			log.Error("cmsShelve ActOps OffIDs %v, Err %v", offIDs, err)
		}
	}
	log.Info("cmsShelve OnIDs %v, OffIDs %v", onIDs, offIDs)
	content := fmt.Sprintf(cfg.Template, xstr.JoinInts(onIDs), xstr.JoinInts(offIDs))
	if err = s.cmsDao.MerakNotify(ctx, cfg.Title, content); err != nil {
		log.Error("Merak Content %s, Err %v", content, err)
	}
}

View File

@@ -0,0 +1,68 @@
package pgc
import (
"fmt"
"strconv"
"go-common/app/job/main/tv/conf"
"go-common/app/job/main/tv/dao/lic"
model "go-common/app/job/main/tv/model/pgc"
)
// categories maps the season's Category code to the display name sent
// to the license owner (1 bangumi, 2 movie, 3 documentary, 4 domestic
// animation, 5 TV series).
var categories = map[int8]string{
	1: "番剧",
	2: "电影",
	3: "纪录片",
	4: "国漫",
	5: "电视剧",
}

// zones maps the legacy integer area codes to area names; newer data
// stores the area name directly (see newLic).
var zones = map[int64]string{
	1: "中国",
	2: "日本",
}

// _zoneNotFound is the fallback area name for unknown legacy codes.
const _zoneNotFound = "其他"
// newLic creates the skeleton of the license struct for one season.
// One license struct is built per season; its program list is left
// empty and filled in later by the caller.
//
// FIX: renamed the parameters — "Season" looked exported and "conf"
// shadowed the imported conf package.
func newLic(season *model.TVEpSeason, cfg *conf.Sync) *model.License {
	var (
		ps   []*model.PS
		sign = cfg.Sign
		area string
		ok   bool
	)
	// Compatible with the old version where area was an int code; new
	// data stores the area name directly.
	if areaInt, _ := strconv.ParseInt(season.Area, 10, 64); areaInt != 0 {
		if area, ok = zones[areaInt]; !ok {
			area = _zoneNotFound
		}
	} else { // new logic, directly transform
		area = season.Area
	}
	programS := &model.PS{
		ProgramSetID:     cfg.AuditPrefix + fmt.Sprintf("%d", season.ID),
		ProgramSetName:   season.Title,
		ProgramSetClass:  season.Style,
		ProgramSetType:   categories[season.Category],
		PublishDate:      season.PlayTime.Time().Format("2006-01-02"),
		Copyright:        season.Copyright,
		ProgramCount:     int(season.TotalNum),
		CREndData:        "1970-01-01",
		DefinitionType:   "SD",
		CpCode:           cfg.LConf.CPCode,
		PayStatus:        season.Status,
		PrimitiveName:    season.OriginName,
		Alias:            season.Alias,
		Zone:             area,
		LeadingRole:      season.Role,
		ProgramSetDesc:   season.Desc,
		Staff:            season.Staff,
		ProgramSetPoster: season.Cover,
		ProgramList:      &model.ProgramList{},
		Producer:         season.Producer,
		SubGenre:         season.Version,
	}
	ps = append(ps, programS)
	return lic.BuildLic(sign, ps, 0)
}

View File

@@ -0,0 +1,88 @@
package pgc
import (
"encoding/json"
"os"
"time"
model "go-common/app/job/main/tv/model/pgc"
"go-common/library/log"
)
const (
	// _ContLimit is the page size when exporting pgc content for
	// search: 50 records are fetched per DB round-trip.
	_ContLimit = 50
)
// seaPgcContproc periodically regenerates and uploads the pgc search
// content file until the dao is logically closed.
func (s *Service) seaPgcContproc() {
	for !s.daoClosed {
		s.seaPgcCont()
		time.Sleep(time.Duration(s.c.Search.Cfg.UploadFre))
	}
	log.Info("seaPgcContproc DB closed!")
}
// seaPgcCont generates the pgc search-content file (one JSON object
// per line), then uploads the file and its md5 to the search FTP.
//
// FIXES vs the original: the OpenFile error variable was named "error"
// (shadowing the builtin) while the *stale* err was logged; the file
// handle leaked on every early return inside the loop; WriteString
// errors were silently ignored; several log labels named the wrong
// function; an empty page now stops the paging loop instead of risking
// an out-of-range cursor read.
func (s *Service) seaPgcCont() {
	var (
		err     error
		seasons []*model.SearPgcCon
		str     []byte // the json string to write in file
		cnt     int
		cycle   int // number of batches; every batch is _ContLimit records
		id      int // last seen ID, used as the paging cursor
	)
	if cnt, err = s.dao.PgcContCount(ctx); err != nil {
		log.Error(errFormat, "seaPgcCont", "PgcContCount", err)
		return
	}
	// ceiling division: one extra cycle covers the partial last batch
	cycle = cnt / _ContLimit
	if cnt%_ContLimit != 0 {
		cycle++
	}
	file, err := os.OpenFile(s.c.Search.PgcContPath, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0766)
	if err != nil {
		log.Error(errFormat, "seaPgcCont", "OpenFile", err)
		return
	}
	// release the handle on every return path; *os.File writes are
	// unbuffered so the md5/upload steps below still see complete data
	defer file.Close()
	// cycle through the pages
	for i := 0; i < cycle; i++ {
		if i == 0 {
			id = 0
		} else {
			id = seasons[len(seasons)-1].ID
		}
		if seasons, err = s.dao.PgcCont(ctx, id, _ContLimit); err != nil {
			log.Error(errFormat, "seaPgcCont", "PgcCont", err)
			return
		}
		if len(seasons) == 0 { // fewer rows than counted, stop early
			break
		}
		for _, v := range seasons {
			if str, err = json.Marshal(v); err != nil {
				log.Error(errFormat, "seaPgcCont", "JsonMarshal", err)
				return
			}
			if _, err = file.WriteString(string(str) + "\n"); err != nil {
				log.Error(errFormat, "seaPgcCont", "WriteString", err)
				return
			}
		}
	}
	// calculate the file's md5
	if err = s.ftpDao.FileMd5(s.c.Search.PgcContPath, s.c.Search.PgcContMd5Path); err != nil {
		log.Error(errFormat, "seaPgcCont", "fileMd5", err)
		return
	}
	// upload original file
	if err = s.ftpDao.UploadFile(s.c.Search.PgcContPath, s.c.Search.FTP.RemotePgcCont, s.c.Search.FTP.RemotePgcURL); err != nil {
		log.Error(errFormat, "seaPgcCont-File", "uploadFile", err)
		return
	}
	// upload md5 file
	if err = s.ftpDao.UploadFile(s.c.Search.PgcContMd5Path, s.c.Search.FTP.RemotePgcContMd5, s.c.Search.FTP.RemotePgcURL); err != nil {
		log.Error(errFormat, "seaPgcCont-Md5", "uploadFile", err)
		return
	}
	log.Info("FTP Upload Success")
}

View File

@@ -0,0 +1,13 @@
package pgc
import (
"testing"
. "github.com/smartystreets/goconvey/convey"
)
// TestService_SearPgcCon smoke-tests the pgc search-content export;
// it exercises the full DB -> file -> FTP pipeline against the test
// configuration (integration-style, no assertions on the output).
func TestService_SearPgcCon(t *testing.T) {
	Convey("search season content count", t, WithService(func(s *Service) {
		s.seaPgcCont()
	}))
}

View File

@@ -0,0 +1,101 @@
package pgc
import (
"encoding/json"
"os"
"time"
model "go-common/app/job/main/tv/model/pgc"
"go-common/library/log"
)
const (
	// errFormat is the shared log template: function name, step name,
	// and the error value.
	errFormat = "Func:[%s] - Step:[%s] - Error:[%v]"
)
// pgcSeaSug writes the pgc season suggestions as JSON lines into f.
//
// FIXES: the log labels said "searchSug" although the failing function
// is pgcSeaSug, and WriteString errors were silently dropped.
func (s *Service) pgcSeaSug(f *os.File) (err error) {
	var (
		str []byte // the json string to write in file
		sug []*model.SearchSug
	)
	if sug, err = s.dao.PgcSeaSug(ctx); err != nil {
		log.Error(errFormat, "pgcSeaSug", "PgcSeaSug", err)
		return
	}
	for _, v := range sug {
		if str, err = json.Marshal(v); err != nil {
			log.Error(errFormat, "pgcSeaSug", "JsonMarshal", err)
			return
		}
		if _, err = f.WriteString(string(str) + "\n"); err != nil {
			log.Error(errFormat, "pgcSeaSug", "WriteString", err)
			return
		}
	}
	return
}
// ugcSeaSug writes the ugc season suggestions as JSON lines into f.
//
// FIX: WriteString errors were silently dropped, which could produce a
// truncated suggestion file that was still uploaded as "success".
func (s *Service) ugcSeaSug(f *os.File) (err error) {
	var (
		str []byte // the json string to write in file
		sug []*model.SearchSug
	)
	if sug, err = s.dao.UgcSeaSug(ctx); err != nil {
		log.Error(errFormat, "ugcSeaSug", "UgcSeaSug", err)
		return
	}
	for _, v := range sug {
		if str, err = json.Marshal(v); err != nil {
			log.Error(errFormat, "ugcSeaSug", "JsonMarshal", err)
			return
		}
		if _, err = f.WriteString(string(str) + "\n"); err != nil {
			log.Error(errFormat, "ugcSeaSug", "WriteString", err)
			return
		}
	}
	return
}
// searchSugproc periodically regenerates and uploads the search
// suggestion file until the dao is logically closed.
func (s *Service) searchSugproc() {
	for !s.daoClosed {
		s.searchSug()
		time.Sleep(time.Duration(s.c.Search.Cfg.UploadFre))
	}
	log.Info("searchSugproc DB closed!")
}
// searchSug generates the valid-seasons file for search suggestion
// (pgc always, ugc when the switch is on), then uploads the file and
// its md5 via FTP.
//
// FIXES vs the original: the file handle leaked on every early return
// after OpenFile; the sub-step failures were all mislabeled "OpenFile";
// the final success message was logged with log.Error.
func (s *Service) searchSug() {
	file, err := os.OpenFile(s.c.Search.SugPath, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0766)
	if err != nil {
		log.Error(errFormat, "searchSug", "OpenFile", err)
		return
	}
	// release the handle on every return path; *os.File writes are
	// unbuffered so the md5/upload steps still see complete data
	defer file.Close()
	if err := s.pgcSeaSug(file); err != nil {
		log.Error(errFormat, "searchSug", "pgcSeaSug", err)
		return
	}
	// if the switch is on, also include the ugc search suggestions
	if s.c.Search.UgcSwitch == "on" {
		if err := s.ugcSeaSug(file); err != nil {
			log.Error(errFormat, "searchSug", "ugcSeaSug", err)
			return
		}
	}
	// calculate the file's md5
	if err := s.ftpDao.FileMd5(s.c.Search.SugPath, s.c.Search.Md5Path); err != nil {
		log.Error(errFormat, "searchSug", "fileMd5", err)
		return
	}
	// upload original file
	if err := s.ftpDao.UploadFile(s.c.Search.SugPath, s.c.Search.FTP.RemoteFName, s.c.Search.FTP.URL); err != nil {
		log.Error(errFormat, "searchSug-File", "uploadFile", err)
		return
	}
	// upload md5 file
	if err := s.ftpDao.UploadFile(s.c.Search.Md5Path, s.c.Search.FTP.RemoteMd5, s.c.Search.FTP.URL); err != nil {
		log.Error(errFormat, "searchSug-Md5", "uploadFile", err)
		return
	}
	log.Info("FTP Upload Success")
}

View File

@@ -0,0 +1,109 @@
package pgc
import (
"context"
"math/rand"
"sync"
"time"
"go-common/app/job/main/tv/conf"
"go-common/app/job/main/tv/dao/app"
"go-common/app/job/main/tv/dao/cms"
"go-common/app/job/main/tv/dao/lic"
playdao "go-common/app/job/main/tv/dao/playurl"
model "go-common/app/job/main/tv/model/pgc"
"go-common/library/log"
"go-common/library/queue/databus"
"go-common/app/job/main/tv/dao/ftp"
"github.com/robfig/cron"
)
// ctx is the package-wide background context shared by the long-running
// sync loops in this package.
// NOTE(review): a package-level context cannot be cancelled, so Close()
// relies on the daoClosed flag instead — consider per-goroutine contexts.
var ctx = context.Background()
// Service struct of service. It owns all the daos, the cron scheduler,
// the databus consumer and the in-memory retry queues of the tv-job
// pgc pipeline.
type Service struct {
	dao            *app.Dao
	daoClosed      bool // logic close the dao's DB: producer loops poll this flag and exit
	playurlDao     *playdao.Dao
	licDao         *lic.Dao // license-owner HTTP API dao
	ftpDao         *ftp.Dao // search file uploads
	cmsDao         *cms.Dao
	c              *conf.Config
	waiter         *sync.WaitGroup // general waiter: one Add per producer goroutine
	waiterConsumer *sync.WaitGroup // waiter for the databus consumer goroutine
	contentSub     *databus.Databus // consumer for state change
	cron           *cron.Cron
	// ResuEps / ResuSns hold the eps/seasons waiting to be re-submitted.
	// NOTE(review): they are appended and drained from several goroutines
	// without a lock — confirm whether a mutex is required.
	ResuEps []*model.Content
	ResuSns []*model.TVEpSeason
	// resuRetry: retry counters; its usage is not visible in this chunk
	// — verify against the rest of the package before removing.
	resuRetry map[string]int
}
// New create service instance and return.
// It wires all the daos, registers the cron jobs (zone index refresh,
// cache refresh, cms shelving) and starts every producer/consumer
// goroutine; the producers are tracked by s.waiter, the databus
// consumer by s.waiterConsumer (see Close for the shutdown order).
func New(c *conf.Config) (s *Service) {
	s = &Service{
		c:              c,
		dao:            app.New(c),
		playurlDao:     playdao.New(c),
		licDao:         lic.New(c),
		ftpDao:         ftp.New(c),
		cmsDao:         cms.New(c),
		daoClosed:      false,
		waiter:         new(sync.WaitGroup),
		waiterConsumer: new(sync.WaitGroup),
		contentSub:     databus.New(c.ContentSub),
		cron:           cron.New(),
		resuRetry:      make(map[string]int),
	}
	rand.Seed(time.Now().UnixNano())
	// flush Redis - zone list (once at startup, then on the cron below)
	go s.ZoneIdx()
	// a bad cron expression is a configuration bug: fail fast
	if err := s.cron.AddFunc(s.c.Redis.CronPGC, s.ZoneIdx); err != nil {
		panic(err)
	}
	if err := s.cron.AddFunc(s.c.PlayControl.ProducerCron, s.refreshCache); err != nil {
		panic(err)
	}
	if err := s.cron.AddFunc(s.c.Cfg.Merak.Cron, s.cmsShelve); err != nil {
		panic(err)
	}
	s.cron.Start()
	go s.searchSugproc()  // uploads the passed season's list to search sug's FTP
	go s.seaPgcContproc() // uploads pgc search content to sug's FTP
	// every producer below calls s.waiter.Done() when it exits
	s.waiter.Add(1)
	go s.syncEPs()
	s.waiter.Add(1)
	go s.resubEps()
	s.waiter.Add(1)
	go s.resubSns()
	s.waiter.Add(1)
	go s.syncSeason()
	s.waiter.Add(1)
	go s.delSeason()
	s.waiter.Add(1)
	go s.delCont()
	// Databus
	s.waiterConsumer.Add(1)
	go s.consumeContent() // consume Databus Message to update MC
	return
}
// Close dao.
// Shutdown order matters: first flip daoClosed so the producer loops
// drain and exit, stop cron and the databus subscription, wait for the
// producers and the consumer, and only then physically close the dao.
func (s *Service) Close() {
	if s.dao != nil {
		// logical close only; the physical s.dao.Close() happens after
		// both wait groups are drained
		s.daoClosed = true
		log.Info("Dao Closed!")
	}
	log.Info("Crontab Closed!")
	s.cron.Stop()
	log.Info("Databus Closed!")
	s.contentSub.Close()
	log.Info("Wait Producer!")
	s.waiter.Wait()
	log.Info("Wait SyncMC Consumers")
	s.waiterConsumer.Wait()
	log.Info("Physical Dao Closed!")
	s.dao.Close()
	log.Info("tv-job has been closed.")
}

View File

@@ -0,0 +1,36 @@
package pgc
import (
"flag"
"path/filepath"
"testing"
"time"
"go-common/app/job/main/tv/conf"
. "github.com/smartystreets/goconvey/convey"
)
var (
srv *Service
)
// init loads the test configuration and builds the shared Service
// instance used by every test in this package; the sleep gives the
// background goroutines started by New a moment to settle.
func init() {
	dir, _ := filepath.Abs("../../cmd/tv-job-test.toml")
	flag.Set("conf", dir)
	conf.Init()
	srv = New(conf.Conf)
	time.Sleep(time.Second)
}
// WithService adapts a test body that needs the shared Service into
// the no-argument closure shape expected by goconvey.
func WithService(f func(s *Service)) func() {
	wrapped := func() {
		f(srv)
	}
	return wrapped
}
// TestService_SearchSug smoke-tests the search-suggestion export; it
// exercises the full DB -> file -> FTP pipeline against the test
// configuration (integration-style, no assertions on the output).
func TestService_SearchSug(t *testing.T) {
	Convey("TestService_SearchSug", t, WithService(func(s *Service) {
		s.searchSug()
	}))
}

View File

@@ -0,0 +1,98 @@
package pgc
import (
"context"
"time"
"go-common/app/job/main/tv/dao/lic"
model "go-common/app/job/main/tv/model/pgc"
"go-common/library/ecode"
"go-common/library/log"
)
// syncEPs picks the data from DB to audit and combines the XML for the
// license owner (producer, content data => channel).
// It loops until the dao is logically closed: each round it picks the
// ready season ids, builds and sends one license call per batch of
// contents, queues failed batches for retry and marks every processed
// batch as auditing.
func (s *Service) syncEPs() {
	defer s.waiter.Done()
	for {
		if s.daoClosed {
			log.Info("syncEPs DB closed!")
			return
		}
		readySids, err := s.dao.ReadySns(ctx)
		if err != nil || len(readySids) == 0 {
			time.Sleep(time.Duration(s.c.Sync.Frequency.ErrorWait))
			continue
		}
		for _, sid := range readySids {
			var contSlices [][]*model.Content
			if contSlices, err = s.dao.PickData(ctx, sid); err != nil || len(contSlices) == 0 {
				continue
			}
			for _, conts := range contSlices {
				// failed batches go to the in-memory retry queue
				// consumed by resubEps
				if err = s.epsSync(sid, conts); err != nil {
					s.addRetryEps(conts)
				}
				s.dao.AuditingCont(ctx, conts) // update status to auditing
			}
		}
		time.Sleep(1 * time.Second)
	}
}
// epsSync builds the license for the given season's contents and sends
// it to the audit API; it returns the first error encountered.
func (s *Service) epsSync(sid int64, conts []*model.Content) (err error) {
	req := &model.ReqEpLicCall{
		SID:   sid,
		Conts: conts,
	}
	if req.EpLic, err = s.epLicCreate(ctx, sid, conts); err != nil {
		return err
	}
	return s.epLicCall(ctx, req)
}
// epLicCreate picks the sid and conts to create the license model:
// the season skeleton comes from newLic, then one Program entry is
// appended per content whose playurl can be resolved. Contents whose
// playurl fails are queued for retry and skipped; contents whose EP
// row is missing are skipped silently (only logged).
func (s *Service) epLicCreate(ctx context.Context, sid int64, conts []*model.Content) (epLic *model.License, err error) {
	var (
		season   *model.TVEpSeason
		prefix   = s.c.Sync.AuditPrefix
		programs []*model.Program
	)
	if season, err = s.dao.Season(ctx, int(sid)); err != nil {
		log.Error("Season ID %d, Err %v", sid, err)
		return
	}
	epLic = newLic(season, s.c.Sync)
	epLic.XMLData.Service.Head.Count = len(conts)
	for _, v := range conts {
		s.dao.WaitCall(ctx, v.EPID) // avoid always selecting the same data, give time to the caller
		url, _, errPlay := s.playurlDao.Playurl(ctx, v.CID)
		if errPlay != nil {
			log.Error("syncEPs EP Playurl EPID = %d, Error: %v", v.EPID, errPlay)
			s.addRetryEp(v) // retry this ep later instead of failing the batch
			continue
		}
		ep, errEP := s.dao.EP(ctx, v.EPID)
		if errEP != nil {
			log.Error("EpContent EPID %d Can't found", v.EPID)
			continue
		}
		program := model.CreateProgram(prefix, ep)
		program.ProgramMediaList = &model.PMList{
			ProgramMedia: []*model.PMedia{model.CreatePMedia(s.c.Sync.AuditPrefix, v.EPID, url)},
		}
		programs = append(programs, program)
	}
	// newLic guarantees exactly one ProgramSet, so index 0 is safe here
	epLic.XMLData.Service.Body.ProgramSetList.ProgramSet[0].ProgramList.Program = programs
	return
}
// epLicCall sends the prepared license XML to the audit AddURL.
// It returns the transport error when the call itself failed, or
// ecode.TvSyncErr when the call returned no response document.
//
// FIX: the original overwrote a real CallRetry error with the generic
// TvSyncErr (res is nil whenever err != nil), losing the root cause.
func (s *Service) epLicCall(ctx context.Context, req *model.ReqEpLicCall) (err error) {
	cfg := s.c.Sync
	res, err := s.licDao.CallRetry(ctx, cfg.API.AddURL, lic.PrepareXML(req.EpLic))
	if err != nil {
		return
	}
	if res == nil {
		err = ecode.TvSyncErr
	}
	return
}

View File

@@ -0,0 +1,249 @@
package pgc
import (
"context"
"encoding/json"
"fmt"
"reflect"
"time"
appDao "go-common/app/job/main/tv/dao/app"
model "go-common/app/job/main/tv/model/pgc"
"go-common/library/ecode"
"go-common/library/log"
timex "go-common/library/time"
)
// cntFunc counts the total rows to refresh (used to compute the number
// of paging pieces).
type cntFunc func(ctx context.Context) (count int, err error)

// refreshFunc refreshes one piece of nbData rows starting after LastID
// and returns the new max ID, which becomes the next paging cursor.
type refreshFunc func(ctx context.Context, LastID int, nbData int) (myLast int, err error)

// reqCachePro bundles the callbacks and page size for one cache
// producer run (see cacheProducer).
type reqCachePro struct {
	cnt     cntFunc
	proName string // label used in logs
	refresh refreshFunc
	ps      int // page size
}
// cacheProducer pages through req.cnt()/req.refresh() to rebuild a MC
// cache piece by piece. The refresh callback must return a strictly
// increasing max ID, which is used as the paging cursor.
//
// FIXES: the fatal "MaxID not increasing" path used to return a nil
// error (callers believed the refresh succeeded), and the per-piece
// refresh error was dropped from its own log line.
func (s *Service) cacheProducer(ctx context.Context, req *reqCachePro) (err error) {
	var (
		count    int
		pagesize = req.ps
		maxID    = 0 // the max ID of the latest piece (paging cursor)
		begin    = time.Now()
	)
	if count, err = req.cnt(ctx); err != nil {
		log.Error("[%s] CountEP error [%v]", req.proName, err)
		return
	}
	nbPiece := appDao.NumPce(count, pagesize)
	log.Info("[%s] NumPiece %d, Pagesize %d", req.proName, nbPiece, pagesize)
	for i := 0; i < nbPiece; i++ {
		newMaxID, errR := req.refresh(ctx, maxID, pagesize)
		if errR != nil {
			log.Error("[%s] Pick Piece %d Error %v, Ignore it", req.proName, i, errR)
			continue
		}
		if newMaxID <= maxID {
			// fatal: the cursor stopped increasing, continuing would
			// reprocess the same piece forever — report it to the caller
			err = fmt.Errorf("[%s] MaxID is not increasing! [%d,%d]", req.proName, newMaxID, maxID)
			log.Error("%v", err)
			return
		}
		maxID = newMaxID
		time.Sleep(time.Duration(s.c.UgcSync.Frequency.ProducerFre)) // pause after each piece produced
		log.Info("[%s] Pagesize %d, Num of piece %d, Time Already %v", req.proName, pagesize, i, time.Since(begin))
	}
	log.Info("[%s] Finish! Pagesize %d, Num of piece %d, Time %v", req.proName, pagesize, nbPiece, time.Since(begin))
	return
}
// refreshCache refreshes the MC caches of pgc eps and seasons; a
// failure in the ep refresh skips the season refresh.
func (s *Service) refreshCache() {
	ctx := context.Background()
	begin := time.Now()
	pieceSize := s.c.PlayControl.PieceSize
	epReq := &reqCachePro{
		cnt:     s.dao.CountEP,
		proName: "epProducer",
		refresh: s.dao.RefreshEPMC,
		ps:      pieceSize,
	}
	if err := s.cacheProducer(ctx, epReq); err != nil {
		log.Error("reqEp Err %v", err)
		return
	}
	snReq := &reqCachePro{
		cnt:     s.dao.CountSeason,
		proName: "snProducer",
		refresh: s.dao.RefreshSnMC,
		ps:      pieceSize,
	}
	if err := s.cacheProducer(ctx, snReq); err != nil {
		log.Error("reqSn Err %v", err)
	}
	log.Info("refreshCache Finish, Time %v", time.Since(begin))
}
// stockContent dispatches a databus row-change payload by table name:
// season rows go to stockSeason, ep rows to stockEP; rows whose media
// fields did not change are skipped, unknown tables are an error.
func (s *Service) stockContent(jsonstr json.RawMessage, tableName string) (err error) {
	switch tableName {
	case "tv_ep_season": // season stock in MC
		sn := &model.DatabusSeason{}
		if err = json.Unmarshal(jsonstr, sn); err != nil {
			log.Error("json.Unmarshal(%s) error(%v)", jsonstr, err)
			return
		}
		if reflect.DeepEqual(sn.Old, sn.New) { // if media fields not modified, no need to update
			log.Info("SeasonID %d No need to update", sn.New.ID)
			return
		}
		return s.stockSeason(sn)
	case "tv_content": // ep stock in MC
		ep := &model.DatabusEP{}
		if err = json.Unmarshal(jsonstr, ep); err != nil {
			log.Error("json.Unmarshal(%s) error(%v)", jsonstr, err)
			return
		}
		if reflect.DeepEqual(ep.Old, ep.New) { // if media fields not modified, no need to update
			log.Info("Epid %d No need to update", ep.New.EPID)
			return
		}
		return s.stockEP(ep)
	default:
		return fmt.Errorf("Databus Msg (%s) - Incorrect Table (%s) ", jsonstr, tableName)
	}
}
// composeSnCMS builds the SeasonCMS cache entry from a databus season
// payload, filling in the newest ep/order from DB and converting the
// playtime string; lookup failures are logged and leave zero values.
func (s *Service) composeSnCMS(sn *model.MediaSn) *model.SeasonCMS {
	var (
		epid, order int
		err         error
		playtime    int64
	)
	// best effort: a missing newest order only degrades the cache entry
	if epid, order, err = s.dao.NewestOrder(ctx, sn.ID); err != nil {
		log.Warn("stockSeason NewestOrder Sid: %d, Err %v", sn.ID, err)
	}
	if playtime, err = appDao.TimeTrans(sn.Playtime); err != nil {
		log.Warn("stockSeason Playtime Sid: %d, Err %v", sn.ID, err)
	}
	return &model.SeasonCMS{
		SeasonID:    int(sn.ID),
		Cover:       sn.Cover,
		Desc:        sn.Desc,
		Title:       sn.Title,
		UpInfo:      sn.UpInfo,
		Category:    sn.Category,
		Area:        sn.Area,
		Playtime:    timex.Time(playtime),
		Role:        sn.Role,
		Staff:       sn.Staff,
		TotalNum:    sn.TotalNum,
		Style:       sn.Style,
		NewestOrder: order,
		NewestEPID:  epid,
		PayStatus:   sn.Status, // databus sn logic
	}
}
// stockSeason treats the databus season msg: it stores the auth & media
// info in MC, re-submits a still-passed season to audit and maintains
// the Redis zone list.
//
// FIX: the three log.Error calls below had one format verb for two
// arguments, producing "%!(EXTRA ...)" garbage in the logs.
func (s *Service) stockSeason(sn *model.DatabusSeason) (err error) {
	var (
		snSub   *model.TVEpSeason
		snAuth  = sn.New.ToSimple()      // auth info in MC
		snMedia = s.composeSnCMS(sn.New) // media info in MC
	)
	s.batchFilter(ctx, []*model.SeasonCMS{snMedia}) // treat the newest NB logic
	if sn.New.Check == _seasonPassed && sn.Old.Check == _seasonPassed { // keep already passed logic
		if snSub, err = s.dao.Season(ctx, int(sn.New.ID)); err != nil {
			return
		}
		s.addRetrySn(snSub)
	}
	if err = s.dao.SetSeason(ctx, snAuth); err != nil { // auth
		log.Error("SetSeason %v error(%v)", snAuth, err)
		return
	}
	if err = s.dao.SetSnCMSCache(ctx, snMedia); err != nil { // media
		log.Error("SetSnCMSCache %v error(%v)", snMedia, err)
		return
	}
	if err = s.listMtn(sn.Old, sn.New); err != nil { // maintenance of the zone list in Redis
		log.Error("stockContent listMtn %v error(%v)", sn.New, err)
	}
	return
}
// stockEP treats a databus ep message: it refreshes the ep's auth and
// media caches in MC, re-submits a still-passed ep to audit, and then
// refreshes the parent season's CMS cache.
func (s *Service) stockEP(ep *model.DatabusEP) (err error) {
	epAuth := ep.New.ToSimple()
	epMedia := ep.New.ToCMS()
	// an ep that stays in the passed state must be re-submitted to audit
	if ep.Old.State == _epPassed && ep.New.State == _epPassed {
		var epSub *model.Content
		if epSub, err = s.dao.Cont(ctx, ep.New.EPID); err != nil {
			return
		}
		s.addRetryEp(epSub)
	}
	if err = s.dao.SetEP(ctx, epAuth); err != nil { // ep auth cache
		return
	}
	if err = s.dao.SetEpCMSCache(ctx, epMedia); err != nil { // ep media cache
		return
	}
	// keep the season's "latest ep" info in sync with this ep change
	return s.updateSnCMS(epAuth.SeasonID)
}
// updateSnCMS picks the season info from DB and updates its CMS cache,
// so that an ep change also refreshes the season's "latest" info.
//
// FIX: the SetSnCMSCache log format had one verb for two arguments.
func (s *Service) updateSnCMS(sid int) (err error) {
	var snMedia *model.SeasonCMS
	if snMedia, err = s.dao.PickSeason(ctx, sid); err != nil { // pick season cms info
		log.Error("stockEP PickSeason Sid: %d, Err: %v", sid, err)
		return
	}
	if snMedia == nil { // season info not found
		err = ecode.NothingFound
		log.Error("stockEP PickSeason Sid: %d, Err: %v", sid, err)
		return
	}
	s.batchFilter(ctx, []*model.SeasonCMS{snMedia}) // recompute the newest ep number
	if err = s.dao.SetSnCMSCache(ctx, snMedia); err != nil {
		log.Error("SetSnCMSCache %v error(%v)", snMedia, err)
	}
	return
}
// consumeContent consumes the Databus messages; because the daily
// modification volume is not big, a simple sequential loop is used.
// It exits when the databus channel is closed (see Close) and signals
// waiterConsumer on the way out.
func (s *Service) consumeContent() {
	defer s.waiterConsumer.Done()
	for {
		msg, ok := <-s.contentSub.Messages()
		if !ok {
			log.Info("databus: tv-job ep/season consumer exit!")
			return
		}
		// commit before processing: a crash mid-treatMsg loses the
		// message rather than reprocessing it (at-most-once)
		msg.Commit()
		s.treatMsg(msg.Value)
		time.Sleep(1 * time.Millisecond)
	}
}
// treatMsg decodes one databus message and forwards it to stockContent;
// deletion events are logged and ignored.
func (s *Service) treatMsg(msg json.RawMessage) {
	log.Info("[ConsumeContent] New Message: %s", msg)
	res := &model.DatabusRes{}
	if err := json.Unmarshal(msg, res); err != nil {
		log.Error("json.Unmarshal(%s) error(%v)", msg, err)
		return
	}
	if res.Action == "delete" {
		log.Info("[ConsumeContent] Content Deletion, We ignore:<%v>,<%v>", res, msg)
		return
	}
	if err := s.stockContent(msg, res.Table); err != nil {
		log.Error("stockContent.(%s,%s), error(%v)", msg, res.Table, err)
	}
}

View File

@@ -0,0 +1,13 @@
package pgc
import (
"testing"
. "github.com/smartystreets/goconvey/convey"
)
// TestService_FullRefresh smoke-tests the full ep/season MC cache
// refresh against the test configuration (integration-style, no
// assertions on the output).
func TestService_FullRefresh(t *testing.T) {
	Convey("No redundant data", t, WithService(func(s *Service) {
		s.refreshCache()
	}))
}

View File

@@ -0,0 +1,165 @@
package pgc
import (
"time"
"go-common/app/job/main/tv/model/common"
"go-common/app/job/main/tv/model/pgc"
"go-common/library/log"
)
// addRetryEp queues a single ep for re-submission (see addRetryEps).
func (s *Service) addRetryEp(in *pgc.Content) {
	s.addRetryEps([]*pgc.Content{in})
}
// addRetryEps appends the given eps to the in-memory retry list,
// dropping those that already exceeded the retry limit; an empty
// result is a silent no-op.
func (s *Service) addRetryEps(in []*pgc.Content) {
	var (
		epids    []int
		newConts []*pgc.Content
	)
	for _, cont := range in {
		// retryLimit also bumps the stored retry counter
		if s.retryLimit(false, int64(cont.EPID)) {
			newConts = append(newConts, cont)
			epids = append(epids, cont.EPID)
		}
	}
	if len(newConts) == 0 {
		return
	}
	log.Warn("addRetryEps Add IDs %v", epids)
	s.ResuEps = append(s.ResuEps, newConts...)
}
// pickRetryEp drains and returns the to-retry eps held in memory;
// it returns nil when there is nothing pending.
func (s *Service) pickRetryEp() (res []*pgc.Content) {
	if len(s.ResuEps) == 0 {
		return
	}
	res = s.ResuEps
	s.ResuEps = make([]*pgc.Content, 0)
	log.Info("pickRetry EP Len %d", len(res))
	return
}
// resubEps re-submits the eps that previously failed to sync.
// It loops until the dao is logically closed: each round it drains the
// in-memory retry queue, retries each ep individually, re-queues the
// ones that fail again and clears the MC retry counter of the ones
// that succeed.
func (s *Service) resubEps() {
	defer s.waiter.Done()
	for {
		if s.daoClosed {
			log.Info("resubEps DB closed!")
			return
		}
		readyEps := s.pickRetryEp() // pick to-retry eps from memory
		if len(readyEps) == 0 {
			log.Info("resubEps Empty")
			time.Sleep(time.Duration(s.c.Cfg.SyncRetry.RetryFre))
			continue
		}
		againEps := make([]*pgc.Content, 0)
		for _, ep := range readyEps { // retry them
			if err := s.epsSync(int64(ep.SeasonID), []*pgc.Content{ep}); err != nil { // if error, re-add this item into re-sub list
				log.Error("resubEps Sid %d, Epid %v, Err %v", ep.SeasonID, ep.EPID, err)
				againEps = append(againEps, ep)
				continue
			}
			retry := &common.SyncRetry{}
			retry.FromEp(0, int64(ep.EPID))
			s.dao.DelRetry(ctx, retry) // after succ, del it from MC
		}
		if len(againEps) > 0 {
			s.addRetryEps(againEps)
		}
		time.Sleep(time.Duration(s.c.Cfg.SyncRetry.RetryFre))
	}
}
// addRetrySn queues a single season for re-submission (see addRetrySns).
func (s *Service) addRetrySn(in *pgc.TVEpSeason) {
	s.addRetrySns([]*pgc.TVEpSeason{in})
}
// addRetrySns adds seasons into the retry list, dropping those that
// already exceeded the retry limit.
//
// FIX: added the empty-result guard that addRetryEps already has, so
// an all-filtered batch no longer logs a noisy "Add IDs []" line.
func (s *Service) addRetrySns(in []*pgc.TVEpSeason) {
	var (
		sids     []int64
		newConts []*pgc.TVEpSeason
	)
	for _, v := range in {
		if !s.retryLimit(true, v.ID) { // filter seasons retried too many times
			continue
		}
		newConts = append(newConts, v)
		sids = append(sids, v.ID)
	}
	if len(newConts) == 0 {
		return
	}
	log.Warn("addRetrySns Add IDs %v", sids)
	s.ResuSns = append(s.ResuSns, newConts...)
}
// pickRetrySn drains and returns the to-retry seasons held in memory;
// it returns nil when there is nothing pending.
func (s *Service) pickRetrySn() (res []*pgc.TVEpSeason) {
	if len(s.ResuSns) == 0 {
		return
	}
	res = s.ResuSns
	s.ResuSns = make([]*pgc.TVEpSeason, 0)
	log.Info("pickRetry Sn Len %d", len(res))
	return
}
// resubSns re-submits the seasons that previously failed to sync.
// It loops until the dao is logically closed: each round it drains the
// in-memory retry queue, retries each season, re-queues the ones that
// fail again and clears the MC retry counter of the ones that succeed.
func (s *Service) resubSns() {
	defer s.waiter.Done()
	for {
		if s.daoClosed {
			log.Info("resubSns DB closed!")
			return
		}
		readySns := s.pickRetrySn()
		if len(readySns) == 0 {
			log.Info("resubSns Empty")
			time.Sleep(time.Duration(s.c.Cfg.SyncRetry.RetryFre))
			continue
		}
		againSns := make([]*pgc.TVEpSeason, 0)
		for _, sn := range readySns {
			if err := s.snSync(sn); err != nil { // if error, re-add this item into re-sub list
				log.Error("resubSns Sid %d, Err %v", sn.ID, err)
				againSns = append(againSns, sn)
				continue
			}
			retry := &common.SyncRetry{}
			retry.FromSn(0, sn.ID)
			s.dao.DelRetry(ctx, retry) // after succ, del it from MC
		}
		if len(againSns) > 0 {
			s.addRetrySns(againSns)
		}
		time.Sleep(time.Duration(s.c.Cfg.SyncRetry.RetryFre))
	}
}
// retryLimit limits the retry times: it reports whether the item
// (season when isSn, ep otherwise) may still be retried, and bumps the
// stored retry counter as a side effect of a positive answer.
func (s *Service) retryLimit(isSn bool, id int64) bool {
	var req = &common.SyncRetry{}
	if isSn {
		req.FromSn(0, id)
	} else {
		req.FromEp(0, id)
	}
	retryTms, err := s.dao.GetRetry(ctx, req)
	if err != nil {
		// on a counter read failure, err on the side of retrying
		log.Error("GetRetry Req %s, Err %v", req.MCKey(), err)
		return true
	}
	if retryTms > s.c.Cfg.SyncRetry.MaxRetry {
		log.Error("retryLimit Req %s, Retry Already %d times, stop here", req.MCKey(), retryTms)
		return false
	}
	// record one more attempt; best effort, the SetRetry error is ignored
	s.dao.SetRetry(ctx, &common.SyncRetry{
		Ctype: req.Ctype,
		CID:   req.CID,
		Retry: retryTms + 1,
	})
	return true
}

View File

@@ -0,0 +1,46 @@
package pgc
import (
"database/sql"
"time"
"go-common/app/job/main/tv/dao/lic"
model "go-common/app/job/main/tv/model/pgc"
"go-common/library/ecode"
"go-common/library/log"
)
// syncSeason syncs the modified season data to the license owner.
// It loops until the dao is logically closed: each round it picks the
// modified seasons from DB, re-submits each one (queueing failures for
// retry) and marks it auditing.
func (s *Service) syncSeason() {
	defer s.waiter.Done()
	for {
		if s.daoClosed {
			log.Info("syncSeason DB closed!")
			return
		}
		modSeason, err := s.dao.ModSeason(ctx)
		if err == sql.ErrNoRows || len(modSeason) == 0 {
			log.Info("No modified data to pick from Season to audit")
			time.Sleep(time.Duration(s.c.Sync.Frequency.FreModSeason))
			continue
		}
		for _, v := range modSeason {
			// failed seasons go to the retry queue consumed by resubSns
			if err = s.snSync(v); err != nil {
				s.addRetrySn(v)
			}
			s.dao.AuditSeason(ctx, int(v.ID)) // update season status after succ
		}
		time.Sleep(1 * time.Second) // pause after each loop
	}
}
// snSync builds the license XML for one season (program list omitted)
// and sends it to the license owner's update API. It returns the
// transport error when the call failed, or ecode.TvSyncErr when the
// call returned no response document.
//
// FIX: like epLicCall, the original overwrote a real CallRetry error
// with the generic TvSyncErr (res is nil whenever err != nil).
func (s *Service) snSync(sn *model.TVEpSeason) (err error) {
	cfg := s.c.Sync
	data := newLic(sn, cfg)
	data.XMLData.Service.Head.Count = 1
	res, err := s.licDao.CallRetry(ctx, cfg.API.UpdateURL, lic.PrepareXML(data))
	if err != nil {
		return
	}
	if res == nil {
		err = ecode.TvSyncErr
	}
	return
}

View File

@@ -0,0 +1,60 @@
package pgc
import (
"context"
model "go-common/app/job/main/tv/model/pgc"
"go-common/library/log"
)
const (
	// _seasonPassed is the season Check value meaning audit passed.
	_seasonPassed = 1
	// _epPassed is the ep State value meaning audit passed.
	_epPassed = 3
	// _cmsValid is the Valid flag meaning the item is visible in CMS.
	_cmsValid = 1
	// _notDeleted is the IsDeleted flag of a live row.
	_notDeleted = 0
)
// ZoneIdx finds all the passed seasons of every configured pgc zone in
// DB and rebuilds the corresponding sorted-set index in Redis.
func (s *Service) ZoneIdx() {
	ctx := context.Background()
	for _, zoneID := range s.c.Cfg.PGCZonesID {
		zoneSns, err := s.dao.PassedSn(ctx, zoneID)
		if err != nil {
			log.Error("ZoneIdx - PassedSn %d Error %v", zoneID, err)
			continue
		}
		if err = s.dao.Flush(ctx, zoneID, zoneSns); err != nil {
			log.Error("ZoneIdx - Flush %d Error %v", zoneID, err)
		}
	}
}
// listMtn maintains the zone index in Redis when a season's visibility
// changes: it removes the season when it leaves the passed state and
// adds it when it enters the passed state; no-ops otherwise.
//
// FIX: the two error logs used %s for the *model.MediaSn pointer,
// which prints "%!s(...)" noise unless the type has a String method —
// switched to %v.
func (s *Service) listMtn(oldSn *model.MediaSn, newSn *model.MediaSn) (err error) {
	if oldSn == nil {
		log.Info("ListMtn OldSn is Nil, NewSn is %v", newSn)
		oldSn = &model.MediaSn{} // zero value counts as "previously not passed"
	}
	// visible reports whether the season is live in the zone index
	visible := func(sn *model.MediaSn) bool {
		return sn.Check == _seasonPassed && sn.IsDeleted == _notDeleted && sn.Valid == _cmsValid
	}
	if visible(oldSn) { // previously passed
		if !visible(newSn) { // not passed now: remove from the index
			if err = s.dao.ZRemIdx(ctx, newSn.Category, newSn.ID); err != nil {
				log.Error("listMtn - ZRemIdx - Category: %d, Sn: %v, Error: %v", newSn.Category, newSn, err)
				return
			}
			log.Info("Remove Sid %d From Zone %d", newSn.ID, newSn.Category)
		}
	} else if visible(newSn) { // previously not passed, passed now: add
		if err = s.dao.ZAddIdx(ctx, newSn.Category, newSn.Ctime, newSn.ID); err != nil {
			log.Error("listMtn - ZAddIdx - Category: %d, Sn: %v, Error: %v", newSn.Category, newSn, err)
			return
		}
		log.Info("Add Sid %d Into Zone %d", newSn.ID, newSn.Category)
	}
	return
}

View File

@@ -0,0 +1,13 @@
package pgc
import (
"testing"
. "github.com/smartystreets/goconvey/convey"
)
// TestService_ZoneIdx smoke-tests the zone index rebuild against the
// test configuration (integration-style, no assertions on the output).
func TestService_ZoneIdx(t *testing.T) {
	Convey("ZoneIdx", t, WithService(func(s *Service) {
		s.ZoneIdx()
	}))
}