Create & Init Project...

This commit is contained in:
2019-04-22 18:49:16 +08:00
commit fc4fa37393
25440 changed files with 4054998 additions and 0 deletions

21
app/job/main/tv/BUILD Normal file
View File

@@ -0,0 +1,21 @@
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [
":package-srcs",
"//app/job/main/tv/cmd:all-srcs",
"//app/job/main/tv/conf:all-srcs",
"//app/job/main/tv/dao:all-srcs",
"//app/job/main/tv/http:all-srcs",
"//app/job/main/tv/model:all-srcs",
"//app/job/main/tv/service:all-srcs",
],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,244 @@
#### tv-job
#### Version 1.7.6
> 1. tv数据上报多表联合查询上报改为单表查询上报
#### Version 1.7.5
> 1. seasonCMS结构体新增pay_status字段写入缓存中用于tv-interface的卡片贴标
> 2. 消费databus更新ep缓存逻辑中新增pay_status和subtitle字段
#### Version 1.7.4
> 1. Season的Area字段由int改为string型兼容多个地区合拍的情况目前兼容老数据当area仍为int时走老逻辑否则走新逻辑
> 2. 修复空season提审问题
#### Version 1.7.3
> 1. 同步pgc付费信息给牌照
#### Version 1.7.2
> 1. 修改sync.Waitgroup的写法将s.waiter.Add()移出
> 2. 修改cache.New的写法使用新的Fanout
#### Version 1.7.1
> 1. 补全dao/app下的ut
#### Version 1.7.0
> 1. 判断UGC稿件为付费稿件时不导入tv后台
> 2. 对稿件的判断如ugc付费原创等全部收敛到model中方便以后迭代
#### Version 1.6.9
> 1. 稿件自动cms上架逻辑如果当前稿件为过审且cms下架状态且稿件新增
#### Version 1.6.8
> 1. 补dao层中archive和ftp的ut
#### Version 1.6.7
> 1. 优化tv数据上报逻辑
#### Version 1.6.6
> 1. 修复一个cid出现在两个稿件时其中一个cid发送到牌照方后另外一个cid虽未发送但也显示已发送
#### Version 1.6.5
> 1. 添加详情页风格标签及跳转
#### Version 1.6.4
> 1. UGC提审加速
- videoDatabus中判断是否为转码完成并且需要提交的如是则加入channel中
- fullRefresh中的aid遍历逻辑也进由channel处理
> 2. 写法优化原创逻辑、可提审逻辑等迁入model中方便复用
#### Version 1.6.3
> 1. 为准备tv会员临时对所有无过审或免费的season做下架处理反之做上架处理上下架操作结果发送到企业微信
> 2. 在配置中设置开关tv会员上下后只对无过审的season做下架不再考虑付费状态
#### Version 1.6.2
> 1. 修复数据上报bug
#### Version 1.6.1
> 1. databus监听逻辑优化比较媒资字段是否修改后再更新
> 2. 优化PGC过审保持逻辑
- 去掉所有带7的逻辑
- 改为由databus更新放入内存中另一侧进行消费
- 提审由ep改为season异步改同步
> 3. PGC提审优化
- 第一次提审失败后即进入resub队列最多重试5次
- 去掉audit_time相关的操作
> 4. PGC提审新增字段出品方和剧集类型
#### Version 1.6.0
> 1. tv数据上报
#### Version 1.5.9
> 1. tv-job做grpc改造
> 2. 一些log.error改成log.warn
#### Version 1.5.8
> 1. 筛选archive-Notify T消息时需要大量查询DB中upper表优化为查询内存中的map
> 2. pgc的全量缓存刷新由异步改为同步使用type func统一season和ep的刷新方式减少代码量
> 3. call视频云改为异步批量操作提升效率降低DB依赖
> 4. UGC的视频全量扫描进行以下任务
- (原有) 稿件缓存刷新
- (原有) 视频缓存刷新
- (新增) 处理删除稿件如其下仍有未删除的分p则进行删除
- (新增) 处理cid > 12780000 并且 转码失败的分p进行分p及稿件如稿件下无有效分p的删除
- (新增) 获取稿件下所有可提审的video分片后拼接提审xml取消原有扫描DB提审的方式避免慢查询
- (新增) 提审时如果命中视频云指定错误码如10005则分p删除和稿件如无其他分p删除
> 5. 修复seasonCMS的model中的json tag保证芒果媒资中的origin_name字段可以正常从MC中获取
> 6. dao层所有rows.Next补上rows.Err
#### Version 1.5.7
> 1. 芒果媒资同步配套
#### Version 1.5.6
> 1. 转载视频从manual、auto和init三个途径禁止进入
> 2. cid <= 12780000的分p视频无需提交视频云
#### Version 1.5.5
> 1. 导入up主的每页视频中增加间隙防止cpu、db query等过快增长触发报警
> 2. 消费databus的消息时通过配置可限制goroutines的数量
> 3. ugc全量缓存所有配置和pgc配置分开、ugc全量缓存生产时增加间隙、取消job启时刷新全量ugc缓存
> 4. 优化选取ugc提审video和archive的sql以避免扫描全表
> 5. 修改稿件重新提审不再依赖DB转而由databus获取消息后放入channel中进行通知
#### Version 1.5.4
> 1. 配合tv-interface的历史记录需求增加pgc中ep的cms信息的缓存逻辑
> 2. 简化pgc索引页逻辑
#### Version 1.5.3
> 1. service、dao、model层整理全部改为文件夹封装
> 2. 在redis中维护ugc索引页数据生活科技游戏时尚音乐
#### Version 1.5.2
> 1. ugc稿件提审条件中不必要视频转码完成若其cid <= 12780000认为已转码完成无需提审
> 2. 完善tv-job的UT提升ut通过率至100%
#### Version 1.5.1
> 1. 修复up主刷新时异步回写缓存导致缓存中数据不全的问题
> 2. 修复up主face因为bfs节点导致误认为是头像更新的问题
#### Version 1.5.0
> 1. [up主管理及同步] 新增dao/upper
- 将手动提审的aid落库时如发现其up主未在白名单内加入到白名单内
- 同步ugc分p或者稿件时携带其稿件的up主信息给牌照取原名
- 定时全量刷新tv端up主白名单信息检测up主头像和昵称的变动触发牌照同步
> 2. 增加UT覆盖率
#### Version 1.4.7
> 1. 新增逻辑新增ugc稿件时如命中pgc分区则忽略
> 2. 新增逻辑ugc稿件分区修改同步如命中pgc分区则删除该稿件
#### Version 1.4.6
> 1. 针对archive-notify T中的archive的稿件的封面字段进行补全
#### Version 1.4.5
> 1. 修复UGC搜索数据源上传ftp时的字段由id改为aid
> 2. 修改全量刷新缓存的策略采用串行的方式降低并发度避免过多timeout
#### Version 1.4.4
> 1. UGC和PGC搜索数据源上传ftp
> 2. 修改ftp上传的方式由cron改为sleep
#### Version 1.4.3
> 1. ugc提审处判断视频云转码状态如果转码未完成不提审
#### Version 1.4.2
> 1. 修复copyright导致ugc不提审的问题
> 2. 修复pgc删除接口通知牌照时sign和prefix倒反的问题
> 3. 对于删除接口,如果返回为-404视频未找到不认为是错误更新通知状态为成功
#### Version 1.4.1
> 1. 针对当贝和芒果的最新更新第X集的需求使用过滤方式进行计算加入缓存中
> 2. 修复问题在最新ep过审后不会更新season的缓存导致前端拿到的newestEP不准确
#### Version 1.4.0
> 1. 在自身表的变更databus消息的消费逻辑中增加如果archive为过审状态则缓存archive的arc和view的rpc结果便于ugc详情页调用archive的数据
> 2. 在fullRefresh中添加ugc的video的逻辑将ugc的video全量缓存铺满mc
#### Version 1.3.8
> 1. 修复缓存中newestOrder字段的逻辑
#### Version 1.3.7
> 1. 针对ugc稿件定时刷新全量的cms和鉴权信息databus刷新增量的cms和鉴权信息
#### Version 1.3.6
> 1. 去掉ping方法中的内容避免被踢节点
> 2. 向搜索ftp上传文件时增加重试
> 3. 去掉一些重复的log.Error
> 4. 维护全量和增量的pgc和ugc的媒资信息用于在tv-interface中吐出给当贝
> 扩展在mc中的pgc的媒资信息定时全量刷新databus增量刷新
> 新增在mc中的ugc的媒资信息定时全量刷新databus增量刷新
#### Version 1.3.5
> 1. 不同步UGC转载的稿件到牌照方
#### Version 1.3.4
> 1. 在mc中的ep鉴权信息中增加watermark信息
#### Version 1.3.3
> 1. 同步新增视频至视频云
#### Version 1.3.2
> 1. 修复牌照字段错误
#### Version 1.3.1
> 1. 修复重启时调用service.Close报错的问题
#### Version 1.3.0
> 1. 对接UGC视频数据
> 2. 手动提审视频落库
> 3. 导入up主的全量历史数据
> 4. 新增数据提审!!
> 5. 数据修改删除通知牌照方分p全量diff判断修改
#### Version 1.2.10
> 1.fix xml错误的 panic
#### Version 1.2.9
> 1.添加pgc下架过审状态保持的配套逻辑当season/ep状态为7时通知牌照方已删除之后恢复到过审状态。
#### Version 1.2.8
> 1. 修改配置
#### Version 1.2.7
> 1.修复文件写入不截断的问题加入TRUNC模式
#### Version 1.2.6
> 1.定期向搜索的ftp上传过审season的title的文件
> 2. Update Bazel
#### Version 1.2.5
> 1.同步修改过的season时检查其下的ep数量为0时不提交
#### Version 1.2.4
> 1.修改牌照提审前缀为可配置xds
#### Version 1.2.3
> 1.添加redis相关逻辑在redis中为每个分区维护一个过审season id的列表
> 2.增加databus消息同步redis逻辑
#### Version 1.2.2
> 1.playurl接口添加参数qn=16, fix视频质量为16
#### Version 1.2.1
> 1.修复文件名命名,去除大小和驼峰
> 2.对接playurl接口获取playurl后提供给牌照方
#### Version 1.2.0
> 1.增加重新提审逻辑如果原来已过审season的check字段ep的state字段会在pgc更新后变为7。tv-job对于7的进行单独重新提审而后立刻恢复上线状态。
##### Version 1.1.0
> 1. 添加全量数据MC同步 - 每日一次定时任务将DB中数据刷新到MC中
> 2. 添加异步数据MC同步 - 监听Databus数据更新MC中 - 审核状态+干预数据
##### Version 1.0.1
> 1. 添加season的zone的翻译除了1中国2日本之外的数字全部翻译为“其他”
##### Version 1.0.0
> 1. 初始版本对比pgc同步过来的ep、season表和tv_content_ep表的数据差值插入到content表state为1待审核
> 2. 选取所有待审核的ep信息调用牌照方接口提审包含视频云临时视频url

View File

@@ -0,0 +1,13 @@
# Owner
renwei
liweijia
# Author
zhaoshichen
gukai
# Reviewer
renwei
wuhao
liweijia
guanyanliang

20
app/job/main/tv/OWNERS Normal file
View File

@@ -0,0 +1,20 @@
# See the OWNERS docs at https://go.k8s.io/owners
approvers:
- gukai
- liweijia
- renwei
- zhaoshichen
labels:
- job
- job/main/tv
- main
options:
no_parent_owners: true
reviewers:
- guanyanliang
- gukai
- liweijia
- renwei
- wuhao
- zhaoshichen

10
app/job/main/tv/README.md Normal file
View File

@@ -0,0 +1,10 @@
#### tv-job
##### 项目简介
> 1.电视端后端
##### 编译环境
> 请只用golang v1.7.x以上版本编译执行。
##### 依赖包
> 1.公共包go-common

42
app/job/main/tv/cmd/BUILD Normal file
View File

@@ -0,0 +1,42 @@
load(
"@io_bazel_rules_go//go:def.bzl",
"go_binary",
"go_library",
)
go_binary(
name = "cmd",
embed = [":go_default_library"],
tags = ["automanaged"],
)
go_library(
name = "go_default_library",
srcs = ["main.go"],
data = ["tv-job-test.toml"],
importpath = "go-common/app/job/main/tv/cmd",
tags = ["automanaged"],
visibility = ["//visibility:public"],
deps = [
"//app/job/main/tv/conf:go_default_library",
"//app/job/main/tv/http:go_default_library",
"//app/job/main/tv/service/pgc:go_default_library",
"//app/job/main/tv/service/ugc:go_default_library",
"//library/log:go_default_library",
"//library/net/trace:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,58 @@
package main
import (
"flag"
"os"
"os/signal"
"syscall"
"time"
"go-common/app/job/main/tv/conf"
"go-common/app/job/main/tv/http"
"go-common/app/job/main/tv/service/pgc"
"go-common/app/job/main/tv/service/ugc"
"go-common/library/log"
"go-common/library/net/trace"
)
var (
pgcsrv *pgc.Service
ugcsrv *ugc.Service
)
// main boots the tv-job process: parse flags, load configuration (local file
// or remote config center), initialize logging and tracing, construct the pgc
// and ugc services, start the HTTP server, then block on signal handling.
func main() {
	flag.Parse()
	if err := conf.Init(); err != nil {
		log.Error("conf.Init() error(%v)", err)
		panic(err)
	}
	// init log and tracer; both are closed (in reverse order) on exit.
	log.Init(conf.Conf.Log)
	trace.Init(conf.Conf.Tracer)
	defer trace.Close()
	defer log.Close()
	log.Info("tv-job start")
	pgcsrv = pgc.New(conf.Conf)
	ugcsrv = ugc.New(conf.Conf)
	http.Init(conf.Conf)
	// blocks until a termination signal arrives.
	signalHandler()
}
// signalHandler blocks until the process receives a termination signal
// (SIGQUIT/SIGTERM/SIGINT). It then closes both services and sleeps one
// second to give in-flight work a chance to drain before returning.
// SIGHUP is deliberately ignored (empty case keeps the loop running).
func signalHandler() {
	var ch = make(chan os.Signal, 1)
	signal.Notify(ch, syscall.SIGHUP, syscall.SIGQUIT, syscall.SIGTERM, syscall.SIGINT)
	for {
		si := <-ch
		switch si {
		case syscall.SIGQUIT, syscall.SIGTERM, syscall.SIGINT:
			log.Info("get a signal %s, stop the consume process", si.String())
			pgcsrv.Close()
			ugcsrv.Close()
			time.Sleep(time.Second)
			return
		case syscall.SIGHUP:
			// no-op: ignore hangup (e.g. terminal detach) and keep serving.
		default:
			// only the notified signals should arrive; bail out defensively.
			return
		}
	}
}

View File

@@ -0,0 +1,285 @@
# This is a TOML document. Boom.
version = "2.3.0"
user = "nobody"
pid = "/tmp/tv-job.pid"
dir = "./"
family = "tv-job"
address = ""
[cfg]
titleFilter = ["预告","番外","花絮","先导","彩蛋","特别篇","PV","总集","剧场版","SP","干颜篇","精编版","小剧场","合集","新年篇","序幕","宣传"]
lessStrategy = 1
pgcTypes = ["番剧","电视剧","电影","纪录片"]
pgcZonesID = [1,2,3,4,5]
[cfg.merak]
host = "http://merak.bilibili.co/"
Key = "71f079db59672ecec5b8d6f252c4b59ab2a8a227mainsite@bilibili.com"
Secret = "37ba757817b4e9c45c7e97f6ed5eee4e1c7bac52"
Names = ['hekai01','huangyi01','zhaoshichen']
Template = "今日season状态同步完成。上线season%s下线season%s。"
Title = "TV CMS自动上下架操作"
cron = "0 0 18 * * *" # every minute, test
onlyFree = true
[cfg.SyncRetry]
MaxRetry = 10
RetryFre = "15s"
[cfg.UgcZones]
[cfg.UgcZones.6]
TID=3
Name="音乐"
[cfg.UgcZones.7]
TID=4
Name="游戏"
[cfg.UgcZones.8]
TID=36
Name="科技"
[cfg.UgcZones.9]
TID=160
Name="生活"
[cfg.UgcZones.10]
TID=155
Name="时尚"
[search]
UgcSwitch = "off"
sugPath = "/tmp/tvsug"
md5Path = "/tmp/tvsug.md5"
PgcContPath = "/tmp/tvpgc"
PgcContMd5Path = "/tmp/tvpgc.md5"
UgcContPath = "/tmp/tvugc"
UgcContMd5Path = "/tmp/tvugc.md5"
[search.cfg]
UploadFre = "100s"
[search.ftp]
PASS="MhxzKhl"
USER="work"
HOST="172.16.33.203:21"
URL="/home/work/open/tvsug/"
Timeout="5s"
UseEPSV=false
RemoteFName="tvsug"
RemoteMd5="tvsug.md5"
RemotePgcURL="/home/work/open/tvpgc/"
RemotePgcCont="tvpgc"
RemotePgcContMd5="tvpgc.md5"
RemoteUgcURL="/home/work/open/tvugc/"
RemoteUgcCont="tvugc"
RemoteUgcContMd5="tvugc.md5"
[sync]
NbMsg = 1
HTTPTimeout = "5s"
DialTimeout = "800ms"
LogSize = 400
AuditPrefix = "xds"
UGCPrefix = "ugc"
sign = "timer-import_BILIBILI"
[sync.LConf]
SizeMsg = 80
NbSeason = 10
CPCode = "BILIBILI"
[sync.PlayURL]
upsigsecret = "20170607920cbd5211831ce2a97066a8b544fa7b"
deadline = "720h"
playpath = "upos-hz-tvshenhe.acgvideo.com"
#api = "http://videodispatch-pgc.bilibili.co/v2/playurl"
api = "http://uat-manager.bilibili.co/api/v4/test/playurltest"
qn = "16"
deadcodes = [10005]
[sync.Frequency]
FreModSeason = "20s"
ErrorWait = "10s"
WaitCall = 60
RejectWait = 86400
[sync.API]
AddURL = "http://183.3.131.82:8012/importAgent/bilibili/add2.0"
UpdateURL = "http://uat-manager.bilibili.co/api/v4/test/tvtest"
DelSeasonURL = "http://183.3.131.82:8012/importAgent/bilibili/ps_del2.0"
DelEPURL = "http://183.3.131.82:8012/importAgent/bilibili/p_del2.0"
[ugcSync]
[ugcSync.Frequency]
ErrorWait = 100
ManualFre = "30s"
ImportFre = "30s"
SyncFre = "10s"
TypesCron = "0 */5 * * * *"
UpperRefresh = "10m"
UpInitFre = "10s"
ProducerFre = "2s" # 生产全量缓存时候的间隙时间
UpperPause = "30s"
FullRefreshFre = "2h"
FullRefArcFre = "20ms"
[ugcSync.Batch]
ManualNum = 20
ImportNum = 20
ArcPS = 20
SyncPS = 2
ReportCidPS = 100
ProducerPS = 5 # 生产全量缓存时的ps大小
ReshelfPS = 2
[ugcSync.cfg]
Copyright = "bilibili"
ReportCidURL = "http://172.18.33.140:7703/api/v1/task/push/batch/watermarkless"
BFSPrefix = "http://i0.hdslb.com"
SendUpper = true
CriticalCid = 12780000
ThreadLimit = 1024
[archiveNotifySub]
key = "9765cdac5894f2ba"
secret = "f4237d712c3ed1e7fab0137b81418b14"
group = "ArchiveNotify-MainWebSvr-S"
topic = "ArchiveNotify-T"
action = "sub"
name = "tv-job/archiveNotifysub"
proto = "tcp"
addr = "172.18.33.50:6205"
idle = 1
active = 1
dialTimeout = "1s"
readTimeout = "60s"
writeTimeout = "1s"
idleTimeout = "10s"
[contentSub]
key = "9765cdac5894f2ba"
secret="f4237d712c3ed1e7fab0137b81418b14"
group= "PgcTvBinlog-MainWebSvr-S"
topic= "PgcTvBinlog-T"
action="sub"
name = "tv-job/content-sub" #?
proto = "tcp"
addr = "172.16.33.158:6205"
idle = 100
active = 100
dialTimeout = "1s"
readTimeout = "60s"
writeTimeout = "1s"
idleTimeout = "10s"
[mysql]
addr = "172.16.33.205:3308"
dsn = "test:test@tcp(172.16.33.205:3308)/bilibili_tv?timeout=5s&readTimeout=5s&writeTimeout=5s&parseTime=true&loc=Local&charset=utf8,utf8mb4"
active = 100
idle = 10
queryTimeout = "2s"
execTimeout = "10s"
tranTimeout = "5s"
[mysql.breaker]
window = "3s"
sleep = "100ms"
bucket = 10
ratio = 0.5
request = 100
[log]
dir = "/data/log/tv-job/"
[HTTPServer]
addr = "0.0.0.0:6431"
timeout = "1s"
[memcache]
name = "tv"
proto = "tcp"
#addr = "172.16.33.54:11211"
addr = "127.0.0.1:9393"
idle = 50
active = 100
dialTimeout = "1s"
readTimeout = "1s"
writeTimeout = "1s"
idleTimeout = "10s"
expire = "240h" #都改为10天
expireMedia = "240h" #都改为10天
[playControl]
ProducerCron = "* */10 * * * *"
pieceSize = 50
[httpClient]
key = "cadf599ba8b3796a"
secret = "42fbb979aa742013d713a088f912673b"
dial = "2s"
timeout = "10s"
keepAlive = "60s"
timer = 10
[httpClient.breaker]
window = "10s"
sleep = "100ms"
bucket = 10
ratio = 0.5
request = 100
[redis]
name = "tv-job"
proto = "tcp"
addr = "127.0.0.1:6379"
active = 10
idle = 5
dialTimeout = "1s"
readTimeout = "1s"
writeTimeout = "1s"
idleTimeout = "10s"
expire = "24h"
cronPGC = "0 */5 * * * *"
cronUGC = "59 */5 * * * * "
[archiveRPC]
pullInterval = "10s"
group = "uat"
env = "uat"
timeout = "3s"
[accountRPC]
timeout = "1s"
[ugcSub]
key = "9765cdac5894f2ba"
secret="1448f5f2cd6029f6af6c5d438cd31edd"
group= "UgcTvBinlog-MainWebSvr-S"
topic= "UgcTvBinlog-T"
action="sub"
name = "tv-job/ugc-sub"
proto = "tcp"
addr = "172.16.33.158:6205"
idle = 100
active = 100
dialTimeout = "1s"
readTimeout = "60s"
writeTimeout = "1s"
idleTimeout = "10s"
[dpClient]
key = "8a16e50429e739278daeb4b65fa349f6"
secret="69bbb1c1016d9055fed6202d70b24a05"
dial = "2s"
timeout = "1s"
keepAlive = "60s"
[dpClient.breaker]
window = "10s"
sleep = "100ms"
bucket = 10
ratio = 0.5
request = 100
[report]
reportURI = "http://berserker.bilibili.co/avenger/api/133/query"
upDataURI = "http://data.capture.atianqi.com:8090/xdstvlog"
env = "uat"
cronAc = "0 5 * * * *"
cronAd = "0 15 * * * *"
cronPd = "0 25 * * * *"
cronVe = "0 40 * * * *"
timeDelay = "24h" # 获取昨天的数据
sendDataDelay = "10ms" # 给牌照方发数据
seTimeSpan = "1h" # 获取时间内的数据
routineCount = 5
readSize = 500
expire = "240h"
[style]
labelSpan = "168h"
styleSpan = "5m"

View File

@@ -0,0 +1,46 @@
package(default_visibility = ["//visibility:public"])
load(
"@io_bazel_rules_go//go:def.bzl",
"go_library",
)
go_library(
name = "go_default_library",
srcs = [
"conf.go",
"license.go",
"ugc.go",
],
importpath = "go-common/app/job/main/tv/conf",
tags = ["automanaged"],
visibility = ["//visibility:public"],
deps = [
"//library/cache/memcache:go_default_library",
"//library/cache/redis:go_default_library",
"//library/conf:go_default_library",
"//library/database/sql:go_default_library",
"//library/log:go_default_library",
"//library/net/http/blademaster:go_default_library",
"//library/net/rpc:go_default_library",
"//library/net/rpc/warden:go_default_library",
"//library/net/trace:go_default_library",
"//library/queue/databus:go_default_library",
"//library/time:go_default_library",
"//vendor/github.com/BurntSushi/toml:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,232 @@
package conf
import (
"errors"
"flag"
"go-common/library/cache/memcache"
"go-common/library/cache/redis"
"go-common/library/conf"
"go-common/library/database/sql"
"go-common/library/log"
bm "go-common/library/net/http/blademaster"
"go-common/library/net/rpc"
"go-common/library/net/rpc/warden"
"go-common/library/net/trace"
"go-common/library/queue/databus"
xtime "go-common/library/time"
"github.com/BurntSushi/toml"
)
// Conf global variable.
var (
Conf = &Config{}
client *conf.Client
confPath string
)
// Config struct of conf.
type Config struct {
// base
// log
Log *log.Config
// Databus cfg
ContentSub *databus.Config
ArchiveNotifySub *databus.Config
UgcSub *databus.Config
// tracer
Tracer *trace.Config
// http
HTTPServer *bm.ServerConfig
// db
Mysql *sql.Config
// sync params
Sync *Sync
// memcache
Memcache *Memcache
// redis
Redis *Redis
// playControl related config
PlayControl *PlayControl
// HTTPClient .
HTTPClient *bm.ClientConfig
// Search Cfg
Search *Search
// UgcSync cfg
UgcSync *UgcSync
// grpc
ArcClient *warden.ClientConfig
ArchiveRPC *rpc.ClientConfig
AccClient *warden.ClientConfig
Cfg *Cfg
Report *Report
DpClient *bm.ClientConfig
Style *Style
}
// Style .
type Style struct {
LabelSpan xtime.Duration
StyleSpan xtime.Duration
}
// Report data .
type Report struct {
ReportURI string
UpDataURI string
TimeDelay string
SendDataDelay xtime.Duration
Env string
RoutineCount int
ReadSize int
Expire xtime.Duration
SeTimeSpan xtime.Duration
CronAc string
CronAd string
CronPd string
CronVe string
}
// Cfg contains various of configuration
type Cfg struct {
TitleFilter []string
LessStrategy int
PgcTypes []string // pgc types name, need to filter these ugc archives
PGCZonesID []int // all the zones' ID that need to be loaded
UgcZones map[string]*UgcType
SyncRetry *SyncRetry // sync retry
Merak *Merak
}
// Merak cfg
type Merak struct {
Host string
Key string
Secret string
Names []string
Template string
Title string
Cron string
Onlyfree bool // if true, we consider free+audited episodes, otherwise we consider only audited episodes
}
// SyncRetry def.
type SyncRetry struct {
MaxRetry int // max retry times for pgc already-passed sn & ep
RetryFre xtime.Duration
}
// UgcType def.
type UgcType struct {
TID int32
Name string
}
// Search represents the config for the search suggestion module
type Search struct {
UgcSwitch string // the Ugc search suggest Switch
SugPath string // the tvsug file local path
Md5Path string // the tvsug md5 file local path
FTP *FTP // the ftp info
PgcContPath string // the pgc content file local path
PgcContMd5Path string // the pgc content md5 file local path
UgcContPath string // the ugc content file local path
UgcContMd5Path string // the ugc content md5 file local path
Cfg *SearchCfg
}
//SearchCfg synchronize files time
type SearchCfg struct {
UploadFre xtime.Duration
}
// FTP represents the ftp login info and the remote file layout used when
// uploading the search/suggest data files to the search team's FTP server.
type FTP struct {
	Pass string
	User string
	Host string
	URL string
	Timeout xtime.Duration // timeout in seconds
	UseEPSV bool
	RemoteFName string // file name in remote ftp server
	RemoteMd5 string // md5 file name in remote ftp server
	RemotePgcCont string // pgc file name in remote ftp server
	RemotePgcURL string // RemotePgcURL remote search pgc url dir
	RemotePgcContMd5 string // pgc md5 file name in remote ftp server
	RemoteUgcCont string // ugc file name in remote ftp server
	RemoteUgcURL string // RemoteUgcURL remote search ugc url dir
	RemoteUgcContMd5 string // ugc md5 file name in remote ftp server
}
// Redis redis
type Redis struct {
*redis.Config
Expire xtime.Duration
CronPGC string
CronUGC string
}
// PlayControl is the configuration for the play control interface, related to MC
type PlayControl struct {
ProducerCron string
PieceSize int
}
// Memcache config
type Memcache struct {
*memcache.Config
Expire xtime.Duration
ExpireMedia xtime.Duration
}
// local decodes the TOML file at confPath into the global Conf.
func local() error {
	_, err := toml.DecodeFile(confPath, &Conf)
	return err
}
// remote initializes the config-center client, performs an initial load, and
// then watches for change events in a background goroutine, reloading the
// configuration on each event.
func remote() (err error) {
	if client, err = conf.New(); err != nil {
		return
	}
	if err = load(); err != nil {
		return
	}
	go func() {
		for range client.Event() {
			log.Info("config reload")
			// Fix: log the error returned by THIS reload attempt. The
			// original logged the outer err, which is always nil here.
			if e := load(); e != nil {
				log.Error("config reload error (%v)", e)
			}
		}
	}()
	return
}
func load() (err error) {
var (
s string
ok bool
tmpConf *Config
)
if s, ok = client.Toml2(); !ok {
return errors.New("load config center error")
}
if _, err = toml.Decode(s, &tmpConf); err != nil {
return errors.New("could not decode config")
}
*Conf = *tmpConf
return
}
// init registers the -conf flag holding the local config file path; when the
// flag is left empty, configuration is pulled from the remote config center.
func init() {
	flag.StringVar(&confPath, "conf", "", "default config path")
}
// Init loads the configuration: from the local file when -conf was supplied,
// otherwise from the remote config center.
func Init() error {
	if confPath == "" {
		return remote()
	}
	return local()
}

View File

@@ -0,0 +1,59 @@
package conf
import xtime "go-common/library/time"
// Sync struct defines the parameters for the data sync to license owner
type Sync struct {
HTTPTimeout xtime.Duration
DialTimeout xtime.Duration
LogSize int
LConf LicenseConf // conf for the sync with License Owner
PlayURL PlayURL // playurl config
API LicenseURL // license owner url
Frequency Duration
AuditPrefix string // the prefix for audit pgc data
UGCPrefix string // the prefix for audit ugc data
Sign string
}
// LicenseConf defines the configuration about the comm with the license owner
type LicenseConf struct {
	// how many programs can be contained in one message
	SizeMsg int
	// cpcode recognized by License owner
	CPCode string
	// number of modified season to sync in one time
	NbSeason int
}
// PlayURL defines the conf to have the temp play URL
type PlayURL struct {
Upsigsecret string // key of playurl
Deadline string // deadline of playurl
PlayPath string // path of playurl
API string // the api to get the playurl with CID
Qn string // quality of the video
Deadcodes []int // playurl response codes, for them we think the video is dead and delete it
}
// Duration defines the frequencies of the data sync/wait
type Duration struct {
// Modified Season sync frequency
FreModSeason xtime.Duration
// how much time wait if error
ErrorWait xtime.Duration
// unit: seconds. if it's 3600, that means when we found season is delayed ( not in DB yet ), we postpone all its eps auditing one hour
AuditDelay int64
// unit: seconds. used for rejected season case, we re-audit its content in one day
RejectWait int
// one minute for the data to sync ( avoid selecting the same data )
WaitCall int
}
// LicenseURL defines the API address of the license owner
type LicenseURL struct {
AddURL string
DelSeasonURL string
DelEPURL string
UpdateURL string
}

View File

@@ -0,0 +1,48 @@
package conf
import (
xtime "go-common/library/time"
)
// UgcSync defines the params for ugc sync
type UgcSync struct {
Frequency *UgcFre
Batch *Batch
Cfg *UgcCfg
}
// UgcCfg is for various ugc cfg
type UgcCfg struct {
Copyright string
ReportCidURL string // the url of VideoCloud for reporting Cid
BFSPrefix string
CriticalCid int64 // critical cid 12780000, under it no need to ask for transcoding
ThreadLimit int64 // thread limit
}
// Batch is for the number of data to pick each time
type Batch struct {
ManualNum int // manually added archives
ImportNum int // the number of uppers to import all his video
ArcPS int // the page size to pick the upper's archives
SyncPS int // the page size of sync message ( nb of videos )
ReportCidPS int // the page size to update cid's mark status
ProducerPS int // producer page size
ReshelfPS int // reshelf arc page size
}
// UgcFre defines the ugc sync frequencies
type UgcFre struct {
ErrorWait int // postpone the operation due to error
ManualFre xtime.Duration // re-check the manual import need frequency
ImportFre xtime.Duration // import upper's video frequency
TypesCron string // import the ugc types cron
SyncFre xtime.Duration
UpperRefresh xtime.Duration // upper refresh duration
ReportCid xtime.Duration // 1 minute to check report cid
UpInitFre xtime.Duration // pause between each page of upper's archive
UpperPause xtime.Duration // pause between each import upper
ProducerFre xtime.Duration // producer pause time
FullRefreshFre xtime.Duration // video refresh frequency
FullRefArcFre xtime.Duration // pause between each archive
}

24
app/job/main/tv/dao/BUILD Normal file
View File

@@ -0,0 +1,24 @@
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [
":package-srcs",
"//app/job/main/tv/dao/app:all-srcs",
"//app/job/main/tv/dao/archive:all-srcs",
"//app/job/main/tv/dao/cms:all-srcs",
"//app/job/main/tv/dao/ftp:all-srcs",
"//app/job/main/tv/dao/lic:all-srcs",
"//app/job/main/tv/dao/playurl:all-srcs",
"//app/job/main/tv/dao/report:all-srcs",
"//app/job/main/tv/dao/ugc:all-srcs",
"//app/job/main/tv/dao/upper:all-srcs",
],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,86 @@
package(default_visibility = ["//visibility:public"])
load(
"@io_bazel_rules_go//go:def.bzl",
"go_test",
"go_library",
)
go_test(
name = "go_default_test",
srcs = [
"cont_del_test.go",
"cont_sync_test.go",
"dao_test.go",
"media_cache_test.go",
"new_test.go",
"playurl_test.go",
"redis_test.go",
"search_con_test.go",
"search_sug_test.go",
"season_del_test.go",
"season_sync_test.go",
"sync_mc_test.go",
"sync_retry_test.go",
"zone_index_test.go",
],
embed = [":go_default_library"],
rundir = ".",
tags = ["automanaged"],
deps = [
"//app/job/main/tv/conf:go_default_library",
"//app/job/main/tv/model/common:go_default_library",
"//app/job/main/tv/model/pgc:go_default_library",
"//library/database/sql:go_default_library",
"//vendor/github.com/smartystreets/goconvey/convey:go_default_library",
"//vendor/gopkg.in/h2non/gock.v1:go_default_library",
],
)
go_library(
name = "go_default_library",
srcs = [
"cont_del.go",
"cont_sync.go",
"dao.go",
"media_cache.go",
"playurl.go",
"redis.go",
"search_con.go",
"search_sug.go",
"season_del.go",
"season_sync.go",
"sync_mc.go",
"sync_retry.go",
"zone_index.go",
],
importpath = "go-common/app/job/main/tv/dao/app",
tags = ["automanaged"],
visibility = ["//visibility:public"],
deps = [
"//app/job/main/tv/conf:go_default_library",
"//app/job/main/tv/model/common:go_default_library",
"//app/job/main/tv/model/pgc:go_default_library",
"//library/cache/memcache:go_default_library",
"//library/cache/redis:go_default_library",
"//library/database/sql:go_default_library",
"//library/log:go_default_library",
"//library/net/http/blademaster:go_default_library",
"//library/stat/prom:go_default_library",
"//library/xstr:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,71 @@
package app
import (
"context"
dsql "database/sql"
"fmt"
"time"
model "go-common/app/job/main/tv/model/pgc"
"go-common/library/database/sql"
"go-common/library/log"
"go-common/library/xstr"
)
const (
_deletedContSQL = "SELECT id, title, subtitle, `desc`, cover, epid, season_id, cid FROM tv_content WHERE state = ? AND is_deleted = 1 AND audit_time < UNIX_TIMESTAMP(now()) ORDER BY season_id, epid LIMIT 0,"
_delaySyncSQL = "UPDATE tv_content SET audit_time = ? WHERE epid IN (%s)"
_delSyncContSQL = "UPDATE tv_content SET state = ? WHERE is_deleted = 1 AND epid = ?"
)
// DelCont picks the deleted content data to sync.
// It selects up to Sync.LConf.SizeMsg rows whose state is EPToAudit, marked
// deleted, and whose audit_time has been reached.
func (d *Dao) DelCont(c context.Context) (res []*model.Content, err error) {
	var (
		rows   *sql.Rows
		nbData = d.conf.Sync.LConf.SizeMsg // batch size appended to the LIMIT clause
	)
	if rows, err = d.DB.Query(c, _deletedContSQL+fmt.Sprintf("%d", nbData), EPToAudit); err != nil {
		// Fix: the original log cited _deletedEPSQL, which is not the
		// statement executed here.
		log.Error("d._deletedContSQL.Query error(%v)", err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		var r = &model.Content{}
		if err = rows.Scan(&r.ID, &r.Title, &r.Subtitle, &r.Desc, &r.Cover, &r.EPID, &r.SeasonID, &r.CID); err != nil {
			log.Error("DelCont row.Scan() error(%v)", err)
			return
		}
		res = append(res, r)
	}
	// surface iteration errors that rows.Next() swallowed.
	if err = rows.Err(); err != nil {
		log.Error("d.DelCont.Query error(%v)", err)
	}
	return
}
// SyncCont flags a deleted content row (by epid) as "not pass" so the sync
// loop will not pick it up again; it reports the number of affected rows.
func (d *Dao) SyncCont(c context.Context, cid int) (nbRows int64, err error) {
	var res dsql.Result
	res, err = d.DB.Exec(c, _delSyncContSQL, EPNotPass, cid)
	if err != nil {
		log.Error("_delSyncContSQL, failed to update to auditing: (%v), Error: %v", cid, err)
		return
	}
	return res.RowsAffected()
}
// DelaySync postpones the sync of the deleted contents by pushing their
// audit_time forward by Sync.Frequency.AuditDelay seconds, used when the
// license-owner interface reported an error.
func (d *Dao) DelaySync(c context.Context, conts []*model.Content) (nbRows int64, err error) {
	var (
		res     dsql.Result
		xstrIds []int64
		delay   = time.Now().Unix() + int64(d.conf.Sync.Frequency.AuditDelay) // postpone the season's eps auditing to when
	)
	for _, v := range conts {
		xstrIds = append(xstrIds, int64(v.EPID))
	}
	if res, err = d.DB.Exec(c, fmt.Sprintf(_delaySyncSQL, xstr.JoinInts(xstrIds)), delay); err != nil {
		// Fix: the original log cited _delSyncContSQL, but the statement
		// executed here is _delaySyncSQL.
		log.Error("_delaySyncSQL, failed to delay: (%v,%v), Error: %v", delay, xstrIds, err)
		return
	}
	return res.RowsAffected()
}

View File

@@ -0,0 +1,89 @@
package app
import (
"context"
"fmt"
"testing"
model "go-common/app/job/main/tv/model/pgc"
"github.com/smartystreets/goconvey/convey"
)
// pickDelEpid fetches the epid of one deleted tv_content row, used by the
// tests to build a fixture. A non-nil err is returned on query/scan failure.
func pickDelEpid() (epid int64, err error) {
	// Fix: assign with "=" so the named return err is actually set; the
	// original used ":=", shadowing err and always returning nil to callers.
	if err = d.DB.QueryRow(context.Background(), "select epid from tv_content where is_deleted = 1 limit 1").Scan(&epid); err != nil {
		fmt.Println("Pick EPid Err ", err)
	}
	return
}
// pickDelSid fetches the id of one deleted tv_ep_season row, used by the
// tests to build a fixture. A non-nil err is returned on query/scan failure.
func pickDelSid() (sid int64, err error) {
	// Fix: assign with "=" so the named return err is actually set; the
	// original used ":=", shadowing err and always returning nil to callers.
	if err = d.DB.QueryRow(context.Background(), "select id from tv_ep_season where is_deleted = 1 limit 1").Scan(&sid); err != nil {
		fmt.Println("Pick sid Err ", err)
	}
	return
}
func TestAppDelCont(t *testing.T) {
var (
c = context.Background()
)
convey.Convey("DelCont", t, func(ctx convey.C) {
res, err := d.DelCont(c)
if err == nil && len(res) == 0 {
fmt.Println("No deleted data, let me create one")
epid, errPick := pickDelEpid()
if errPick != nil {
fmt.Println("pick err ", errPick)
return
}
d.DB.Exec(c, "update tv_content set state = 1,audit_time = 0 where epid = ?", epid)
res, err = d.DelCont(c)
}
ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
ctx.So(res, convey.ShouldNotBeNil)
})
})
convey.Convey("DelCont", t, func(ctx convey.C) {
d.conf.Sync.LConf.SizeMsg = -1
_, err := d.DelCont(c)
ctx.So(err, convey.ShouldNotBeNil)
fmt.Println(err)
})
}
func TestAppSyncCont(t *testing.T) {
var (
c = context.Background()
)
convey.Convey("SyncCont", t, func(ctx convey.C) {
epid, errPick := pickDelEpid()
if errPick != nil {
fmt.Println("pick err ", errPick)
return
}
nbRows, err := d.SyncCont(c, int(epid))
ctx.Convey("Then err should be nil.nbRows should not be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
ctx.So(nbRows, convey.ShouldNotBeNil)
})
})
}
func TestAppDelaySync(t *testing.T) {
var c = context.Background()
convey.Convey("DelaySync", t, func(ctx convey.C) {
epid, errPick := pickDelEpid()
if errPick != nil {
fmt.Println("pick err ", errPick)
return
}
conts := []*model.Content{{EPID: int(epid)}}
nbRows, err := d.DelaySync(c, conts)
ctx.Convey("Then err should be nil.nbRows should not be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
ctx.So(nbRows, convey.ShouldNotBeNil)
})
})
}

View File

@@ -0,0 +1,196 @@
package app
import (
"context"
dsql "database/sql"
"fmt"
"time"
model "go-common/app/job/main/tv/model/pgc"
"go-common/library/database/sql"
"go-common/library/log"
"go-common/library/xstr"
)
const (
_pickDataSQL = "SELECT id, title, subtitle, `desc`, cover, epid, season_id, cid FROM tv_content " +
"WHERE season_id = ? AND state = 1 AND audit_time < UNIX_TIMESTAMP(now()) AND is_deleted = 0"
_pickSeasonSQL = "SELECT id,origin_name,title,alias,category,`desc`,style,area,play_time,info,state,total_num,upinfo,staff,role,copyright,`check`,is_deleted,cover,producer,version,status FROM tv_ep_season WHERE id = ?"
_pickEPSQL = "SELECT a.id,a.season_id,a.title,a.long_title,a.cover,a.length,a.cid,b.pay_status FROM tv_ep_content AS a LEFT JOIN tv_content AS b ON a.id=b.epid WHERE a.id=? AND a.is_deleted=0"
_waitCallSQL = "UPDATE tv_content SET audit_time = ? WHERE epid = ? AND state = ? AND is_deleted = 0"
_deleteEPSQL = "UPDATE tv_ep_content SET is_deleted = 1 WHERE season_id = ? AND is_deleted = 0"
_deleteContSQL = "UPDATE tv_content SET is_deleted = 1 WHERE season_id = ? AND is_deleted = 0"
_rejectContSQL = "UPDATE tv_content SET audit_time = ? WHERE season_id = ? AND state = ? AND is_deleted = 0"
_auditingContSQL = "UPDATE tv_content SET state = ? WHERE state = ? AND is_deleted = 0 AND epid IN (%s)"
_removeContSQL = "UPDATE tv_content SET state = ?,is_deleted = 1 WHERE state = ? AND is_deleted = 0 AND epid = ?"
_ContSQL = "SELECT id, title, subtitle, `desc`, cover, epid, season_id, cid FROM tv_content WHERE epid = ?"
_readySns = "SELECT DISTINCT a.season_id FROM tv_content a LEFT JOIN tv_ep_season b ON a.season_id = b.id " +
"WHERE a.state = 1 AND a.is_deleted = 0 AND a.audit_time < UNIX_TIMESTAMP(now()) " +
"AND b.`check` != 0 AND b.is_deleted = 0"
)
// RemoveCont marks the tv_content row of the given ep as not passed
// (EPNotPass) and soft-deleted, but only if it is currently in the
// to-audit state. It returns the number of rows updated.
func (d *Dao) RemoveCont(c context.Context, epid int) (nbRows int64, err error) {
	var (
		res dsql.Result
	)
	if res, err = d.DB.Exec(c, _removeContSQL, EPNotPass, EPToAudit, epid); err != nil {
		// fix: the previous format string had three verbs but was only given err
		log.Error("_removeContSQL, failed to remove epid %d, Error: %v", epid, err)
		return
	}
	return res.RowsAffected()
}
// PickData loads every syncable content row of one season and groups the
// rows into batches of at most conf.Sync.LConf.SizeMsg elements each.
func (d *Dao) PickData(c context.Context, sid int64) (res [][]*model.Content, err error) {
	batchSize := d.conf.Sync.LConf.SizeMsg
	rows, qErr := d.DB.Query(c, _pickDataSQL, sid)
	if qErr != nil {
		err = qErr
		log.Error("d._pickDataSQL error(%v)", err)
		return
	}
	defer rows.Close()
	batch := []*model.Content{}
	for rows.Next() {
		cont := &model.Content{}
		if err = rows.Scan(&cont.ID, &cont.Title, &cont.Subtitle, &cont.Desc, &cont.Cover, &cont.EPID, &cont.SeasonID, &cont.CID); err != nil {
			log.Error("Conts row.Scan() error(%v)", err)
			return
		}
		batch = append(batch, cont)
		// flush a full batch and start a new one
		if len(batch) >= batchSize {
			res = append(res, batch)
			batch = []*model.Content{}
		}
	}
	if err = rows.Err(); err != nil {
		log.Error("d.PickData.Query error(%v)", err)
	}
	// append the trailing, partially-filled batch
	if len(batch) > 0 {
		res = append(res, batch)
	}
	return
}
// Season gets one season's full record by its ID.
// err is sql.ErrNoRows when the season does not exist.
func (d *Dao) Season(c context.Context, sid int) (r *model.TVEpSeason, err error) {
	row := d.DB.QueryRow(c, _pickSeasonSQL, sid)
	r = &model.TVEpSeason{}
	if err = row.Scan(&r.ID, &r.OriginName, &r.Title, &r.Alias, &r.Category, &r.Desc, &r.Style, &r.Area, &r.PlayTime, &r.Info,
		&r.State, &r.TotalNum, &r.Upinfo, &r.Staff, &r.Role, &r.Copyright, &r.Check, &r.IsDeleted, &r.Cover, &r.Producer, &r.Version, &r.Status); err != nil {
		// consistency with EP/Cont: not-found is expected, only log real failures
		if err != sql.ErrNoRows {
			log.Error("Season(%d) row.Scan error(%v)", sid, err)
		}
		return
	}
	return
}
// EP fetches one ep row (joined with its content pay_status) by epid.
// err is sql.ErrNoRows when the ep does not exist.
func (d *Dao) EP(c context.Context, epid int) (r *model.TVEpContent, err error) {
	r = &model.TVEpContent{}
	err = d.DB.QueryRow(c, _pickEPSQL, epid).Scan(
		&r.ID, &r.SeasonID, &r.Title, &r.LongTitle, &r.Cover, &r.Length, &r.CID, &r.PayStatus)
	if err != nil && err != sql.ErrNoRows { // not-found is an expected outcome
		log.Error("row.Scan error(%v)", err)
	}
	return
}
// Cont picks one tv_content row by epid.
// err is sql.ErrNoRows when no such content exists.
func (d *Dao) Cont(c context.Context, epid int) (res *model.Content, err error) {
	res = &model.Content{}
	err = d.DB.QueryRow(c, _ContSQL, epid).Scan(
		&res.ID, &res.Title, &res.Subtitle, &res.Desc, &res.Cover, &res.EPID, &res.SeasonID, &res.CID)
	if err != nil && err != sql.ErrNoRows { // not-found is an expected outcome
		log.Error("row.Scan error(%v)", err)
	}
	return
}
// WaitCall postpones one to-audit ep's audit_time by conf WaitCall seconds
// (waiting for the licensee's callback). Returns the number of rows updated.
func (d *Dao) WaitCall(c context.Context, epid int) (nbRows int64, err error) {
	var (
		res   dsql.Result
		delay = time.Now().Unix() + int64(d.conf.Sync.Frequency.WaitCall)
	)
	if res, err = d.DB.Exec(c, _waitCallSQL, delay, epid, EPToAudit); err != nil {
		// fix: the previous format string had three verbs but was only given err
		log.Error("_waitCallSQL, failed to delay: (%d,%d), Error: %v", epid, delay, err)
		return
	}
	return res.RowsAffected()
}
// DeleteEP soft-deletes every ep row of a deleted season.
// Returns the number of rows updated.
func (d *Dao) DeleteEP(c context.Context, sid int) (nbRows int64, err error) {
	res, execErr := d.DB.Exec(c, _deleteEPSQL, sid)
	if execErr != nil {
		err = execErr
		log.Error("_deleteEPSQL, failed to delete: (%v), Error: %v", sid, err)
		return
	}
	return res.RowsAffected()
}
// DeleteCont soft-deletes every content row of a deleted season.
// Returns the number of rows updated.
func (d *Dao) DeleteCont(c context.Context, sid int) (nbRows int64, err error) {
	res, execErr := d.DB.Exec(c, _deleteContSQL, sid)
	if execErr != nil {
		err = execErr
		log.Error("_deleteContSQL, failed to delete: (%v), Error: %v", sid, err)
		return
	}
	return res.RowsAffected()
}
// RejectCont postpones the season's to-audit contents by the configured
// RejectWait number of seconds. Returns the number of rows updated.
func (d *Dao) RejectCont(c context.Context, sid int) (nbRows int64, err error) {
	delay := time.Now().Unix() + int64(d.conf.Sync.Frequency.RejectWait)
	res, execErr := d.DB.Exec(c, _rejectContSQL, delay, sid, EPToAudit)
	if execErr != nil {
		err = execErr
		log.Error("_rejectContSQL, failed to reject: (%v), Error: %v", sid, err)
		return
	}
	return res.RowsAffected()
}
// AuditingCont moves the given contents' state from 1 (to audit) to
// 2 (auditing) in one batch. Returns the number of rows updated.
func (d *Dao) AuditingCont(c context.Context, conts []*model.Content) (nbRows int64, err error) {
	var (
		res   dsql.Result
		epids []int64
	)
	for _, v := range conts {
		epids = append(epids, int64(v.EPID))
	}
	// guard: an empty slice would render "epid IN ()", which is invalid SQL
	if len(epids) == 0 {
		return
	}
	if res, err = d.DB.Exec(c, fmt.Sprintf(_auditingContSQL, xstr.JoinInts(epids)), EPAuditing, EPToAudit); err != nil {
		log.Error("_auditingContSQL, failed to update to auditing: (%v), Error: %v", epids, err)
		return
	}
	return res.RowsAffected()
}
// ReadySns lists the distinct season ids whose contents are ready to sync
// (to-audit state, audit_time reached, season not rejected nor deleted).
func (d *Dao) ReadySns(c context.Context) (res []int64, err error) {
	rows, qErr := d.DB.Query(c, _readySns)
	if qErr != nil {
		err = qErr
		log.Error("d.ReadySns.Query: %s error(%v)", _readySns, err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		var sid int64
		if err = rows.Scan(&sid); err != nil {
			log.Error("ReadySns row.Scan() error(%v)", err)
			return
		}
		res = append(res, sid)
	}
	if err = rows.Err(); err != nil {
		log.Error("ReadySns rows.Err() error(%v)", err)
	}
	return
}

View File

@@ -0,0 +1,175 @@
package app
import (
"context"
"testing"
model "go-common/app/job/main/tv/model/pgc"
"go-common/library/database/sql"
"github.com/smartystreets/goconvey/convey"
)
// TestAppRemoveCont exercises Dao.RemoveCont against the test DB with a fixed epid.
func TestAppRemoveCont(t *testing.T) {
	var (
		c    = context.Background()
		epid = int(12373)
	)
	convey.Convey("RemoveCont", t, func(ctx convey.C) {
		nbRows, err := d.RemoveCont(c, epid)
		ctx.Convey("Then err should be nil.nbRows should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(nbRows, convey.ShouldNotBeNil)
		})
	})
}
// TestAppPickData exercises Dao.PickData; an empty result set is tolerated.
func TestAppPickData(t *testing.T) {
	var (
		c   = context.Background()
		sid = int64(296)
	)
	convey.Convey("PickData", t, func(ctx convey.C) {
		_, err := d.PickData(c, sid)
		if err == sql.ErrNoRows {
			println(err)
		} else {
			ctx.So(err, convey.ShouldBeNil)
		}
	})
}
// TestAppSeason exercises Dao.Season; a missing season (ErrNoRows) is tolerated.
func TestAppSeason(t *testing.T) {
	var (
		c   = context.Background()
		sid = int(296)
	)
	convey.Convey("Season", t, func(ctx convey.C) {
		r, err := d.Season(c, sid)
		if err == sql.ErrNoRows {
			println(err)
		} else {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(r, convey.ShouldNotBeNil)
		}
	})
}
// TestAppEP exercises Dao.EP; a missing ep (ErrNoRows) is tolerated.
func TestAppEP(t *testing.T) {
	var (
		c    = context.Background()
		epid = int(12373)
	)
	convey.Convey("EP", t, func(ctx convey.C) {
		r, err := d.EP(c, epid)
		if err == sql.ErrNoRows {
			println(err)
		} else {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(r, convey.ShouldNotBeNil)
		}
	})
}
// TestAppCont exercises Dao.Cont; a missing content row (ErrNoRows) is tolerated.
func TestAppCont(t *testing.T) {
	var (
		c    = context.Background()
		epid = int(12373)
	)
	convey.Convey("Cont", t, func(ctx convey.C) {
		res, err := d.Cont(c, epid)
		if err == sql.ErrNoRows {
			println(err)
		} else {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(res, convey.ShouldNotBeNil)
		}
	})
}
// TestAppWaitCall exercises Dao.WaitCall against the test DB with a fixed epid.
func TestAppWaitCall(t *testing.T) {
	var (
		c    = context.Background()
		epid = int(12373)
	)
	convey.Convey("WaitCall", t, func(ctx convey.C) {
		nbRows, err := d.WaitCall(c, epid)
		ctx.Convey("Then err should be nil.nbRows should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(nbRows, convey.ShouldNotBeNil)
		})
	})
}
// TestAppDeleteEP exercises Dao.DeleteEP against the test DB with a fixed sid.
func TestAppDeleteEP(t *testing.T) {
	var (
		c   = context.Background()
		sid = int(296)
	)
	convey.Convey("DeleteEP", t, func(ctx convey.C) {
		nbRows, err := d.DeleteEP(c, sid)
		ctx.Convey("Then err should be nil.nbRows should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(nbRows, convey.ShouldNotBeNil)
		})
	})
}
// TestAppDeleteCont exercises Dao.DeleteCont against the test DB with a fixed sid.
func TestAppDeleteCont(t *testing.T) {
	var (
		c   = context.Background()
		sid = int(296)
	)
	convey.Convey("DeleteCont", t, func(ctx convey.C) {
		nbRows, err := d.DeleteCont(c, sid)
		ctx.Convey("Then err should be nil.nbRows should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(nbRows, convey.ShouldNotBeNil)
		})
	})
}
// TestAppRejectCont exercises Dao.RejectCont against the test DB with a fixed sid.
func TestAppRejectCont(t *testing.T) {
	var (
		c   = context.Background()
		sid = int(296)
	)
	convey.Convey("RejectCont", t, func(ctx convey.C) {
		nbRows, err := d.RejectCont(c, sid)
		ctx.Convey("Then err should be nil.nbRows should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(nbRows, convey.ShouldNotBeNil)
		})
	})
}
// TestAppAuditingCont exercises Dao.AuditingCont with a single fixed epid.
func TestAppAuditingCont(t *testing.T) {
	var (
		c     = context.Background()
		conts []*model.Content
	)
	a := &model.Content{
		EPID: 12373,
	}
	conts = append(conts, a)
	convey.Convey("AuditingCont", t, func(ctx convey.C) {
		_, err := d.AuditingCont(c, conts)
		if err == sql.ErrNoRows {
			println(err)
		} else {
			ctx.So(err, convey.ShouldBeNil)
		}
	})
}
// TestAppReadySns exercises Dao.ReadySns; only the error is asserted.
func TestAppReadySns(t *testing.T) {
	var (
		c = context.Background()
	)
	convey.Convey("ReadySns", t, func(ctx convey.C) {
		_, err := d.ReadySns(c)
		ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}

View File

@@ -0,0 +1,83 @@
package app
import (
"time"
"go-common/app/job/main/tv/conf"
"go-common/library/cache/memcache"
"go-common/library/cache/redis"
"go-common/library/database/sql"
httpx "go-common/library/net/http/blademaster"
"go-common/library/stat/prom"
)
// Dao wraps the resources used by the app dao layer: MySQL, memcache,
// Redis and an outbound HTTP client, plus their configured expirations.
type Dao struct {
	conf *conf.Config
	// DB is the shared MySQL handle.
	DB *sql.DB
	// Memcache pool; mcExpire is the generic TTL, mcMediaExpire the TTL
	// used for media (season/ep CMS) payloads.
	mc            *memcache.Pool
	mcExpire      int32
	mcMediaExpire int32
	// Http client used for outbound API calls (e.g. playurl).
	client *httpx.Client
	// redis pool; redisExpire is the TTL applied to zone index keys.
	redis       *redis.Pool
	redisExpire int32
}
// Prometheus business counters shared by the dao helpers.
var (
	errorsCount = prom.BusinessErrCount
	infosCount  = prom.BusinessInfoCount
)
// PromError increments the business error counter registered under name.
func PromError(name string) {
	errorsCount.Incr(name)
}
// PromInfo increments the business info counter registered under name.
func PromInfo(name string) {
	infosCount.Incr(name)
}
// New builds a Dao from the given configuration, wiring up the MySQL pool,
// the memcache pool (with generic and media expirations), the HTTP client
// and the Redis pool. Expirations are converted from config durations to
// whole seconds.
func New(c *conf.Config) (d *Dao) {
	d = &Dao{
		// conf
		conf: c,
		// db
		DB: sql.NewMySQL(c.Mysql),
		// mc
		mc:            memcache.NewPool(c.Memcache.Config),
		mcExpire:      int32(time.Duration(c.Memcache.Expire) / time.Second),
		mcMediaExpire: int32(time.Duration(c.Memcache.ExpireMedia) / time.Second),
		client:        httpx.NewClient(conf.Conf.HTTPClient),
		redis:         redis.NewPool(c.Redis.Config),
		redisExpire:   int32(time.Duration(c.Redis.Expire) / time.Second),
	}
	return
}
// Close releases every pooled resource held by the dao:
// the MySQL pool, the memcache pool and the Redis pool.
func (d *Dao) Close() {
	if db := d.DB; db != nil {
		db.Close()
	}
	if cache := d.mc; cache != nil {
		cache.Close()
	}
	if pool := d.redis; pool != nil {
		pool.Close()
	}
}
// NumPce returns the number of pages needed to hold count items at
// pagesize items per page (ceiling division). A non-positive pagesize
// yields 0 instead of panicking with a division by zero.
func NumPce(count int, pagesize int) (numPce int) {
	if pagesize <= 0 {
		return 0 // guard: previous version panicked on pagesize == 0
	}
	if count%pagesize == 0 {
		return count / pagesize
	}
	return count/pagesize + 1
}

View File

@@ -0,0 +1,40 @@
package app
import (
"flag"
"os"
"testing"
"go-common/app/job/main/tv/conf"
"gopkg.in/h2non/gock.v1"
)
var (
d *Dao
)
// TestMain bootstraps the test dao: under a deployed environment it loads
// config from the config center, otherwise from a local toml file; it then
// swaps the HTTP transport for gock so outbound calls can be mocked.
func TestMain(m *testing.M) {
	// os.Setenv("DEPLOY_ENV", "")
	if os.Getenv("DEPLOY_ENV") != "" {
		flag.Set("app_id", "main.web-svr.tv-job")
		flag.Set("conf_token", "ab3e9801a77c076b997de0ac5cb21775")
		flag.Set("tree_id", "15260")
		flag.Set("conf_version", "docker-1")
		flag.Set("deploy_env", "uat")
		flag.Set("conf_host", "config.bilibili.co")
		flag.Set("conf_path", "/tmp")
		flag.Set("region", "sh")
		flag.Set("zone", "sh001")
	} else {
		flag.Set("conf", "../../cmd/tv-job-test.toml")
	}
	flag.Parse()
	if err := conf.Init(); err != nil {
		panic(err)
	}
	d = New(conf.Conf)
	d.client.SetTransport(gock.DefaultTransport)
	m.Run()
	os.Exit(0)
}

View File

@@ -0,0 +1,94 @@
package app
import (
"context"
"fmt"
model "go-common/app/job/main/tv/model/pgc"
"go-common/library/cache/memcache"
"go-common/library/database/sql"
"go-common/library/log"
)
const (
	// memcache key templates for season / ep CMS payloads
	_mcSnCMSKey = "sn_cms_%d"
	_mcEPCMSKey = "ep_cms_%d"
	// newest passed & valid ep of a season, by descending `order`
	_newestOrder = "SELECT a.epid,b.`order` FROM tv_content a LEFT JOIN tv_ep_content b ON a.epid=b.id " +
		"WHERE a.season_id=? AND a.state= ? AND a.valid= ? AND a.is_deleted=0 ORDER BY b.`order` DESC LIMIT 1"
	// all content rows of a season (deleted included unless the less strategy is used)
	_AllEPs = "SELECT subtitle, epid FROM tv_content WHERE season_id = ?"
	// strategy flag: exclude soft-deleted rows
	_lessStrategy = 1
)
// SnCMSCacheKey builds the memcache key for one season's CMS payload.
func (d *Dao) SnCMSCacheKey(sid int) string {
	key := fmt.Sprintf(_mcSnCMSKey, sid)
	return key
}
// EpCMSCacheKey builds the memcache key for one ep's CMS payload.
func (d *Dao) EpCMSCacheKey(epid int) string {
	key := fmt.Sprintf(_mcEPCMSKey, epid)
	return key
}
// SetSnCMSCache stores one season's CMS metadata in memcache as JSON,
// keyed by SnCMSCacheKey and expiring after the media TTL.
func (d *Dao) SetSnCMSCache(c context.Context, s *model.SeasonCMS) (err error) {
	conn := d.mc.Get(c)
	defer conn.Close()
	item := &memcache.Item{
		Key:        d.SnCMSCacheKey(s.SeasonID),
		Object:     s,
		Flags:      memcache.FlagJSON,
		Expiration: d.mcMediaExpire,
	}
	if err = conn.Set(item); err != nil {
		log.Error("conn.Set error(%v)", err)
	}
	return
}
// SetEpCMSCache stores one ep's CMS metadata in memcache as JSON,
// keyed by EpCMSCacheKey and expiring after the media TTL.
func (d *Dao) SetEpCMSCache(c context.Context, s *model.EpCMS) (err error) {
	conn := d.mc.Get(c)
	defer conn.Close()
	item := &memcache.Item{
		Key:        d.EpCMSCacheKey(s.EPID),
		Object:     s,
		Flags:      memcache.FlagJSON,
		Expiration: d.mcMediaExpire,
	}
	if err = conn.Set(item); err != nil {
		log.Error("conn.Set error(%v)", err)
	}
	return
}
// NewestOrder picks one season's newest passed & valid ep: it returns that
// ep's id and its `order` column value. Errors (including ErrNoRows) are
// only logged at info level and still returned to the caller.
func (d *Dao) NewestOrder(c context.Context, sid int64) (epid, newestOrder int, err error) {
	if err = d.DB.QueryRow(c, _newestOrder, sid, EPPassed, _CMSValid).Scan(&epid, &newestOrder); err != nil { // newest passed & valid ep of this season
		log.Info("d.NewestOrder(sid %d).Query error(%v)", sid, err)
	}
	return
}
// AllEP picks the eps of a season as EpCMS entries. Note the `subtitle`
// column is scanned into EpCMS.Title (presumably the CMS display title —
// TODO confirm this mapping is intentional). With strategy == _lessStrategy
// soft-deleted rows are excluded; otherwise every row is returned.
func (d *Dao) AllEP(c context.Context, sid int, strategy int) (eps []*model.EpCMS, err error) {
	var (
		rows  *sql.Rows
		query = _AllEPs
	)
	if strategy == _lessStrategy {
		query = _AllEPs + " AND is_deleted = 0" // less strategy
	}
	if rows, err = d.DB.Query(c, query, sid); err != nil {
		log.Error("d.AllEP.Query error(%v)", err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		var r = &model.EpCMS{}
		if err = rows.Scan(&r.Title, &r.EPID); err != nil {
			log.Error("AllEP row.Scan() error(%v)", err)
			return
		}
		eps = append(eps, r)
	}
	if err = rows.Err(); err != nil {
		log.Error("d.AllEp.Query error(%v)", err)
	}
	return
}

View File

@@ -0,0 +1,102 @@
package app
import (
"context"
"fmt"
model "go-common/app/job/main/tv/model/pgc"
"testing"
"github.com/smartystreets/goconvey/convey"
)
// TestAppSnCMSCacheKey checks the season CMS key builder returns a value.
func TestAppSnCMSCacheKey(t *testing.T) {
	var (
		sid = int(0)
	)
	convey.Convey("SnCMSCacheKey", t, func(ctx convey.C) {
		p1 := d.SnCMSCacheKey(sid)
		ctx.Convey("Then p1 should not be nil.", func(ctx convey.C) {
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}
// TestAppEpCMSCacheKey checks the ep CMS key builder returns a value.
func TestAppEpCMSCacheKey(t *testing.T) {
	var (
		epid = int(0)
	)
	convey.Convey("EpCMSCacheKey", t, func(ctx convey.C) {
		p1 := d.EpCMSCacheKey(epid)
		ctx.Convey("Then p1 should not be nil.", func(ctx convey.C) {
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}
// TestAppSetSnCMSCache writes an empty SeasonCMS into memcache and asserts no error.
func TestAppSetSnCMSCache(t *testing.T) {
	var (
		c = context.Background()
		s = &model.SeasonCMS{}
	)
	convey.Convey("SetSnCMSCache", t, func(ctx convey.C) {
		err := d.SetSnCMSCache(c, s)
		ctx.Convey("Then err should be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}
// TestAppSetEpCMSCache writes an empty EpCMS into memcache and asserts no error.
func TestAppSetEpCMSCache(t *testing.T) {
	var (
		c = context.Background()
		s = &model.EpCMS{}
	)
	convey.Convey("SetEpCMSCache", t, func(ctx convey.C) {
		err := d.SetEpCMSCache(c, s)
		ctx.Convey("Then err should be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}
// pickEpSid fetches any season id that has a valid, passed, non-deleted
// content row — used as a live fixture by the tests below.
func pickEpSid() (sid int64, err error) {
	if err = d.DB.QueryRow(context.Background(), "select season_id from tv_content where is_deleted = 0 "+
		"and valid = 1 and state = 3 limit 1").Scan(&sid); err != nil {
		fmt.Println(err)
	}
	return
}
// TestAppNewestOrder exercises Dao.NewestOrder on a live season fixture.
func TestAppNewestOrder(t *testing.T) {
	var c = context.Background()
	convey.Convey("NewestOrder", t, func(ctx convey.C) {
		sid, errPick := pickEpSid()
		if errPick != nil {
			return
		}
		epid, newestOrder, err := d.NewestOrder(c, sid)
		ctx.Convey("Then err should be nil.epid,newestOrder should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(newestOrder, convey.ShouldNotBeNil)
			ctx.So(epid, convey.ShouldNotBeNil)
		})
	})
}
// TestAppAllEP exercises Dao.AllEP on a live season fixture with strategy 0.
func TestAppAllEP(t *testing.T) {
	var (
		c        = context.Background()
		strategy = int(0)
	)
	convey.Convey("AllEP", t, func(ctx convey.C) {
		sid, errPick := pickEpSid()
		if errPick != nil {
			return
		}
		eps, err := d.AllEP(c, int(sid), strategy)
		ctx.Convey("Then err should be nil.eps should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(eps, convey.ShouldNotBeNil)
		})
	})
}

View File

@@ -0,0 +1,42 @@
package app
import (
"testing"
"github.com/smartystreets/goconvey/convey"
)
// TestAppPromError smoke-tests the error counter helper (no return values).
func TestAppPromError(t *testing.T) {
	var (
		name = "test"
	)
	convey.Convey("PromError", t, func(ctx convey.C) {
		PromError(name)
		ctx.Convey("No return values", func(ctx convey.C) {
		})
	})
}
// TestAppPromInfo smoke-tests the info counter helper (no return values).
func TestAppPromInfo(t *testing.T) {
	var (
		name = "test"
	)
	convey.Convey("PromInfo", t, func(ctx convey.C) {
		PromInfo(name)
		ctx.Convey("No return values", func(ctx convey.C) {
		})
	})
}
// TestAppNumPce checks page-count math for an evenly-divisible input.
func TestAppNumPce(t *testing.T) {
	var (
		count    = int(50)
		pagesize = int(10)
	)
	convey.Convey("NumPce", t, func(ctx convey.C) {
		numPce := NumPce(count, pagesize)
		ctx.Convey("Then numPce should not be nil.", func(ctx convey.C) {
			ctx.So(numPce, convey.ShouldNotBeNil)
		})
	})
}

View File

@@ -0,0 +1,42 @@
package app
import (
"context"
"fmt"
"net/url"
model "go-common/app/job/main/tv/model/pgc"
"go-common/library/log"
)
const _type = "mp4"     // requested container type: a single mp4 piece
const _maxBackup = 0    // no backup urls wanted
const _otype = "json"   // response format
// Playurl resolves a cid into a playable video URL through the playurl API.
// It asks for one un-backed-up mp4 piece at the configured quality and
// returns the first durl entry of the response.
func (d *Dao) Playurl(ctx context.Context, cid int) (playurl string, err error) {
	api := d.conf.Sync.PlayURL.API
	params := url.Values{}
	params.Set("cid", fmt.Sprintf("%d", cid))
	params.Set("type", _type)                               // to get one piece
	params.Set("max_backup", fmt.Sprintf("%d", _maxBackup)) // no backup url needed
	params.Set("otype", _otype)                             // json format response
	params.Set("qn", d.conf.Sync.PlayURL.Qn)                // quality fix to 16
	var result model.PlayurlResp
	if err = d.client.Get(ctx, api, "", params, &result); err != nil {
		log.Error("ClientGet error[%v]", err)
		return
	}
	if result.Code != 0 { // logic error
		err = fmt.Errorf("Resp Code:[%v], Message:[%v]", result.Code, result.Message)
		return
	}
	if len(result.Durl) < 1 { // result empty
		err = fmt.Errorf("Playurl Result is Empty! Resp (%v)", result)
		return
	}
	playurl = result.Durl[0].URL
	return
}

View File

@@ -0,0 +1,22 @@
package app
import (
"context"
"testing"
"github.com/smartystreets/goconvey/convey"
)
// TestAppPlayurl exercises Dao.Playurl against the real API with a fixed cid.
func TestAppPlayurl(t *testing.T) {
	var (
		c   = context.Background()
		cid = int(2578706)
	)
	convey.Convey("Playurl", t, func(ctx convey.C) {
		playurl, err := d.Playurl(c, cid)
		ctx.Convey("Then err should be nil.playurl should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(playurl, convey.ShouldNotBeNil)
		})
	})
}

View File

@@ -0,0 +1,124 @@
package app
import (
"context"
"fmt"
"time"
commonMdl "go-common/app/job/main/tv/model/common"
"go-common/library/cache/redis"
"go-common/library/log"
)
// keyZone builds the Redis sorted-set key that indexes one zone.
func keyZone(category int) string {
	return fmt.Sprint("zone_idx_", category)
}
// Flush rebuilds the sorted-set index of one zone in Redis: it expires the
// existing key, queues one ZADD per rank scored by its ctime, re-arms the
// key's TTL and finally reads back the cardinality for logging.
func (d *Dao) Flush(c context.Context, category int, idxRanks []*commonMdl.IdxRank) (err error) {
	var (
		length int64
		conn   = d.redis.Get(c)
		key    = keyZone(category)
	)
	// fix: previous version leaked the connection on every early error return
	defer conn.Close()
	// remove the previous list
	if err = conn.Send("EXPIRE", key, 0); err != nil {
		log.Error("conn.Send(EXPIRE %s) error(%v)", key, err)
		return
	}
	// add new ids inside
	for _, v := range idxRanks {
		ctime := int64(v.Ctime)
		if err = conn.Send("ZADD", key, ctime, v.ID); err != nil {
			// fix: previous log call passed ctime to the error verb and dropped err
			log.Error("conn.Send(ZADD %s %v %v) error(%v)", key, v.ID, ctime, err)
			return
		}
	}
	// set expiration
	if err = conn.Send("EXPIRE", key, d.redisExpire); err != nil {
		log.Error("conn.Send(EXPIRE %s) error(%v)", key, err)
		return
	}
	// ask for the cardinality so the result can be reported
	if err = conn.Send("ZCARD", key); err != nil {
		log.Error("conn.Send(ZCARD %s) error(%v)", key, err)
		return
	}
	// flush the whole pipeline
	if err = conn.Flush(); err != nil {
		log.Error("conn.Flush() error(%v)", err)
		return
	}
	// consume the replies of EXPIRE + len(idxRanks) ZADDs + EXPIRE
	for i := 0; i < len(idxRanks)+2; i++ {
		if _, err = conn.Receive(); err != nil {
			log.Error("conn.Receive() error(%v)", err)
			return
		}
	}
	// the last pending reply is the ZCARD result
	if length, err = redis.Int64(conn.Receive()); err != nil {
		log.Error("conn.Receive() error(%v)", err)
	}
	log.Info("Flush Succ! Zone %d, Items: %d", category, length)
	return
}
// TimeTrans parses a "2006-01-02 15:04:05" timestamp in the local timezone
// and returns its Unix timestamp; values before 1970-01-01 00:00:01 are
// rejected as invalid.
func TimeTrans(stimeStr string) (stime int64, err error) {
	loc, _ := time.LoadLocation("Local")
	parsed, perr := time.ParseInLocation("2006-01-02 15:04:05", stimeStr, loc)
	if perr != nil {
		err = perr
		log.Warn("TimeTrans %s, Error %v", stimeStr, err)
		return
	}
	stime = parsed.Unix()
	if stime < 1 {
		err = fmt.Errorf("time %s transform %d error", stimeStr, stime)
		return
	}
	return
}
// ZAddIdx inserts one season into the zone's sorted-set index, scored by
// its ctime parsed from "2006-01-02 15:04:05" in local time.
func (d *Dao) ZAddIdx(c context.Context, category int, ctimeStr string, id int64) (err error) {
	conn := d.redis.Get(c)
	defer conn.Close()
	key := keyZone(category)
	score, tErr := TimeTrans(ctimeStr)
	if tErr != nil {
		err = tErr
		log.Warn("ZAddIdx Ctime %s Error %v", ctimeStr, err)
		return
	}
	if err = conn.Send("ZADD", key, score, id); err != nil {
		log.Error("conn.Send(ZADD %s - %v) error(%v)", key, id, err)
		return
	}
	if err = conn.Flush(); err != nil {
		log.Error("conn.Flush() error(%v)", err)
		return
	}
	if _, err = conn.Receive(); err != nil {
		log.Error("conn.Receive() error(%v)", err)
		return
	}
	return
}
// ZRemIdx removes one season id from the zone's sorted-set index.
func (d *Dao) ZRemIdx(c context.Context, category int, id int64) (err error) {
	var (
		conn = d.redis.Get(c)
		key  = keyZone(category)
	)
	defer conn.Close()
	if _, err = conn.Do("ZREM", key, id); err != nil {
		// fix: previous message wrongly said ZADD for a ZREM failure
		log.Error("conn.Do(ZREM %s - %v) error(%v)", key, id, err)
	}
	return
}

View File

@@ -0,0 +1,77 @@
package app
import (
"context"
commonMdl "go-common/app/job/main/tv/model/common"
"testing"
"github.com/smartystreets/goconvey/convey"
)
// TestAppkeyZone checks the zone key builder returns a value.
func TestAppkeyZone(t *testing.T) {
	var (
		category = int(0)
	)
	convey.Convey("keyZone", t, func(ctx convey.C) {
		p1 := keyZone(category)
		ctx.Convey("Then p1 should not be nil.", func(ctx convey.C) {
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}
// TestAppFlush rebuilds an empty zone index in Redis and asserts no error.
func TestAppFlush(t *testing.T) {
	var (
		c        = context.Background()
		category = int(0)
		idxRanks = []*commonMdl.IdxRank{}
	)
	convey.Convey("Flush", t, func(ctx convey.C) {
		err := d.Flush(c, category, idxRanks)
		ctx.Convey("Then err should be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}
// TestAppTimeTrans parses a well-formed timestamp string and asserts success.
func TestAppTimeTrans(t *testing.T) {
	var (
		stimeStr = "2018-09-16 08:00:01"
	)
	convey.Convey("TimeTrans", t, func(ctx convey.C) {
		stime, err := TimeTrans(stimeStr)
		ctx.Convey("Then err should be nil.stime should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(stime, convey.ShouldNotBeNil)
		})
	})
}
// TestAppZAddIdx adds one id into the zone index in Redis and asserts no error.
func TestAppZAddIdx(t *testing.T) {
	var (
		c        = context.Background()
		category = int(0)
		ctimeStr = "2018-09-16 08:00:01"
		id       = int64(0)
	)
	convey.Convey("ZAddIdx", t, func(ctx convey.C) {
		err := d.ZAddIdx(c, category, ctimeStr, id)
		ctx.Convey("Then err should be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}
// TestAppZRemIdx removes one id from the zone index in Redis and asserts no error.
func TestAppZRemIdx(t *testing.T) {
	var (
		c        = context.Background()
		category = int(0)
		id       = int64(0)
	)
	convey.Convey("ZRemIdx", t, func(ctx convey.C) {
		err := d.ZRemIdx(c, category, id)
		ctx.Convey("Then err should be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}

View File

@@ -0,0 +1,48 @@
package app
import (
"context"
model "go-common/app/job/main/tv/model/pgc"
"go-common/library/database/sql"
"go-common/library/log"
)
const (
	// shared WHERE clause: passed, valid, not-deleted seasons
	_PgcFromwhere = "FROM tv_ep_season WHERE `check` = 1 AND valid = 1 AND is_deleted = 0 "
	// keyset pagination: seasons with id > ?, limited page size
	_PgcCont = "SELECT id,category,cover,title,play_time,role,staff,`desc` " + _PgcFromwhere + "AND id > ? limit ?"
	// total count of valid seasons
	_PgcContCount = " SELECT count(*) " + _PgcFromwhere
)
// PgcCont pages through valid pgc seasons using keyset pagination:
// it returns up to limit rows whose id is greater than id.
func (d *Dao) PgcCont(ctx context.Context, id int, limit int) (res []*model.SearPgcCon, err error) {
	rows, qErr := d.DB.Query(ctx, _PgcCont, id, limit)
	if qErr != nil {
		err = qErr
		log.Error("d.PgcCont.Query: %s error(%v)", _PgcCont, err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		season := &model.SearPgcCon{}
		if err = rows.Scan(&season.ID, &season.Category, &season.Cover, &season.Title, &season.PlayTime, &season.Role, &season.Staff, &season.Desc); err != nil {
			log.Error("PgcCont row.Scan() error(%v)", err)
			return
		}
		res = append(res, season)
	}
	if err = rows.Err(); err != nil {
		log.Error("d.PgcCont.Query error(%v)", err)
	}
	return
}
// PgcContCount returns the total number of valid pgc seasons
// (passed, valid, not deleted).
func (d *Dao) PgcContCount(ctx context.Context) (upCnt int, err error) {
	row := d.DB.QueryRow(ctx, _PgcContCount)
	if err = row.Scan(&upCnt); err != nil {
		// fix: the message previously named the wrong method (SeaContCount)
		log.Error("d.PgcContCount.Query: %s error(%v)", _PgcContCount, err)
	}
	return
}

View File

@@ -0,0 +1,36 @@
package app
import (
"context"
"testing"
"github.com/smartystreets/goconvey/convey"
)
// TestAppPgcCont pages valid pgc seasons from id 0 and asserts a non-empty result.
func TestAppPgcCont(t *testing.T) {
	var (
		c     = context.Background()
		id    = int(0)
		limit = int(10)
	)
	convey.Convey("PgcCont", t, func(ctx convey.C) {
		res, err := d.PgcCont(c, id, limit)
		ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(res, convey.ShouldNotBeNil)
		})
	})
}
// TestAppPgcContCount asserts the valid-season count query succeeds.
func TestAppPgcContCount(t *testing.T) {
	var (
		c = context.Background()
	)
	convey.Convey("PgcContCount", t, func(ctx convey.C) {
		upCnt, err := d.PgcContCount(c)
		ctx.Convey("Then err should be nil.upCnt should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(upCnt, convey.ShouldNotBeNil)
		})
	})
}

View File

@@ -0,0 +1,68 @@
package app
import (
"context"
model "go-common/app/job/main/tv/model/pgc"
"go-common/library/database/sql"
"go-common/library/log"
)
const (
	// valid pgc seasons / ugc archives used as search-suggest terms
	_pgcSeaSug = "SELECT id,title FROM tv_ep_season WHERE `check` = ? AND valid = ? AND is_deleted = ?"
	_ugcSeaSug = "SELECT aid,title FROM ugc_archive WHERE result=1 AND valid=1 AND deleted=0 "
	// suggestion type labels
	_pgcType = "pgc"
	_ugcType = "ugc"
)
// PgcSeaSug lists passed & valid pgc seasons as search-suggest terms
// tagged with the pgc type.
func (d *Dao) PgcSeaSug(ctx context.Context) (res []*model.SearchSug, err error) {
	rows, qErr := d.DB.Query(ctx, _pgcSeaSug, SeasonPassed, _CMSValid, _NotDeleted)
	if qErr != nil {
		err = qErr
		log.Error("d.PgcSeaSug.Query: %s error(%v)", _pgcSeaSug, err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		sug := &model.SearchSug{Type: _pgcType}
		if err = rows.Scan(&sug.ID, &sug.Term); err != nil {
			log.Error("PgcSeaSug row.Scan() error(%v)", err)
			return
		}
		res = append(res, sug)
	}
	if err = rows.Err(); err != nil {
		log.Error("d.PgcSeaSug.Query error(%v)", err)
	}
	return
}
// UgcSeaSug lists passed & valid ugc archives as search-suggest terms
// tagged with the ugc type.
func (d *Dao) UgcSeaSug(ctx context.Context) (res []*model.SearchSug, err error) {
	rows, qErr := d.DB.Query(ctx, _ugcSeaSug)
	if qErr != nil {
		err = qErr
		log.Error("d.UgcSeaSug.Query: %s error(%v)", _ugcSeaSug, err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		sug := &model.SearchSug{Type: _ugcType}
		if err = rows.Scan(&sug.ID, &sug.Term); err != nil {
			log.Error("UgcSeaSug row.Scan() error(%v)", err)
			return
		}
		res = append(res, sug)
	}
	if err = rows.Err(); err != nil {
		log.Error("d.UgcSeaSug.Query error(%v)", err)
	}
	return
}

View File

@@ -0,0 +1,34 @@
package app
import (
"context"
"testing"
"github.com/smartystreets/goconvey/convey"
)
// TestAppPgcSeaSug asserts the pgc suggest query returns rows without error.
func TestAppPgcSeaSug(t *testing.T) {
	var (
		c = context.Background()
	)
	convey.Convey("PgcSeaSug", t, func(ctx convey.C) {
		res, err := d.PgcSeaSug(c)
		ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(res, convey.ShouldNotBeNil)
		})
	})
}
// TestAppUgcSeaSug asserts the ugc suggest query returns rows without error.
func TestAppUgcSeaSug(t *testing.T) {
	var (
		c = context.Background()
	)
	convey.Convey("UgcSeaSug", t, func(ctx convey.C) {
		res, err := d.UgcSeaSug(c)
		ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(res, convey.ShouldNotBeNil)
		})
	})
}

View File

@@ -0,0 +1,52 @@
package app
import (
"context"
dsql "database/sql"
"fmt"
model "go-common/app/job/main/tv/model/pgc"
"go-common/library/database/sql"
"go-common/library/log"
)
const (
	// deleted seasons awaiting re-audit (limit appended via fmt.Sprintf)
	_deletedSeasonSQL = "SELECT id,origin_name,title,alias,category,`desc`,style,area,play_time,info,state,total_num,upinfo,staff,role,copyright,`check`,is_deleted " +
		"FROM tv_ep_season WHERE is_deleted = 1 AND `check` = ? AND audit_time < UNIX_TIMESTAMP(now()) LIMIT 0,"
	// flip one deleted season's `check` state
	_delSyncSeasonSQL = "UPDATE tv_ep_season SET `check` = ? WHERE is_deleted = 1 AND id = ?"
)
// DelSeason lists up to conf NbSeason deleted seasons whose re-audit
// time has been reached (is_deleted = 1, check = SeasonToReAudit).
func (d *Dao) DelSeason(c context.Context) (res []*model.TVEpSeason, err error) {
	limit := fmt.Sprintf("%d", d.conf.Sync.LConf.NbSeason)
	rows, qErr := d.DB.Query(c, _deletedSeasonSQL+limit, SeasonToReAudit)
	if qErr != nil {
		err = qErr
		log.Error("d._deletedSeasonSQL.Query error(%v)", err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		season := &model.TVEpSeason{}
		if err = rows.Scan(&season.ID, &season.OriginName, &season.Title, &season.Alias, &season.Category, &season.Desc,
			&season.Style, &season.Area, &season.PlayTime, &season.Info, &season.State, &season.TotalNum, &season.Upinfo,
			&season.Staff, &season.Role, &season.Copyright, &season.Check, &season.IsDeleted); err != nil {
			log.Error("modSeason row.Scan() error(%v)", err)
			return
		}
		res = append(res, season)
	}
	if err = rows.Err(); err != nil {
		log.Error("d.DelSeason.Query error(%v)", err)
	}
	return
}
// RejectSeason flips a deleted season back to the rejected `check` state.
// Returns the number of rows updated.
func (d *Dao) RejectSeason(c context.Context, sid int) (nbRows int64, err error) {
	res, execErr := d.DB.Exec(c, _delSyncSeasonSQL, SeasonRejected, sid)
	if execErr != nil {
		err = execErr
		log.Error("_delSyncSeason, failed to update to auditing: (%v), Error: %v", sid, err)
		return
	}
	return res.RowsAffected()
}

View File

@@ -0,0 +1,45 @@
package app
import (
"context"
"fmt"
"testing"
"github.com/smartystreets/goconvey/convey"
)
// TestAppDelSeason exercises Dao.DelSeason; if the queue is empty it forges
// a re-auditable fixture row first and queries again.
func TestAppDelSeason(t *testing.T) {
	var (
		c = context.Background()
	)
	convey.Convey("DelSeason", t, func(ctx convey.C) {
		res, err := d.DelSeason(c)
		if err == nil && len(res) == 0 {
			sid, errPick := pickDelSid()
			if errPick != nil {
				return
			}
			fmt.Println(sid)
			d.DB.Exec(c, "update tv_ep_season set `check` = 2,audit_time = 0 where id = ?", sid)
			res, err = d.DelSeason(c)
		}
		ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(res, convey.ShouldNotBeNil)
		})
	})
}
// TestAppRejectSeason exercises Dao.RejectSeason with a fixed sid.
func TestAppRejectSeason(t *testing.T) {
	var (
		c   = context.Background()
		sid = int(0)
	)
	convey.Convey("RejectSeason", t, func(ctx convey.C) {
		nbRows, err := d.RejectSeason(c, sid)
		ctx.Convey("Then err should be nil.nbRows should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(nbRows, convey.ShouldNotBeNil)
		})
	})
}

View File

@@ -0,0 +1,109 @@
package app
import (
"context"
dsql "database/sql"
"fmt"
"time"
model "go-common/app/job/main/tv/model/pgc"
"go-common/library/database/sql"
"go-common/library/log"
)
// season `check` meaning
const (
	// season `check` states
	SeasonRejected      = 0
	SeasonPassed        = 1
	SeasonToReAudit     = 2
	SeasonAuditing      = 3
	SeasonAlreadyPassed = 7
	// ep content states
	EPToAudit  = 1
	EPAuditing = 2
	EPPassed   = 3
	EPNotPass  = 4
	// cms flags
	_CMSValid   = 1
	_NotDeleted = 0
)
const (
	// modified seasons due for re-audit (limit appended via fmt.Sprintf)
	_modifiedSeason = "SELECT id,origin_name,title,alias,category,`desc`,style,area,play_time,info,state,total_num,upinfo,staff,role,copyright,`check`,is_deleted,cover,producer,version,status" +
		" FROM tv_ep_season WHERE `check` = ? AND audit_time < UNIX_TIMESTAMP(now()) AND is_deleted = 0 LIMIT 0,"
	// any non-deleted content row of a season (existence check)
	_snEmpty = "SELECT id FROM tv_content WHERE season_id = ? AND is_deleted = 0 LIMIT 1"
	// state transitions on one season
	_auditingSeason = "UPDATE tv_ep_season SET `check` = ? WHERE is_deleted = 0 AND id = ?"
	_delaySeason    = "UPDATE tv_ep_season SET audit_time = ? WHERE id = ?"
)
// SnEmpty determines whether the season has no usable content:
// res is true when the season owns no non-deleted tv_content row.
func (d *Dao) SnEmpty(c context.Context, sid int64) (res bool, err error) {
	var epid int
	if err = d.DB.QueryRow(c, _snEmpty, sid).Scan(&epid); err != nil {
		if err == sql.ErrNoRows {
			// no row at all means the season is empty — not an error
			err = nil
			res = true
			return
		}
		log.Error("d.SnEmpty Error (%v)", err)
		return
	}
	res = false
	return
}
// ModSeason picks up to conf NbSeason modified seasons that are due for
// re-audit (check = SeasonToReAudit, audit_time reached, not deleted).
// Seasons without any usable content are not submitted to audit; instead
// they are postponed via DelaySeason.
func (d *Dao) ModSeason(c context.Context) (res []*model.TVEpSeason, err error) {
	var (
		rows    *sql.Rows
		isEmpty bool
	)
	if rows, err = d.DB.Query(c, _modifiedSeason+fmt.Sprintf("%d", d.conf.Sync.LConf.NbSeason), SeasonToReAudit); err != nil {
		log.Error("d._modifiedSeason.Query error(%v)", err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		var r = &model.TVEpSeason{}
		if err = rows.Scan(&r.ID, &r.OriginName, &r.Title, &r.Alias, &r.Category, &r.Desc, &r.Style, &r.Area, &r.PlayTime, &r.Info,
			&r.State, &r.TotalNum, &r.Upinfo, &r.Staff, &r.Role, &r.Copyright, &r.Check, &r.IsDeleted, &r.Cover,
			&r.Producer, &r.Version, &r.Status); err != nil {
			log.Error("modSeason row.Scan() error(%v)", err)
			return
		}
		if isEmpty, err = d.SnEmpty(c, r.ID); err != nil {
			log.Error("modSeason SnEmpty Error (%v)", err)
			return
		}
		if !isEmpty { // we don't submit empty season to audit
			res = append(res, r)
			continue
		}
		// fix: the DelaySeason error was previously dropped silently
		if _, delayErr := d.DelaySeason(c, r.ID); delayErr != nil {
			log.Error("modSeason DelaySeason(%d) error(%v)", r.ID, delayErr)
		}
	}
	if err = rows.Err(); err != nil {
		log.Error("d.ModSeason.Query error(%v)", err)
	}
	return
}
// AuditSeason flags the given season as being in the auditing status and
// returns the number of rows touched by the update.
func (d *Dao) AuditSeason(c context.Context, sid int) (nbRows int64, err error) {
	res, errExec := d.DB.Exec(c, _auditingSeason, SeasonAuditing, sid)
	if errExec != nil {
		err = errExec
		log.Error("_auditingSeason, failed to update to auditing: (%v), Error: %v", sid, err)
		return
	}
	return res.RowsAffected()
}
// DelaySeason postpones the season's next audit pick-up by the configured
// delay (conf.Sync.Frequency.AuditDelay seconds from now).
// It returns the number of rows touched by the update.
func (d *Dao) DelaySeason(c context.Context, sid int64) (nbRows int64, err error) {
	var (
		res   dsql.Result
		delay = time.Now().Unix() + d.conf.Sync.Frequency.AuditDelay
	)
	if res, err = d.DB.Exec(c, _delaySeason, delay, sid); err != nil {
		// fix: the format string expects three operands (delay, sid, err); the
		// original passed only err, producing a corrupted log line
		log.Error("_delaySeason, failed to delay: (%v,%v), Error: %v", delay, sid, err)
		return
	}
	return res.RowsAffected()
}

View File

@@ -0,0 +1,71 @@
package app
import (
"context"
"fmt"
"testing"
"github.com/smartystreets/goconvey/convey"
)
// TestAppSnEmpty checks SnEmpty runs against the test DB without error.
func TestAppSnEmpty(t *testing.T) {
	var (
		c   = context.Background()
		sid = int64(12373)
	)
	convey.Convey("SnEmpty", t, func(ctx convey.C) {
		res, err := d.SnEmpty(c, sid)
		ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(res, convey.ShouldNotBeNil)
		})
	})
}

// TestAppModSeason exercises ModSeason; when no season is pending, it forces
// one into the re-audit state first (test-only data mutation).
func TestAppModSeason(t *testing.T) {
	var c = context.Background()
	convey.Convey("ModSeason", t, func(ctx convey.C) {
		res, err := d.ModSeason(c)
		if len(res) == 0 && err == nil {
			sid, errPick := pickEpSid()
			if errPick != nil {
				return
			}
			fmt.Println(sid)
			d.DB.Exec(c, "update tv_ep_season set `check` = 2,audit_time=0 where id = ?", sid)
			res, err = d.ModSeason(c)
		}
		ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(res, convey.ShouldNotBeNil)
		})
	})
}

// TestAppAuditSeason checks AuditSeason returns without error.
func TestAppAuditSeason(t *testing.T) {
	var (
		c   = context.Background()
		sid = int(12373)
	)
	convey.Convey("AuditSeason", t, func(ctx convey.C) {
		nbRows, err := d.AuditSeason(c, sid)
		ctx.Convey("Then err should be nil.nbRows should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(nbRows, convey.ShouldNotBeNil)
		})
	})
}

// TestAppDelaySeason checks DelaySeason returns without error.
func TestAppDelaySeason(t *testing.T) {
	var (
		c   = context.Background()
		sid = int64(12373)
	)
	convey.Convey("DelaySeason", t, func(ctx convey.C) {
		nbRows, err := d.DelaySeason(c, sid)
		ctx.Convey("Then err should be nil.nbRows should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(nbRows, convey.ShouldNotBeNil)
		})
	})
}

View File

@@ -0,0 +1,162 @@
package app
import (
"context"
"fmt"
model "go-common/app/job/main/tv/model/pgc"
"go-common/library/cache/memcache"
"go-common/library/database/sql"
"go-common/library/log"
)
const (
	// memcache key templates for season and ep auth entries
	_seasonKey = "sn_%d"
	_epKey     = "ep_%d"
	// row counters used to drive the full-refresh paging
	_countEP     = "SELECT COUNT(1) AS cnt FROM tv_content"
	_countSeason = "SELECT COUNT(1) AS cnt FROM tv_ep_season"
	// paging selects: the caller appends the LIMIT row count
	_pickEPMC = "SELECT is_deleted, state,valid, season_id, epid,id, mark, cover, title, subtitle, pay_status FROM tv_content " +
		"WHERE id > ? ORDER BY id LIMIT 0,"
	_pickSeasonMC = "SELECT is_deleted, `check`,valid, id, cover, `desc`, title, upinfo, category, area, play_time, role, staff, total_num, style, alias, origin_name, status FROM tv_ep_season " +
		"WHERE id > ? ORDER BY id LIMIT 0,"
	// _singleSn loads one season's CMS fields by id
	_singleSn = "SELECT id, cover, `desc`, title, upinfo, category, area, play_time, role, staff, total_num, style, alias, origin_name, status FROM tv_ep_season WHERE id = ?"
)
// EpCacheKey builds the memcache key of an ep.
func EpCacheKey(epid int) string {
	key := fmt.Sprintf(_epKey, epid)
	return key
}
// SeasonCacheKey builds the memcache key of a season.
func SeasonCacheKey(sid int) string {
	key := fmt.Sprintf(_seasonKey, sid)
	return key
}
// SetEP stores the ep auth info in MC under EpCacheKey, JSON-encoded,
// with the dao's default expiration.
func (d *Dao) SetEP(ctx context.Context, res *model.SimpleEP) (err error) {
	var (
		key  = EpCacheKey(res.EPID)
		conn = d.mc.Get(ctx)
	)
	defer conn.Close()
	if err = conn.Set(&memcache.Item{Key: key, Object: res, Expiration: d.mcExpire, Flags: memcache.FlagJSON}); err != nil {
		log.Error("conn.Set(%s,%v) error(%v)", key, res, err)
	}
	return
}
// SetSeason stores the season auth info in MC under SeasonCacheKey,
// JSON-encoded, with the dao's default expiration.
func (d *Dao) SetSeason(ctx context.Context, res *model.SimpleSeason) (err error) {
	var (
		key  = SeasonCacheKey(int(res.ID))
		conn = d.mc.Get(ctx)
	)
	defer conn.Close()
	if err = conn.Set(&memcache.Item{Key: key, Object: res, Expiration: d.mcExpire, Flags: memcache.FlagJSON}); err != nil {
		log.Error("conn.Set(%s,%v) error(%v)", key, res, err)
	}
	return
}
// CountEP returns the total number of rows in tv_content.
func (d *Dao) CountEP(ctx context.Context) (count int, err error) {
	err = d.DB.QueryRow(ctx, _countEP).Scan(&count)
	return
}
// CountSeason returns the total number of rows in tv_ep_season.
func (d *Dao) CountSeason(ctx context.Context) (count int, err error) {
	err = d.DB.QueryRow(ctx, _countSeason).Scan(&count)
	return
}
// RefreshEPMC picks ep rows by piece (id > LastID, up to nbData rows) and
// refreshes both the auth cache (SimpleEP) and the CMS meta cache (EpCMS).
// It returns the last id treated so the caller can page through the table.
func (d *Dao) RefreshEPMC(ctx context.Context, LastID int, nbData int) (myLast int, err error) {
	var rows *sql.Rows
	if rows, err = d.DB.Query(ctx, _pickEPMC+fmt.Sprintf("%d", nbData), LastID); err != nil {
		log.Error("d._pickEPMC.Query: %s error(%v)", _pickEPMC+fmt.Sprintf("%d", nbData), err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		var (
			r     = &model.SimpleEP{}
			rMeta = &model.EpCMS{}
		)
		// scan order follows _pickEPMC's column list (auth flags first, then CMS meta)
		if err = rows.Scan(&r.IsDeleted, &r.State, &r.Valid, &r.SeasonID, &r.EPID, &r.ID,
			&r.NoMark, &rMeta.Cover, &rMeta.Title, &rMeta.Subtitle, &rMeta.PayStatus); err != nil {
			log.Error("RefreshEPMC row.Scan() error(%v)", err)
			return
		}
		rMeta.EPID = r.EPID
		myLast = int(r.ID)
		// cache set failures are logged as warnings and do not stop the refresh;
		// err is overwritten by rows.Err() at the end
		if err = d.SetEP(ctx, r); err != nil {
			log.Warn("RefreshEPMC Auth Set EPID (%d), error (%v)", r.EPID, err)
		}
		if err = d.SetEpCMSCache(ctx, rMeta); err != nil {
			log.Warn("RefreshEPMC Meta Set EPID (%d), error (%v)", r.EPID, err)
		}
	}
	if err = rows.Err(); err != nil {
		log.Error("d.RefreshEpMC.Query error(%v)", err)
	}
	return
}
// RefreshSnMC picks season rows by piece (id > LastID, up to nbData rows) and
// refreshes both the auth cache (SimpleSeason) and the CMS meta cache
// (SeasonCMS). It returns the last id treated for paging.
func (d *Dao) RefreshSnMC(ctx context.Context, LastID int, nbData int) (myLast int, err error) {
	var rows *sql.Rows
	if rows, err = d.DB.Query(ctx, _pickSeasonMC+fmt.Sprintf("%d", nbData), LastID); err != nil {
		log.Error("d._pickSeasonMC.Query: %s error(%v)", _pickSeasonMC+fmt.Sprintf("%d", nbData), err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		var (
			auth  = &model.SimpleSeason{}
			media = &model.SeasonCMS{}
		)
		// scan order follows _pickSeasonMC's column list (auth flags first, then CMS meta)
		if err = rows.Scan(&auth.IsDeleted, &auth.Check, &auth.Valid, &auth.ID, &media.Cover,
			&media.Desc, &media.Title, &media.UpInfo, &media.Category, &media.Area, &media.Playtime,
			&media.Role, &media.Staff, &media.TotalNum, &media.Style, &media.Alias, &media.OriginName,
			&media.PayStatus); err != nil { // refresh cache sn logic
			log.Error("RefreshSnMC row.Scan() error(%v)", err)
			return
		}
		media.SeasonID = int(auth.ID)                                        // season_id
		media.NewestEPID, media.NewestOrder, _ = d.NewestOrder(ctx, auth.ID) // newest info, best effort
		myLast = int(auth.ID)
		// cache set failures are logged as warnings and do not stop the refresh;
		// err is overwritten by rows.Err() at the end
		if err = d.SetSeason(ctx, auth); err != nil {
			log.Warn("RefreshSnMC Auth Set Sid (%d), error (%v)", auth.ID, err)
		}
		if err = d.SetSnCMSCache(ctx, media); err != nil {
			log.Warn("RefreshSnMC Meta Set Sid (%d), error (%v)", auth.ID, err)
		}
	}
	if err = rows.Err(); err != nil {
		log.Error("d.RefreshSnMC.Query error(%v)", err)
	}
	return
}
// PickSeason loads one season's CMS info by id; it returns (nil, nil) when
// the season does not exist. NewestEPID/NewestOrder are filled best-effort.
func (d *Dao) PickSeason(ctx context.Context, sid int) (media *model.SeasonCMS, err error) {
	media = &model.SeasonCMS{}
	if err = d.DB.QueryRow(ctx, _singleSn, sid).Scan(&media.SeasonID, &media.Cover,
		&media.Desc, &media.Title, &media.UpInfo, &media.Category, &media.Area, &media.Playtime,
		&media.Role, &media.Staff, &media.TotalNum, &media.Style, &media.Alias, &media.OriginName,
		&media.PayStatus); err != nil { // databus ep logic, with sn
		if err == sql.ErrNoRows {
			// fix: a missing season is an expected case; return it without
			// emitting an error-level log (the original logged before checking)
			err = nil
			media = nil
			return
		}
		log.Error("d.PickSeason Sid %d Error %v", sid, err)
		return
	}
	media.NewestEPID, media.NewestOrder, _ = d.NewestOrder(ctx, int64(media.SeasonID)) // newest info
	return
}

View File

@@ -0,0 +1,130 @@
package app
import (
"context"
model "go-common/app/job/main/tv/model/pgc"
"testing"
"github.com/smartystreets/goconvey/convey"
)
// TestAppEpCacheKey checks the ep cache key builder.
func TestAppEpCacheKey(t *testing.T) {
	var (
		epid = int(0)
	)
	convey.Convey("EpCacheKey", t, func(ctx convey.C) {
		p1 := EpCacheKey(epid)
		ctx.Convey("Then p1 should not be nil.", func(ctx convey.C) {
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// TestAppSeasonCacheKey checks the season cache key builder.
func TestAppSeasonCacheKey(t *testing.T) {
	var (
		sid = int(0)
	)
	convey.Convey("SeasonCacheKey", t, func(ctx convey.C) {
		p1 := SeasonCacheKey(sid)
		ctx.Convey("Then p1 should not be nil.", func(ctx convey.C) {
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// TestAppSetEP checks SetEP stores an ep without error.
func TestAppSetEP(t *testing.T) {
	var (
		ctx = context.Background()
		res = &model.SimpleEP{}
	)
	convey.Convey("SetEP", t, func(cx convey.C) {
		err := d.SetEP(ctx, res)
		cx.Convey("Then err should be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}

// TestAppSetSeason checks SetSeason stores a season without error.
func TestAppSetSeason(t *testing.T) {
	var (
		ctx = context.Background()
		res = &model.SimpleSeason{}
	)
	convey.Convey("SetSeason", t, func(cx convey.C) {
		err := d.SetSeason(ctx, res)
		cx.Convey("Then err should be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}

// TestAppCountEP checks CountEP returns a count without error.
func TestAppCountEP(t *testing.T) {
	var (
		ctx = context.Background()
	)
	convey.Convey("CountEP", t, func(cx convey.C) {
		count, err := d.CountEP(ctx)
		cx.Convey("Then err should be nil.count should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(count, convey.ShouldNotBeNil)
		})
	})
}

// TestAppCountSeason checks CountSeason returns a count without error.
func TestAppCountSeason(t *testing.T) {
	var (
		ctx = context.Background()
	)
	convey.Convey("CountSeason", t, func(cx convey.C) {
		count, err := d.CountSeason(ctx)
		cx.Convey("Then err should be nil.count should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(count, convey.ShouldNotBeNil)
		})
	})
}

// TestAppRefreshEPMC checks one paging pass of the ep cache refresh.
func TestAppRefreshEPMC(t *testing.T) {
	var (
		ctx    = context.Background()
		LastID = int(0)
		nbData = int(0)
	)
	convey.Convey("RefreshEPMC", t, func(cx convey.C) {
		myLast, err := d.RefreshEPMC(ctx, LastID, nbData)
		cx.Convey("Then err should be nil.myLast should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(myLast, convey.ShouldNotBeNil)
		})
	})
}

// TestAppRefreshSnMC checks one paging pass of the season cache refresh.
func TestAppRefreshSnMC(t *testing.T) {
	var (
		ctx    = context.Background()
		LastID = int(0)
		nbData = int(0)
	)
	convey.Convey("RefreshSnMC", t, func(cx convey.C) {
		myLast, err := d.RefreshSnMC(ctx, LastID, nbData)
		cx.Convey("Then err should be nil.myLast should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(myLast, convey.ShouldNotBeNil)
		})
	})
}

// TestAppPickSeason checks PickSeason loads an existing season.
func TestAppPickSeason(t *testing.T) {
	var ctx = context.Background()
	convey.Convey("PickSeason", t, func(cx convey.C) {
		sid, errPick := pickEpSid()
		if errPick != nil {
			return
		}
		media, err := d.PickSeason(ctx, int(sid))
		cx.Convey("Then err should be nil.media should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(media, convey.ShouldNotBeNil)
		})
	})
}

View File

@@ -0,0 +1,58 @@
package app
import (
"context"
"encoding/json"
"go-common/app/job/main/tv/model/common"
"go-common/library/cache/memcache"
"go-common/library/log"
)
// SetRetry saves the retry model to memcache under its MCKey, JSON-encoded,
// with the media expiration TTL.
func (d *Dao) SetRetry(c context.Context, retry *common.SyncRetry) (err error) {
	var conn = d.mc.Get(c)
	defer conn.Close()
	if err = conn.Set(&memcache.Item{Key: retry.MCKey(), Object: retry, Flags: memcache.FlagJSON, Expiration: d.mcMediaExpire}); err != nil {
		log.Error("conn.Set error(%v)", err)
		return
	}
	return
}
// GetRetry gets the retry times stored for the given task key.
// A cache miss is not an error: it returns times == 0 with a nil error.
func (d *Dao) GetRetry(c context.Context, req *common.SyncRetry) (times int, err error) {
	var (
		conn memcache.Conn
		item *memcache.Item
	)
	conn = d.mc.Get(c)
	defer conn.Close()
	if item, err = conn.Get(req.MCKey()); err != nil {
		if err == memcache.ErrNotFound {
			err = nil
			return // 0
		}
		log.Error("GetRetry Req %s, Err %v", req.MCKey(), err)
		return
	}
	// fix: the original unmarshalled into &req (a **SyncRetry), which silently
	// allocated a brand-new struct instead of filling an existing one; decode
	// into a local value and read the counter from it
	var stored common.SyncRetry
	if err = json.Unmarshal(item.Value, &stored); err != nil {
		log.Error("GetRetry Req %s, Json Err %v", req.MCKey(), err)
		return
	}
	times = stored.Retry
	return
}
// DelRetry deletes the retry cache entry from MC.
func (d *Dao) DelRetry(c context.Context, req *common.SyncRetry) (err error) {
	var (
		key  = req.MCKey()
		conn = d.mc.Get(c)
	)
	defer conn.Close()
	if err = conn.Delete(key); err != nil {
		// fix: the original log message said "conn.Set" for a Delete failure
		log.Error("conn.Delete(%s) error(%v)", key, err)
	}
	return
}

View File

@@ -0,0 +1,49 @@
package app
import (
"context"
"go-common/app/job/main/tv/model/common"
"testing"
"github.com/smartystreets/goconvey/convey"
)
// TestAppSetRetry checks SetRetry stores a retry model without error.
func TestAppSetRetry(t *testing.T) {
	var (
		c     = context.Background()
		retry = &common.SyncRetry{}
	)
	convey.Convey("SetRetry", t, func(ctx convey.C) {
		err := d.SetRetry(c, retry)
		ctx.Convey("Then err should be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}

// TestAppGetRetry checks GetRetry reads retry times without error.
func TestAppGetRetry(t *testing.T) {
	var (
		c   = context.Background()
		req = &common.SyncRetry{}
	)
	convey.Convey("GetRetry", t, func(ctx convey.C) {
		times, err := d.GetRetry(c, req)
		ctx.Convey("Then err should be nil.times should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(times, convey.ShouldNotBeNil)
		})
	})
}

// TestAppDelRetry checks DelRetry removes the cache entry without error.
func TestAppDelRetry(t *testing.T) {
	var (
		c   = context.Background()
		req = &common.SyncRetry{}
	)
	convey.Convey("DelRetry", t, func(ctx convey.C) {
		err := d.DelRetry(c, req)
		ctx.Convey("Then err should be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}

View File

@@ -0,0 +1,36 @@
package app
import (
"context"
"go-common/app/job/main/tv/model/common"
"go-common/library/database/sql"
"go-common/library/log"
)
const (
	// _passedSn: audited (`check`), cms-valid seasons of a category, with ctime
	_passedSn = "SELECT id,ctime FROM tv_ep_season WHERE `check` = ? AND valid = ? AND category = ? AND is_deleted = 0"
)
// PassedSn picks the passed and cms-valid seasons of one category, with their
// ctime, to sync to search.
func (d *Dao) PassedSn(c context.Context, category int) (res []*common.IdxRank, err error) {
	var (
		rows *sql.Rows
	)
	if rows, err = d.DB.Query(c, _passedSn, SeasonPassed, _CMSValid, category); err != nil {
		log.Error("d.PassedSn.Query error(%v)", err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		var r = &common.IdxRank{}
		if err = rows.Scan(&r.ID, &r.Ctime); err != nil {
			log.Error("PassedSn row.Scan() error(%v)", err)
			return
		}
		res = append(res, r)
	}
	if err = rows.Err(); err != nil {
		log.Error("d.PassedSn.Query error(%v)", err)
	}
	return
}

View File

@@ -0,0 +1,22 @@
package app
import (
"context"
"testing"
"github.com/smartystreets/goconvey/convey"
)
// TestAppPassedSn checks PassedSn returns passed seasons for a category.
func TestAppPassedSn(t *testing.T) {
	var (
		c        = context.Background()
		category = int(1)
	)
	convey.Convey("PassedSn", t, func(ctx convey.C) {
		res, err := d.PassedSn(c, category)
		ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(res, convey.ShouldNotBeNil)
		})
	})
}

View File

@@ -0,0 +1,53 @@
# Bazel targets for go-common/app/job/main/tv/dao/archive (tags are gazelle-automanaged).
package(default_visibility = ["//visibility:public"])

load(
    "@io_bazel_rules_go//go:def.bzl",
    "go_library",
    "go_test",
)

# Library: archive memcache dao.
go_library(
    name = "go_default_library",
    srcs = [
        "archive.go",
        "dao.go",
    ],
    importpath = "go-common/app/job/main/tv/dao/archive",
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
    deps = [
        "//app/job/main/tv/conf:go_default_library",
        "//app/service/main/archive/api:go_default_library",
        "//library/cache/memcache:go_default_library",
        "//vendor/github.com/pkg/errors:go_default_library",
    ],
)

# Source groups consumed by the tree-wide build.
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)

filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

# Unit tests for the archive dao.
go_test(
    name = "go_default_test",
    srcs = [
        "archive_test.go",
        "dao_test.go",
    ],
    embed = [":go_default_library"],
    tags = ["automanaged"],
    deps = [
        "//app/job/main/tv/conf:go_default_library",
        "//app/service/main/archive/api:go_default_library",
        "//vendor/github.com/smartystreets/goconvey/convey:go_default_library",
    ],
)

View File

@@ -0,0 +1,46 @@
package archive
import (
"context"
"strconv"
arccli "go-common/app/service/main/archive/api"
"go-common/library/cache/memcache"
"github.com/pkg/errors"
)
const (
	// memcache key prefixes: archive ("a3p_") and view ("avp_") entries
	_prefixArc  = "a3p_"
	_prefixView = "avp_"
)
// keyArc builds the memcache key of an archive.
func keyArc(aid int64) string {
	id := strconv.FormatInt(aid, 10)
	return _prefixArc + id
}
// keyView builds the memcache key of a view.
func keyView(aid int64) string {
	id := strconv.FormatInt(aid, 10)
	return _prefixView + id
}
// UpArcCache updates the archive cache entry (JSON-encoded, never expires).
func (d *Dao) UpArcCache(c context.Context, a *arccli.Arc) (err error) {
	conn := d.mc.Get(c)
	defer conn.Close()
	item := &memcache.Item{Key: keyArc(a.Aid), Object: a, Flags: memcache.FlagJSON, Expiration: 0}
	if err = conn.Set(item); err != nil {
		err = errors.Wrapf(err, "conn.Set(%v)", item)
	}
	return
}
// UpViewCache updates the view cache entry (JSON-encoded, never expires).
func (d *Dao) UpViewCache(c context.Context, v *arccli.ViewReply) (err error) {
	conn := d.mc.Get(c)
	defer conn.Close()
	item := &memcache.Item{Key: keyView(v.Arc.Aid), Object: v, Flags: memcache.FlagJSON, Expiration: 0}
	if err = conn.Set(item); err != nil {
		err = errors.Wrapf(err, "conn.Set(%v)", item)
	}
	return
}

View File

@@ -0,0 +1,64 @@
package archive
import (
"context"
"testing"
arccli "go-common/app/service/main/archive/api"
"github.com/smartystreets/goconvey/convey"
)
// TestArchivekeyArc checks the archive cache key builder.
func TestArchivekeyArc(t *testing.T) {
	var (
		aid = int64(0)
	)
	convey.Convey("keyArc", t, func(ctx convey.C) {
		p1 := keyArc(aid)
		ctx.Convey("Then p1 should not be nil.", func(ctx convey.C) {
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// TestArchivekeyView checks the view cache key builder.
func TestArchivekeyView(t *testing.T) {
	var (
		aid = int64(0)
	)
	convey.Convey("keyView", t, func(ctx convey.C) {
		p1 := keyView(aid)
		ctx.Convey("Then p1 should not be nil.", func(ctx convey.C) {
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// TestArchiveUpArcCache checks UpArcCache stores an archive without error.
func TestArchiveUpArcCache(t *testing.T) {
	var (
		c = context.Background()
		a = &arccli.Arc{}
	)
	convey.Convey("UpArcCache", t, func(ctx convey.C) {
		err := d.UpArcCache(c, a)
		ctx.Convey("Then err should be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}

// TestArchiveUpViewCache checks UpViewCache stores a view reply without error.
func TestArchiveUpViewCache(t *testing.T) {
	var (
		c = context.Background()
		v = &arccli.ViewReply{
			Arc: &arccli.Arc{
				Aid: 123,
			},
		}
	)
	convey.Convey("UpViewCache", t, func(ctx convey.C) {
		err := d.UpViewCache(c, v)
		ctx.Convey("Then err should be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}

View File

@@ -0,0 +1,25 @@
package archive
import (
"time"
"go-common/app/job/main/tv/conf"
"go-common/library/cache/memcache"
)
// Dao is archive dao.
type Dao struct {
	// memcache
	mc *memcache.Pool
	// expireView holds conf.Memcache.ExpireMedia converted to seconds.
	// NOTE(review): the cache writers in this file set Expiration: 0 (never
	// expire) and do not read this field — confirm the intended TTL usage.
	expireView int32
}
// New creates an archive dao backed by the configured memcache pool.
func New(c *conf.Config) (d *Dao) {
	return &Dao{
		// memcache
		mc:         memcache.NewPool(c.Memcache.Config),
		expireView: int32(time.Duration(c.Memcache.ExpireMedia) / time.Second),
	}
}

View File

@@ -0,0 +1,34 @@
package archive
import (
"flag"
"go-common/app/job/main/tv/conf"
"os"
"testing"
)
// d is the package-wide dao under test.
var (
	d *Dao
)

// TestMain boots the test configuration (remote config in deploy envs, local
// toml otherwise), builds the dao and runs the suite.
func TestMain(m *testing.M) {
	if os.Getenv("DEPLOY_ENV") != "" {
		flag.Set("app_id", "main.web-svr.tv-job")
		flag.Set("conf_token", "ab3e9801a77c076b997de0ac5cb21775")
		flag.Set("tree_id", "15260")
		flag.Set("conf_version", "docker-1")
		flag.Set("deploy_env", "uat")
		flag.Set("conf_host", "config.bilibili.co")
		flag.Set("conf_path", "/tmp")
		flag.Set("region", "sh")
		flag.Set("zone", "sh001")
	} else {
		flag.Set("conf", "../../cmd/tv-job-test.toml")
	}
	flag.Parse()
	if err := conf.Init(); err != nil {
		panic(err)
	}
	d = New(conf.Conf)
	os.Exit(m.Run())
}

View File

@@ -0,0 +1,58 @@
# Bazel targets for go-common/app/job/main/tv/dao/cms (tags are gazelle-automanaged).
package(default_visibility = ["//visibility:public"])

load(
    "@io_bazel_rules_go//go:def.bzl",
    "go_test",
    "go_library",
)

# Unit tests for the cms dao (HTTP mocked via gock).
go_test(
    name = "go_default_test",
    srcs = [
        "dao_test.go",
        "merak_test.go",
        "shelve_test.go",
    ],
    embed = [":go_default_library"],
    tags = ["automanaged"],
    deps = [
        "//app/job/main/tv/conf:go_default_library",
        "//library/database/sql:go_default_library",
        "//vendor/github.com/smartystreets/goconvey/convey:go_default_library",
        "//vendor/gopkg.in/h2non/gock.v1:go_default_library",
    ],
)

# Library: cms dao (shelving, Merak notifications).
go_library(
    name = "go_default_library",
    srcs = [
        "dao.go",
        "merak.go",
        "shelve.go",
    ],
    importpath = "go-common/app/job/main/tv/dao/cms",
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
    deps = [
        "//app/job/main/tv/conf:go_default_library",
        "//library/database/sql:go_default_library",
        "//library/log:go_default_library",
        "//library/net/http/blademaster:go_default_library",
        "//library/xstr:go_default_library",
        "//vendor/github.com/pkg/errors:go_default_library",
    ],
)

# Source groups consumed by the tree-wide build.
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)

filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,24 @@
package cms
import (
"go-common/app/job/main/tv/conf"
"go-common/library/database/sql"
httpx "go-common/library/net/http/blademaster"
)
// Dao groups the cms dao's resources: config, MySQL handle and HTTP client.
type Dao struct {
	conf   *conf.Config
	DB     *sql.DB
	client *httpx.Client
}
// New creates an instance of Dao with its MySQL handle and HTTP client
// built from the given configuration, and returns it.
func New(c *conf.Config) (d *Dao) {
	d = &Dao{
		conf: c,
		DB:   sql.NewMySQL(c.Mysql),
		// fix: build the client from the passed-in config rather than the
		// global conf.Conf, so the constructor honors its argument
		client: httpx.NewClient(c.HTTPClient),
	}
	return
}

View File

@@ -0,0 +1,37 @@
package cms
import (
"flag"
"os"
"testing"
"go-common/app/job/main/tv/conf"
"gopkg.in/h2non/gock.v1"
)
// d is the package-wide dao under test.
var d *Dao

// TestMain boots the test configuration (remote config in deploy envs, local
// toml otherwise), builds the dao with a gock-instrumented HTTP transport and
// runs the suite.
func TestMain(m *testing.M) {
	if os.Getenv("DEPLOY_ENV") != "" {
		flag.Set("app_id", "main.web-svr.tv-job")
		flag.Set("conf_token", "ab3e9801a77c076b997de0ac5cb21775")
		flag.Set("tree_id", "15260")
		flag.Set("conf_version", "docker-1")
		flag.Set("deploy_env", "uat")
		flag.Set("conf_host", "config.bilibili.co")
		flag.Set("conf_path", "/tmp")
		flag.Set("region", "sh")
		flag.Set("zone", "sh001")
	} else {
		flag.Set("conf", "../../cmd/tv-job-test.toml")
	}
	flag.Parse()
	if err := conf.Init(); err != nil {
		panic(err)
	}
	d = New(conf.Conf)
	d.client.SetTransport(gock.DefaultTransport)
	// fix: propagate the suite result; the original ran m.Run() and then always
	// exited 0, masking test failures (sibling TestMains use os.Exit(m.Run()))
	os.Exit(m.Run())
}

View File

@@ -0,0 +1,84 @@
package cms
import (
"bytes"
"context"
"crypto/sha1"
"encoding/hex"
"encoding/json"
"net/http"
"sort"
"strings"
"go-common/library/log"
"github.com/pkg/errors"
)
// MerakNotify sends a wechat notification through the Merak service: it signs
// the request parameters with MerakSign, POSTs them as JSON to cfg.Host and
// treats any non-zero RetCode in the response as an error.
func (d *Dao) MerakNotify(ctx context.Context, title, content string) (err error) {
	var (
		cfg  = d.conf.Cfg.Merak
		sign string
		req  *http.Request
		body []byte
	)
	params := map[string]string{
		"Action":    "CreateWechatMessage",
		"PublicKey": cfg.Key,
		"UserName":  strings.Join(cfg.Names, ","),
		"Title":     title,
		"Content":   content,
		"TreeId":    "",
	}
	if sign, err = MerakSign(params, cfg.Secret); err != nil {
		log.Error("MerakNotify Failed to sign params: %+v: %+v", params, err)
		return err
	}
	params["Signature"] = sign
	if body, err = json.Marshal(params); err != nil {
		log.Error("MerakNotify Json %v, Err %v", params, err)
		return
	}
	if req, err = http.NewRequest(http.MethodPost, cfg.Host, bytes.NewReader(body)); err != nil {
		log.Error("MerakNotify NewRequest Err %v, Host %v", err, cfg.Host)
		return
	}
	req.Header.Set("Content-Type", "application/json; charset=utf-8")
	// minimal response envelope; Data is decoded but otherwise unused
	res := struct {
		Action  string   `json:"Action"`
		RetCode int      `json:"RetCode"`
		Data    []string `json:"Data"`
	}{}
	if err = d.client.Do(ctx, req, &res); err != nil {
		return
	}
	if res.RetCode != 0 {
		err = errors.Errorf("Merak error: %d", res.RetCode)
		log.Error("Failed to send notify by merak with params: %+v: %+v", string(body), err)
		return
	}
	return
}
// MerakSign is used to sign for merak wechat msg
func MerakSign(params map[string]string, secret string) (string, error) {
keys := make([]string, 0, len(params))
for k := range params {
keys = append(keys, k)
}
sort.Strings(keys)
buf := bytes.Buffer{}
for _, k := range keys {
buf.WriteString(k + params[k])
}
h := sha1.New()
if _, err := h.Write(buf.Bytes()); err != nil {
return "", errors.WithStack(err)
}
if _, err := h.Write([]byte(secret)); err != nil {
return "", errors.WithStack(err)
}
sum := h.Sum(nil)
return hex.EncodeToString(sum), nil
}

View File

@@ -0,0 +1,50 @@
package cms
import (
"context"
"fmt"
"strings"
"testing"
"github.com/smartystreets/goconvey/convey"
)
// TestDaoSendWechat checks MerakNotify sends a message without error.
func TestDaoSendWechat(t *testing.T) {
	convey.Convey("SendWechat", t, func(ctx convey.C) {
		var (
			content = "测试内容"
			title   = "测试标题"
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			// fix: MerakNotify's signature is (ctx, title, content); the
			// original call passed the two arguments swapped
			err := d.MerakNotify(context.Background(), title, content)
			fmt.Println(err)
			ctx.Convey("Then err should be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
			})
		})
	})
}
// TestDaoGetMerakSign pins the signature of a known parameter set.
func TestDaoGetMerakSign(t *testing.T) {
	convey.Convey("sign", t, func(ctx convey.C) {
		var (
			params = map[string]string{
				"Action":    "CreateWechatMessage",
				"PublicKey": "1",
				"UserName":  strings.Join([]string{"user1", "user2"}, ","),
				"Title":     "测试标题",
				"Content":   "测试内容",
				"TreeId":    "",
			}
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			p1, err := MerakSign(params, "secret")
			ctx.Convey("Then err should be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
			})
			ctx.Convey("Then p1 should not be nil.", func(ctx convey.C) {
				ctx.So(p1, convey.ShouldEqual, "59cd4e74b225a7d326ee7d6c89bf27cf2f6015dc")
			})
		})
	})
}

View File

@@ -0,0 +1,117 @@
package cms
import (
"context"
"fmt"
"go-common/library/database/sql"
"go-common/library/log"
"go-common/library/xstr"
)
const (
	// _validSns: non-deleted, online seasons owning at least one online & passed ep
	_validSns = "SELECT DISTINCT a.id FROM tv_ep_season a LEFT JOIN tv_content b ON a.id = b.season_id " +
		"WHERE a.is_deleted = 0 AND a.`check` = ? AND b.is_deleted = 0 AND b.valid = ? AND b.state = ?"
	// _allPassedSns: every audited season with its current shelf status
	_allPassedSns = "SELECT id, valid FROM tv_ep_season WHERE is_deleted = 0 AND `check` = 1"
	// _actSns/_offArcs/_reshelfArcs expect a non-empty, comma-joined id list
	_actSns      = "UPDATE tv_ep_season SET valid = ? WHERE id IN (%s)"
	_offArcs     = "SELECT aid FROM ugc_archive WHERE aid IN (%s) AND valid = 0 AND deleted = 0 AND result = 1 "
	_reshelfArcs = "UPDATE ugc_archive SET valid = 1 WHERE aid IN (%s)"
	// cms shelf statuses and the ep passed state
	_cmsOnline  = 1
	_cmsOffline = 0
	_epPassed   = 3
)
// ValidSns gets all the seasons that should be on the shelves: non-deleted,
// online seasons owning at least one online & passed ep. When onlyfree is
// true, only free eps (pay_status = 2) qualify the season.
// The result is a set: sid -> 1.
func (d *Dao) ValidSns(ctx context.Context, onlyfree bool) (res map[int64]int, err error) {
	var (
		rows     *sql.Rows
		validSql = _validSns
	)
	res = make(map[int64]int)
	if onlyfree {
		validSql = validSql + " AND b.pay_status = 2" // free episode
	}
	if rows, err = d.DB.Query(ctx, validSql, _cmsOnline, _cmsOnline, _epPassed); err != nil {
		log.Error("d.ValidSns.Query: %s error(%v)", validSql, err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		var sid int64
		if err = rows.Scan(&sid); err != nil {
			log.Error("ValidSns row.Scan() error(%v)", err)
			return
		}
		res[sid] = 1
	}
	if err = rows.Err(); err != nil {
		// fix: the log tag said "d.PgcCont.Query", which belongs to another method
		log.Error("d.ValidSns rows.Err error(%v)", err)
	}
	return
}
// ShelveOp compares every audited season's current valid flag with the
// validSns set (built by ValidSns) and returns the ids to put on shelves
// (onIDs) and to take off shelves (offIDs).
func (d *Dao) ShelveOp(ctx context.Context, validSns map[int64]int) (onIDs, offIDs []int64, err error) {
	var rows *sql.Rows
	if rows, err = d.DB.Query(ctx, _allPassedSns); err != nil {
		log.Error("d.ShelveOp.Query: %s error(%v)", _allPassedSns, err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		var sid, valid int64
		if err = rows.Scan(&sid, &valid); err != nil {
			// fix: the log tag said "ValidSns", copied from the sibling method
			log.Error("ShelveOp row.Scan() error(%v)", err)
			return
		}
		_, ok := validSns[sid]
		if ok && valid == 0 { // should be online but currently off shelves
			onIDs = append(onIDs, sid)
		}
		if !ok && valid == 1 { // should be offline but currently on shelves
			offIDs = append(offIDs, sid)
		}
	}
	if err = rows.Err(); err != nil {
		// fix: the log tag said "d.PgcCont.Query", which belongs to another method
		log.Error("d.ShelveOp rows.Err error(%v)", err)
	}
	return
}
// ActOps carries out the shelf action on the given seasons: on == true puts
// them online, otherwise offline. An empty id list is a no-op: the original
// interpolated it into the query and produced invalid "IN ()" SQL.
func (d *Dao) ActOps(ctx context.Context, ids []int64, on bool) (err error) {
	if len(ids) == 0 { // nothing to act on; avoid generating "IN ()" SQL
		return
	}
	var action int
	if on {
		action = _cmsOnline
	} else {
		action = _cmsOffline
	}
	if _, err = d.DB.Exec(ctx, fmt.Sprintf(_actSns, xstr.JoinInts(ids)), action); err != nil {
		log.Error("ActOps, Ids %v, Err %v", ids, err)
	}
	return
}
// OffArcs takes, among the given aids, those that passed review (result = 1)
// but are CMS-invalid (valid = 0) and not deleted.
// NOTE(review): an empty aids slice yields "IN ()", which is invalid SQL and
// surfaces as a query error — the unit test relies on that behavior.
func (d *Dao) OffArcs(ctx context.Context, aids []int64) (offAids []int64, err error) {
	var rows *sql.Rows
	if rows, err = d.DB.Query(ctx, fmt.Sprintf(_offArcs, xstr.JoinInts(aids))); err != nil {
		return
	}
	defer rows.Close()
	for rows.Next() {
		var aid int64
		if err = rows.Scan(&aid); err != nil {
			return
		}
		offAids = append(offAids, aid)
	}
	err = rows.Err()
	return
}
// ReshelfArcs re-puts the archives on shelf (CMS valid = 1).
// NOTE(review): like OffArcs, an empty aids slice yields invalid "IN ()" SQL
// and returns an error — the unit test relies on that behavior.
func (d *Dao) ReshelfArcs(ctx context.Context, aids []int64) (err error) {
	_, err = d.DB.Exec(ctx, fmt.Sprintf(_reshelfArcs, xstr.JoinInts(aids)))
	return
}

View File

@@ -0,0 +1,117 @@
package cms
import (
"context"
"fmt"
"testing"
"go-common/library/database/sql"
"github.com/smartystreets/goconvey/convey"
)
// TestCmsValidSns checks ValidSns in both modes (all audited / free only).
func TestCmsValidSns(t *testing.T) {
	var (
		ctx = context.Background()
	)
	convey.Convey("ValidSns", t, func(cx convey.C) {
		res, err := d.ValidSns(ctx, false)
		cx.Convey("Consider Audited, Then err should be nil.res should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(res, convey.ShouldNotBeNil)
			fmt.Println(len(res))
		})
		res, err = d.ValidSns(ctx, true)
		cx.Convey("Consider Audited and Free, Then err should be nil.res should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(res, convey.ShouldNotBeNil)
			fmt.Println(len(res))
		})
	})
}

// TestCmsShelveOp checks ShelveOp splits seasons into on/off id lists.
func TestCmsShelveOp(t *testing.T) {
	var (
		ctx         = context.Background()
		validSns, _ = d.ValidSns(ctx, true)
	)
	convey.Convey("ShelveOp", t, func(cx convey.C) {
		onIDs, offIDs, err := d.ShelveOp(ctx, validSns)
		cx.Convey("Then err should be nil.onIDs,offIDs should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(offIDs, convey.ShouldNotBeNil)
			ctx.So(onIDs, convey.ShouldNotBeNil)
			fmt.Println(offIDs)
			fmt.Println(onIDs)
		})
	})
}

// TestCmsActOps toggles one online season off and back on.
func TestCmsActOps(t *testing.T) {
	var (
		ctx = context.Background()
		sid int64
	)
	d.DB.QueryRow(ctx, "SELECT id FROM tv_ep_season WHERE valid = 1 and is_deleted = 0 AND `check` = 1 LIMIT 1").Scan(&sid)
	convey.Convey("ActOps", t, func(cx convey.C) {
		err := d.ActOps(ctx, []int64{sid}, false)
		cx.Convey("Action 0 Then err should be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
		err = d.ActOps(ctx, []int64{sid}, true)
		cx.Convey("Action 1 Then err should be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
		fmt.Println(sid)
	})
}

// TestCmsOffArcs covers the happy path, the empty-argument error path and a
// DB error path (the pool is closed and rebuilt afterwards).
func TestCmsOffArcs(t *testing.T) {
	var (
		ctx = context.Background()
		aid int64
	)
	convey.Convey("OffArcs", t, func(cx convey.C) {
		cx.Convey("Then err should be nil.offAids should not be nil.", func(cx convey.C) {
			if err := d.DB.QueryRow(ctx, "select aid from ugc_archive where deleted = 0 and valid = 0 and result = 1 limit 1").Scan(&aid); err != nil {
				offAids, err := d.OffArcs(context.Background(), []int64{1, 2, 3})
				cx.So(err, convey.ShouldBeNil)
				cx.So(offAids, convey.ShouldBeNil)
			} else {
				fmt.Println("Have Aid ", aid)
				offAids, err := d.OffArcs(context.Background(), []int64{1, 2, 3, aid})
				cx.So(err, convey.ShouldBeNil)
				cx.So(offAids, convey.ShouldNotBeNil)
			}
		})
		cx.Convey("Arg Error", func(cx convey.C) {
			// empty aids produce invalid "IN ()" SQL, expected to error
			_, err := d.OffArcs(context.Background(), []int64{})
			cx.So(err, convey.ShouldNotBeNil)
		})
		cx.Convey("DB Error", func(cx convey.C) {
			d.DB.Close()
			_, err := d.OffArcs(context.Background(), []int64{1, 2, 3})
			cx.So(err, convey.ShouldNotBeNil)
			d.DB = sql.NewMySQL(d.conf.Mysql)
		})
	})
}

// TestCmsReshelfArcs covers the happy path, the empty-argument error path and
// a DB error path (the pool is closed and rebuilt afterwards).
func TestCmsReshelfArcs(t *testing.T) {
	convey.Convey("OffArcs", t, func(cx convey.C) {
		cx.Convey("Then err should be nil.offAids should not be nil.", func(cx convey.C) {
			err := d.ReshelfArcs(context.Background(), []int64{1, 2, 3})
			cx.So(err, convey.ShouldBeNil)
		})
		cx.Convey("Arg Error", func(cx convey.C) {
			// empty aids produce invalid "IN ()" SQL, expected to error
			err := d.ReshelfArcs(context.Background(), []int64{})
			cx.So(err, convey.ShouldNotBeNil)
		})
		cx.Convey("DB Error", func(cx convey.C) {
			d.DB.Close()
			err := d.ReshelfArcs(context.Background(), []int64{1, 2, 3})
			cx.So(err, convey.ShouldNotBeNil)
			d.DB = sql.NewMySQL(d.conf.Mysql)
		})
	})
}

View File

@@ -0,0 +1,51 @@
# Bazel targets for go-common/app/job/main/tv/dao/ftp (tags are gazelle-automanaged).
package(default_visibility = ["//visibility:public"])

load(
    "@io_bazel_rules_go//go:def.bzl",
    "go_library",
    "go_test",
)

# Library: ftp upload dao.
go_library(
    name = "go_default_library",
    srcs = [
        "dao.go",
        "ftp.go",
    ],
    importpath = "go-common/app/job/main/tv/dao/ftp",
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
    deps = [
        "//app/job/main/tv/conf:go_default_library",
        "//library/log:go_default_library",
        # NOTE(review): unusual vendor path (no org segment) — confirm this
        # matches the actual vendored ftp client package.
        "//vendor/github.com/ftp-master:go_default_library",
    ],
)

# Source groups consumed by the tree-wide build.
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)

filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

# Unit tests for the ftp dao.
go_test(
    name = "go_default_test",
    srcs = [
        "dao_test.go",
        "ftp_test.go",
    ],
    embed = [":go_default_library"],
    tags = ["automanaged"],
    deps = [
        "//app/job/main/tv/conf:go_default_library",
        "//vendor/github.com/smartystreets/goconvey/convey:go_default_library",
    ],
)

View File

@@ -0,0 +1,18 @@
package ftp
import (
"go-common/app/job/main/tv/conf"
)
// Dao bundles the resources of the ftp package.
type Dao struct {
	conf *conf.Config // application config, read for FTP connection settings
}

// New builds an ftp Dao around the given config.
func New(c *conf.Config) *Dao {
	return &Dao{conf: c}
}

View File

@@ -0,0 +1,35 @@
package ftp
import (
"flag"
"os"
"testing"
"go-common/app/job/main/tv/conf"
)
// d is the package-level Dao under test, initialised once in TestMain.
var (
	d *Dao
)
// TestMain bootstraps the test configuration: when DEPLOY_ENV is set it
// pulls config from the remote config service (UAT), otherwise it reads
// the local toml file; it then builds the package-level Dao every test uses.
func TestMain(m *testing.M) {
	if os.Getenv("DEPLOY_ENV") != "" {
		flag.Set("app_id", "main.web-svr.tv-job")
		flag.Set("conf_token", "ab3e9801a77c076b997de0ac5cb21775")
		flag.Set("tree_id", "15260")
		flag.Set("conf_version", "docker-1")
		flag.Set("deploy_env", "uat")
		flag.Set("conf_host", "config.bilibili.co")
		flag.Set("conf_path", "/tmp")
		flag.Set("region", "sh")
		flag.Set("zone", "sh001")
	} else {
		flag.Set("conf", "../../cmd/tv-job-test.toml")
	}
	flag.Parse()
	if err := conf.Init(); err != nil {
		panic(err)
	}
	d = New(conf.Conf)
	os.Exit(m.Run())
}

View File

@@ -0,0 +1,112 @@
package ftp
import (
"bytes"
"crypto/md5"
"encoding/hex"
"fmt"
"io"
"io/ioutil"
"os"
"time"
"go-common/library/log"
"github.com/ftp-master"
)
const (
_ftpRetry = 3
errFormat = "Func:[%s] - Step:[%s] - Error:[%v]"
_sleep = 100 * time.Millisecond
)
// Retry . retry one function until no error
func Retry(callback func() error, retry int, sleep time.Duration) (err error) {
for i := 0; i < retry; i++ {
if err = callback(); err == nil {
return
}
time.Sleep(sleep)
}
return
}
// FileMd5 calculates the MD5 checksum of the local file at path and
// writes the hex-encoded digest into the file at md5Path (created or
// truncated in place). Any read/open/write failure is logged and returned.
func (d *Dao) FileMd5(path string, md5Path string) (err error) {
	var content []byte
	if content, err = ioutil.ReadFile(path); err != nil {
		log.Error(errFormat+" FilePath: %s", "fileMd5", "ReadFile", err, path)
		return
	}
	// hash the in-memory content directly; no need to stream it through
	// an io.Copy since the whole file was already read
	digest := md5.Sum(content)
	fMd5 := hex.EncodeToString(digest[:])
	// assign the open error to the named return: the original shadowed it
	// in a variable called `error` and returned a nil err on failure
	file, err := os.OpenFile(md5Path, os.O_RDWR|os.O_CREATE, 0766)
	if err != nil {
		log.Error(errFormat, "fileMd5", "OpenFile", err)
		return
	}
	defer file.Close()
	if _, err = file.WriteString(fMd5); err != nil {
		log.Error(errFormat, "fileMd5", "WriteString", err)
	}
	return
}
// UploadFile uploads localPath to the remote FTP directory url under the
// name remotePath, retrying the transfer up to _ftpRetry times, then
// verifies the uploaded size matches the local file.
func (d *Dao) UploadFile(localPath string, remotePath string, url string) (err error) {
	var (
		ftpInfo  = d.conf.Search.FTP
		c        *ftp.ServerConn
		content  []byte // file's content
		fileSize int64
	)
	// Dial
	if c, err = ftp.DialTimeout(ftpInfo.Host, time.Duration(ftpInfo.Timeout)); err != nil {
		log.Error(errFormat, "uploadFile", "DialTimeout", err)
		return
	}
	// close the control connection when done (it was leaked before)
	defer c.Quit()
	// use EPSV or not
	if !ftpInfo.UseEPSV {
		c.DisableEPSV = true
	}
	// Login
	if err = c.Login(ftpInfo.User, ftpInfo.Pass); err != nil {
		log.Error(errFormat, "uploadFile", "Login", err)
		return
	}
	// Change dir
	if err = c.ChangeDir(url); err != nil {
		log.Error(errFormat, "uploadFile", "ChangeDir", err)
		return
	}
	// Upload the file
	if content, err = ioutil.ReadFile(localPath); err != nil {
		log.Error(errFormat, "uploadFile", "ReadFile", err)
		return
	}
	if err = Retry(func() error {
		// build a fresh reader on every attempt: the previous shared
		// bytes.Buffer was drained by the first Stor call, so any retry
		// uploaded an empty body
		return c.Stor(remotePath, bytes.NewReader(content))
	}, _ftpRetry, _sleep); err != nil {
		log.Error("upArchives Error %+v", err)
		return
	}
	// Calculate the file size to check it's ok
	if fileSize, err = c.FileSize(remotePath); err != nil {
		log.Error(errFormat, "uploadFile", "FileSize", err)
		return
	}
	if localSize := int64(len(content)); localSize != fileSize {
		err = fmt.Errorf("LocalSize is %d, RemoteSize is %d", localSize, fileSize)
		log.Error(errFormat, "uploadFile", "FileSize", err)
		return
	}
	log.Info("File %s is uploaded successfully, size: %d", remotePath, fileSize)
	return
}

View File

@@ -0,0 +1,78 @@
package ftp
import (
"fmt"
"os"
"testing"
"time"
"github.com/smartystreets/goconvey/convey"
)
func fileExist(path string) bool {
_, err := os.Stat(path)
if err != nil {
if os.IsNotExist(err) {
return false
}
}
return true
}
// createFile creates path with the content "Hello" when it does not
// exist yet; existing files are left untouched.
func createFile(path string) {
	if fileExist(path) {
		return
	}
	// If the file doesn't exist, create it, or append to the file
	f, err := os.OpenFile(path, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
	if err != nil {
		fmt.Println(err)
		// bail out: the original fell through here and called Write on a
		// nil *os.File, which panics
		return
	}
	if _, err = f.Write([]byte("Hello")); err != nil {
		fmt.Println(err)
	}
	f.Close()
}
// TestFtpRetry smoke-tests Retry with a nil callback and retry=0: the
// loop body never runs, so nothing panics and the error stays nil.
func TestFtpRetry(t *testing.T) {
	var (
		callback func() error
		retry    = int(0)
		sleep    time.Duration
	)
	convey.Convey("Retry", t, func(ctx convey.C) {
		err := Retry(callback, retry, sleep)
		ctx.Convey("Then err should be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}
// TestFtpFileMd5 writes a small fixture under /tmp and checks that
// FileMd5 produces its digest file without error.
// NOTE(review): relies on a writable /tmp — confirm on all CI hosts.
func TestFtpFileMd5(t *testing.T) {
	var (
		path    = "/tmp/testMd5.source"
		md5Path = "/tmp/testMd5.target"
	)
	convey.Convey("FileMd5", t, func(ctx convey.C) {
		createFile(path)
		err := d.FileMd5(path, md5Path)
		ctx.Convey("Then err should be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}
// TestFtpUploadFile uploads a small fixture to the FTP server configured
// under Search.FTP (live integration test; requires network access to
// the configured host).
func TestFtpUploadFile(t *testing.T) {
	var (
		cfg  = d.conf.Search
		path = "/tmp/testMd5.source"
	)
	convey.Convey("UploadFile", t, func(ctx convey.C) {
		createFile(path)
		err := d.UploadFile(path, "testMd5.remote", cfg.FTP.RemotePgcURL)
		ctx.Convey("Then err should be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}

View File

@@ -0,0 +1,58 @@
# Bazel rules for go-common/app/job/main/tv/dao/lic.
# "automanaged" rules are regenerated by tooling — avoid manual edits.
package(default_visibility = ["//visibility:public"])
load(
    "@io_bazel_rules_go//go:def.bzl",
    "go_library",
    "go_test",
)
go_library(
    name = "go_default_library",
    srcs = [
        "call.go",
        "dao.go",
        "lic.go",
        "xml.go",
    ],
    importpath = "go-common/app/job/main/tv/dao/lic",
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
    deps = [
        "//app/job/main/tv/conf:go_default_library",
        "//app/job/main/tv/model/pgc:go_default_library",
        "//library/log:go_default_library",
        "//library/net/http/blademaster:go_default_library",
        "//vendor/github.com/pkg/errors:go_default_library",
    ],
)
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)
filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)
go_test(
    name = "go_default_test",
    srcs = [
        "dao_test.go",
        "lic_test.go",
        "xml_test.go",
    ],
    embed = [":go_default_library"],
    rundir = ".",
    tags = ["automanaged"],
    deps = [
        "//app/job/main/tv/conf:go_default_library",
        "//app/job/main/tv/model/pgc:go_default_library",
        "//vendor/github.com/smartystreets/goconvey/convey:go_default_library",
    ],
)

View File

@@ -0,0 +1,63 @@
package lic
import (
"bytes"
"context"
"encoding/xml"
"fmt"
"net/http"
"time"
model "go-common/app/job/main/tv/model/pgc"
"go-common/library/log"
"github.com/pkg/errors"
)
const (
	// _normalCode is the license holder's "success" response code.
	_normalCode = "0000"
)

// callLic posts xmlBody to the license holder's url and decodes the XML
// response. It errors on transport failure, undecodable responses, empty
// responses and any non-"0000" response code; a non-empty error list with
// a normal code is only logged as a warning.
func (d *Dao) callLic(c context.Context, url string, xmlBody string) (result *model.Document, err error) {
	var resp []byte
	result = &model.Document{}
	req, err := http.NewRequest(http.MethodPost, url, bytes.NewBuffer([]byte(xmlBody)))
	if err != nil {
		log.Error("http.NewRequest err - %v", err)
		return
	}
	req.Header.Add("Content-Type", "text/xml; charset=utf-8")
	if resp, err = d.client.Raw(c, req); err != nil {
		log.Error("ClientGet error[%v]", err)
		return
	}
	if err = xml.Unmarshal(resp, result); err != nil {
		log.Error("XML Unmarshal %s, Error %v", string(resp), err)
		return
	}
	if result.Response == nil {
		// build a real error here: the original used errors.Wrap(err, ...)
		// with a nil err, which returns nil and silently reported success
		// on an empty response
		err = errors.New("Response Empty Error")
		return
	}
	if result.Response.ResponseCode != _normalCode {
		err = fmt.Errorf("Response Code Error %s", result.Response.ResponseCode)
		return
	}
	if result.Response.ErrorList != nil && result.Response.ErrorList.Error != nil {
		log.Warn("Response Error %v", result.Response.ErrorList.Error)
	}
	return
}
// CallRetry calls the license API up to 3 times, waiting 10 seconds
// between consecutive failed attempts, and returns the first successful
// result or the last error.
func (d *Dao) CallRetry(c context.Context, url string, xmlBody string) (result *model.Document, err error) {
	log.Info("callLic URL: %s, Body %s", url, xmlBody)
	for i := 0; i < 3; i++ {
		if result, err = d.callLic(c, url, xmlBody); err == nil {
			return
		}
		// no need to wait after the final attempt; the original also
		// mis-documented the gap as 5 seconds while sleeping 10
		if i < 2 {
			time.Sleep(10 * time.Second)
		}
	}
	return
}

View File

@@ -0,0 +1,21 @@
package lic
import (
"go-common/app/job/main/tv/conf"
httpx "go-common/library/net/http/blademaster"
)
// Dao dao.
type Dao struct {
	conf   *conf.Config
	client *httpx.Client
}

// New create a instance of Dao and return.
func New(c *conf.Config) (d *Dao) {
	d = &Dao{
		conf: c,
		// use the passed-in config instead of the conf.Conf global so the
		// constructor honours its parameter (consistent with dao/report)
		client: httpx.NewClient(c.HTTPClient),
	}
	return
}

View File

@@ -0,0 +1,56 @@
package lic
import (
"context"
"flag"
"path/filepath"
"testing"
"go-common/app/job/main/tv/conf"
"fmt"
. "github.com/smartystreets/goconvey/convey"
)
var (
	ctx = context.Background()
	// d is the Dao under test, built lazily by WithDao.
	d *Dao
	// xmlBody is a captured, real "dataSync" request payload (signed form
	// fields plus the xmlData envelope) used to exercise the license calls.
	xmlBody = `inputTime=20180606&sign=timer-import_BILIBILI&tId=UHZFmufgweRpWhqAzToFYMWtYuZhMKCU&xmlData=<?xmlversion="1.0"encoding="UTF-8"?><Serviceid="dataSync"><Head><TradeId>UHZFmufgweRpWhqAzToFYMWtYuZhMKCU</TradeId><Date>2018-06-06</Date><Count>1</Count></Head><Body><programSetList><programSet><programSetId>ugc10100044</programSetId><programSetName>drm</programSetName><programSetClass></programSetClass><programSetType></programSetType><programSetPoster>http://i1.hdslb.com/bfs/archive/diuren.png</programSetPoster><publishDate>2018-01-30</publishDate><copyright>bilibili</copyright><programCount>1</programCount><cREndDate>1970-01-01</cREndDate><definitionType>SD</definitionType><cpCode>BILIBILI</cpCode><payStatus>0</payStatus><primitiveName></primitiveName><alias></alias><zone></zone><leadingRole></leadingRole><programSetDesc>drm</programSetDesc><Staff></Staff><programList><program><programId>ugc10114149</programId><programName>1</programName><programPoster></programPoster><programLength>1448</programLength><publishDate>1970-01-01</publishDate><ifPreview>0</ifPreview><number>1</number><definitionType>SD</definitionType><playCount>0</playCount><drm>0</drm><programMediaList><programMedia><mediaId>ugc10114149</mediaId><playUrl>http://upos-hz-tvshenhe.acgvideo.com/upgcxcode/87/75/41057587/41057587-1-6.mp4</playUrl><definition>SD</definition><htmlUrl>http://upos-hz-tvshenhe.acgvideo.com/upgcxcode/87/75/41057587/41057587-1-6.mp4</htmlUrl></programMedia></programMediaList></program></programList></programSet></programSetList></Body></Service>`
)
// WithDao wraps a test body: it points the conf flag at the local test
// toml, initialises the config and lazily builds the shared Dao before
// invoking f.
func WithDao(f func(d *Dao)) func() {
	return func() {
		dir, _ := filepath.Abs("../../cmd/tv-job-test.toml")
		flag.Set("conf", dir)
		conf.Init()
		if d == nil {
			d = New(conf.Conf)
		}
		f(d)
	}
}
// TestDelEpLic builds a deletion message for two ugc ep IDs and checks
// that a non-empty body is produced (the body is printed for inspection).
func TestDelEpLic(t *testing.T) {
	Convey("TestDao_CallRetry", t, WithDao(func(d *Dao) {
		res := DelEpLic("ugc", "timer-import_BILIBILI", []int{10109083, 10109084})
		So(len(res), ShouldBeGreaterThan, 0)
		fmt.Println(res)
	}))
}
// TestDao_CallRetry sends the captured payload to the configured license
// "add" endpoint and expects a decoded, non-error response.
// NOTE(review): hits the live endpoint from the test toml config.
func TestDao_CallRetry(t *testing.T) {
	Convey("TestDao_CallRetry", t, WithDao(func(d *Dao) {
		res, err := d.CallRetry(ctx, d.conf.Sync.API.AddURL, xmlBody)
		So(err, ShouldBeNil)
		So(res, ShouldNotBeNil)
	}))
}
// TestDao_CallLic exercises the single-shot callLic against the same
// live "add" endpoint, without the retry wrapper.
func TestDao_CallLic(t *testing.T) {
	Convey("TestDao_CallLic", t, WithDao(func(d *Dao) {
		result, err := d.callLic(ctx, d.conf.Sync.API.AddURL, xmlBody)
		So(err, ShouldBeNil)
		So(result, ShouldNotBeNil)
	}))
}

View File

@@ -0,0 +1,101 @@
package lic
import (
"encoding/xml"
"fmt"
model "go-common/app/job/main/tv/model/pgc"
"math/rand"
"net/url"
"time"
)
const (
	// letterBytes is the alphabet used to build random trade IDs.
	letterBytes = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ-"
	// _serviceID is the fixed service id of the data-sync interface.
	_serviceID = "dataSync"
)
// BuildLic assembles the license envelope that wraps the given program
// sets for one sync call: a fresh 32-char trade ID, today's dates in the
// two formats the API expects, the caller's sign and the payload body.
func BuildLic(sign string, ps []*model.PS, count int) *model.License {
	tid := RandStringBytesRmndr(32)
	now := time.Now()
	head := &model.Head{
		TradeID: tid,
		Date:    now.Format("2006-01-02"),
		Count:   count,
	}
	body := &model.Body{
		ProgramSetList: &model.PSList{ProgramSet: ps},
	}
	return &model.License{
		TId:       tid,
		InputTime: now.Format("20060102"),
		Sign:      sign,
		XMLData: &model.XMLData{
			Service: &model.Service{
				ID:   _serviceID,
				Head: head,
				Body: body,
			},
		},
	}
}
// RandStringBytesRmndr generates a random string of length n composed of
// characters from letterBytes (used for license trade IDs).
func RandStringBytesRmndr(n int) string {
	b := make([]byte, n)
	for i := range b {
		// rand.Intn avoids the slight modulo bias of rand.Int63()%len
		b[i] = letterBytes[rand.Intn(len(letterBytes))]
	}
	return string(b)
}
// DelLic builds a license message carrying only the season ID
// (prefix+sid), which the license holder interprets as a deletion.
func DelLic(sign string, prefix string, sid int64) *model.License {
	programSet := &model.PS{ProgramSetID: fmt.Sprintf("%s%d", prefix, sid)}
	return BuildLic(sign, []*model.PS{programSet}, 1)
}
// DelEpLic builds the urlencoded request body that asks the license
// holder to delete the given ep IDs (each prefixed with prefix).
func DelEpLic(prefix string, sign string, delEps []int) string {
	tid := RandStringBytesRmndr(32)
	// local envelope: same shape as model.Service but with a deletion body
	type Service struct {
		ID   string `xml:"id,attr"`
		Head *model.Head
		Body *model.DelBody `xml:"Body"`
	}
	programs := make([]*model.Program, 0, len(delEps))
	for _, epid := range delEps {
		programs = append(programs, &model.Program{
			ProgramID: fmt.Sprintf("%s%d", prefix, epid),
		})
	}
	msg := &Service{
		ID: _serviceID,
		Head: &model.Head{
			TradeID: tid,
			Date:    time.Now().Format("2006-01-02"),
			Count:   len(delEps),
		},
		Body: &model.DelBody{
			ProgramList: &model.ProgramList{Program: programs},
		},
	}
	// combine the xml message with the signed form fields
	xmlRes, _ := xml.MarshalIndent(msg, " ", " ")
	params := url.Values{}
	params.Set("tId", tid)
	params.Set("inputTime", time.Now().Format("20060102"))
	params.Set("sign", sign)
	return params.Encode() + "&xmlData=<?xml version=\"1.0\" encoding=\"UTF-8\"?> " + string(xmlRes)
}

View File

@@ -0,0 +1,62 @@
package lic
import (
model "go-common/app/job/main/tv/model/pgc"
"testing"
"github.com/smartystreets/goconvey/convey"
)
// TestLicBuildLic checks that BuildLic produces a non-nil envelope even
// for an empty program-set list.
func TestLicBuildLic(t *testing.T) {
	var (
		sign  = ""
		ps    = []*model.PS{}
		count = int(0)
	)
	convey.Convey("BuildLic", t, func(ctx convey.C) {
		p1 := BuildLic(sign, ps, count)
		ctx.Convey("Then p1 should not be nil.", func(ctx convey.C) {
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}
// TestLicRandStringBytesRmndr checks that RandStringBytesRmndr returns a
// string of exactly the requested length. (With the previous n=0 the
// assertion was vacuous: an empty string is never nil.)
func TestLicRandStringBytesRmndr(t *testing.T) {
	var (
		n = int(32)
	)
	convey.Convey("RandStringBytesRmndr", t, func(ctx convey.C) {
		p1 := RandStringBytesRmndr(n)
		ctx.Convey("Then p1 should have the requested length.", func(ctx convey.C) {
			ctx.So(p1, convey.ShouldHaveLength, n)
		})
	})
}
// TestLicDelLic checks that DelLic wraps a zero season ID into a
// non-nil deletion envelope.
func TestLicDelLic(t *testing.T) {
	var (
		sign   = ""
		prefix = ""
		sid    = int64(0)
	)
	convey.Convey("DelLic", t, func(ctx convey.C) {
		p1 := DelLic(sign, prefix, sid)
		ctx.Convey("Then p1 should not be nil.", func(ctx convey.C) {
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}
// TestLicDelEpLic checks that DelEpLic produces a request body even for
// an empty ep list (the envelope and signed fields are always present).
func TestLicDelEpLic(t *testing.T) {
	var (
		prefix = ""
		sign   = ""
		delEps = []int{}
	)
	convey.Convey("DelEpLic", t, func(ctx convey.C) {
		p1 := DelEpLic(prefix, sign, delEps)
		ctx.Convey("Then p1 should not be nil.", func(ctx convey.C) {
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

View File

@@ -0,0 +1,20 @@
package lic
import (
"encoding/xml"
"net/url"
model "go-common/app/job/main/tv/model/pgc"
)
// PrepareXML serialises a license into the urlencoded form the sync API
// expects: the signed fields (tId, inputTime, sign) plus the xmlData
// payload carrying the indented XML of the service envelope.
func PrepareXML(v *model.License) (body string) {
	xmlRes, _ := xml.MarshalIndent(v.XMLData.Service, " ", " ")
	params := url.Values{}
	params.Set("tId", v.TId)
	params.Set("inputTime", v.InputTime)
	params.Set("sign", v.Sign)
	body = params.Encode() + "&xmlData=<?xml version=\"1.0\" encoding=\"UTF-8\"?> " + string(xmlRes)
	return
}

View File

@@ -0,0 +1,22 @@
package lic
import (
"encoding/json"
"testing"
model "go-common/app/job/main/tv/model/pgc"
"github.com/smartystreets/goconvey/convey"
)
// TestLicPrepareXML decodes a captured license JSON fixture and checks
// PrepareXML produces a body.
// NOTE(review): the json.Unmarshal error is ignored and ShouldNotBeNil
// always passes for a string — consider asserting ShouldNotBeEmpty.
func TestLicPrepareXML(t *testing.T) {
	licStr := `{"TId":"cd-IqonHtgOndMAywoPLezvXwYmUxcHJ","InputTime":"20180904","Sign":"timer-import_BILIBILI","XMLData":{"Service":{"ID":"dataSync","Head":{"TradeID":"cd-IqonHtgOndMAywoPLezvXwYmUxcHJ","Date":"2018-09-04","Count":0},"Body":{"ProgramSetList":{"ProgramSet":[{"ProgramSetID":"xds296","ProgramSetName":"我家浴缸的二三事-Test","ProgramSetClass":"基腐,日常,泡面,治愈","ProgramSetType":"电影","ProgramSetPoster":"http://i0.hdslb.com/bfs/bangumi/23fbb5ece1d3adb8700988c02e8e97f30bbfbf33.jpg","Portrait":"","Producer":"","PublishDate":"2014-10-06","Copyright":"bilibili","ProgramCount":13,"CREndData":"1970-01-01","DefinitionType":"SD","CpCode":"BILIBILI","PayStatus":0,"PrimitiveName":"オレん家のフロ事情","Alias":"我家浴室的二三事,我家浴室的现况,オレん家のフロ事情","Zone":"日本","LeadingRole":"若狭:梅原裕一郎\n龙己岛崎信长\n鹰巢铃木达央\n真木津田健次郎\n三国花江夏树\n霞木户衣吹\n阿比留川原庆久","ProgramSetDesc":"一个人无忧无虑独自生活的男子高中生龙己,某日救下了一位倒在河边的美青年,没想到这位青年竟然是人鱼!于是这位人鱼先生似乎很中意龙己的浴缸,变住了下来!基友和卖萌人鱼同居的故事开始上演!","Staff":"原作:いときち\n监督青井小夜\n演出青井小夜\n脚本绫奈由仁子\n系列构成绫奈由仁子\n角色设计羽田浩二\n色彩设计小鹿绘里\n摄影监督藤坂めぐみ\n美术监督永吉幸树\n音响监督小泉纪介\n编辑斋藤朱里\n动画制作旭PRODUCTION","ProgramList":{"Program":null}}]}}}}}`
	license := &model.License{}
	json.Unmarshal([]byte(licStr), &license)
	convey.Convey("PrepareXML", t, func(ctx convey.C) {
		body := PrepareXML(license)
		ctx.Convey("Then body should not be nil.", func(ctx convey.C) {
			ctx.So(body, convey.ShouldNotBeNil)
		})
	})
}

View File

@@ -0,0 +1,53 @@
# Bazel rules for go-common/app/job/main/tv/dao/playurl.
# "automanaged" rules are regenerated by tooling — avoid manual edits.
package(default_visibility = ["//visibility:public"])
load(
    "@io_bazel_rules_go//go:def.bzl",
    "go_test",
    "go_library",
)
go_test(
    name = "go_default_test",
    srcs = [
        "dao_test.go",
        "playurl_test.go",
    ],
    embed = [":go_default_library"],
    rundir = ".",
    tags = ["automanaged"],
    deps = [
        "//app/job/main/tv/conf:go_default_library",
        "//vendor/github.com/smartystreets/goconvey/convey:go_default_library",
    ],
)
go_library(
    name = "go_default_library",
    srcs = [
        "dao.go",
        "playurl.go",
    ],
    importpath = "go-common/app/job/main/tv/dao/playurl",
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
    deps = [
        "//app/job/main/tv/conf:go_default_library",
        "//app/job/main/tv/model/pgc:go_default_library",
        "//library/log:go_default_library",
        "//library/net/http/blademaster:go_default_library",
    ],
)
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)
filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,21 @@
package playurl
import (
"go-common/app/job/main/tv/conf"
httpx "go-common/library/net/http/blademaster"
)
// Dao dao.
type Dao struct {
	conf   *conf.Config
	client *httpx.Client
}

// New create a instance of Dao and return.
func New(c *conf.Config) (d *Dao) {
	d = &Dao{
		conf: c,
		// use the passed-in config instead of the conf.Conf global so the
		// constructor honours its parameter (consistent with dao/report)
		client: httpx.NewClient(c.HTTPClient),
	}
	return
}

View File

@@ -0,0 +1,25 @@
package playurl
import (
"context"
"flag"
"go-common/app/job/main/tv/conf"
"path/filepath"
)
var (
	ctx = context.TODO()
	// d is the Dao under test, built lazily by WithDao.
	d *Dao
)
// WithDao wraps a test body: it points the conf flag at the local test
// toml, initialises the config and lazily builds the shared Dao before
// invoking f.
func WithDao(f func(d *Dao)) func() {
	return func() {
		dir, _ := filepath.Abs("../../cmd/tv-job-test.toml")
		flag.Set("conf", dir)
		conf.Init()
		if d == nil {
			d = New(conf.Conf)
		}
		f(d)
	}
}

View File

@@ -0,0 +1,69 @@
package playurl
import (
"context"
"fmt"
"net/url"
model "go-common/app/job/main/tv/model/pgc"
"go-common/library/log"
)
// Fixed query parameters of the playurl API call.
const (
	_type      = "mp4"  // request a single mp4 piece
	_maxBackup = 0      // no backup urls needed
	_otype     = "json" // json-formatted response
)
// Playurl resolves the playable URL of a cid through the playurl API and
// rewrites its host for TV. hitDead reports that the API answered with a
// configured "dead video" code; in that case playurl is empty and err is
// nil.
func (d *Dao) Playurl(ctx context.Context, cid int) (playurl string, hitDead bool, err error) {
	var result model.PlayurlResp
	params := url.Values{}
	params.Set("cid", fmt.Sprintf("%d", cid))
	params.Set("type", _type)                               // to get one piece
	params.Set("max_backup", fmt.Sprintf("%d", _maxBackup)) // no backup url needed
	params.Set("otype", _otype)                             // json format response
	params.Set("qn", d.conf.Sync.PlayURL.Qn)                // quality fix to 16
	if err = d.client.Get(ctx, d.conf.Sync.PlayURL.API, "", params, &result); err != nil {
		log.Error("ClientGet error[%v]", err)
		return
	}
	if result.Code != 0 { // logic error
		for _, dead := range d.conf.Sync.PlayURL.Deadcodes {
			if result.Code == dead { // hit dead code
				hitDead = true
				return
			}
		}
		err = fmt.Errorf("Resp Code:[%v], Message:[%v]", result.Code, result.Message)
		return
	}
	if len(result.Durl) == 0 { // result empty
		err = fmt.Errorf("Playurl Result is Empty! Resp (%v)", result)
		return
	}
	originURL := result.Durl[0].URL
	if playurl, err = d.hostChange(originURL); err != nil { // replace the host of the playurl
		log.Error("HostChange Origin: %s, Error: %v", originURL, err)
	}
	return
}
// hostChange swaps the playurl's host for the TV dedicated host and
// strips the query string.
func (d *Dao) hostChange(playurl string) (replaced string, err error) {
	u, err := url.Parse(playurl)
	if err != nil {
		log.Error("hostChange ParseURL error (%v)", err)
		return
	}
	host := d.conf.Sync.PlayURL.PlayPath
	log.Info("[hostChange] for URL: %s, Original Host: %s, Now we change it to: %s", playurl, u.Host, host)
	u.Host = host     // replace the host
	u.RawQuery = ""   // remove useless query
	return u.String(), nil
}

View File

@@ -0,0 +1,16 @@
package playurl
import (
"testing"
. "github.com/smartystreets/goconvey/convey"
)
// TestDao_Playurl resolves a known cid against the live playurl API and
// expects a dead-code hit with a non-empty url.
// NOTE(review): asserting both hitDead==true AND url non-empty looks
// contradictory with the dao's behaviour (url is empty on a dead hit) —
// confirm against the configured Deadcodes.
func TestDao_Playurl(t *testing.T) {
	Convey("TestDao_Playurl", t, WithDao(func(d *Dao) {
		url, hitDead, err := d.Playurl(ctx, 41057587)
		So(err, ShouldBeNil)
		So(hitDead, ShouldBeTrue)
		So(url, ShouldNotBeEmpty)
	}))
}

View File

@@ -0,0 +1,62 @@
# Bazel rules for go-common/app/job/main/tv/dao/report.
# "automanaged" rules are regenerated by tooling — avoid manual edits.
package(default_visibility = ["//visibility:public"])
load(
    "@io_bazel_rules_go//go:def.bzl",
    "go_test",
    "go_library",
)
go_test(
    name = "go_default_test",
    srcs = [
        "dao_test.go",
        "mc_test.go",
        "report_test.go",
        "style_test.go",
    ],
    embed = [":go_default_library"],
    rundir = ".",
    tags = ["automanaged"],
    deps = [
        "//app/job/main/tv/conf:go_default_library",
        "//vendor/github.com/smartystreets/goconvey/convey:go_default_library",
    ],
)
go_library(
    name = "go_default_library",
    srcs = [
        "dao.go",
        "mc.go",
        "report.go",
        "style.go",
    ],
    importpath = "go-common/app/job/main/tv/dao/report",
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
    deps = [
        "//app/job/main/tv/conf:go_default_library",
        "//app/job/main/tv/model/pgc:go_default_library",
        "//app/job/main/tv/model/report:go_default_library",
        "//library/cache/memcache:go_default_library",
        "//library/conf/env:go_default_library",
        "//library/database/sql:go_default_library",
        "//library/log:go_default_library",
        "//library/net/http/blademaster:go_default_library",
        "//library/net/metadata:go_default_library",
    ],
)
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)
filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,27 @@
package report
import (
"go-common/app/job/main/tv/conf"
"go-common/library/cache/memcache"
"go-common/library/database/sql"
bm "go-common/library/net/http/blademaster"
)
// Dao bundles the report package's resources: the data-platform HTTP
// client, the memcache pool and the MySQL pool.
type Dao struct {
	conf  *conf.Config
	httpR *bm.Client
	mc    *memcache.Pool
	DB    *sql.DB
}

// New builds a report Dao from the given config.
func New(c *conf.Config) *Dao {
	return &Dao{
		conf:  c,
		httpR: bm.NewClient(c.DpClient),
		mc:    memcache.NewPool(c.Memcache.Config),
		DB:    sql.NewMySQL(c.Mysql),
	}
}

View File

@@ -0,0 +1,35 @@
package report
import (
"flag"
"os"
"testing"
"go-common/app/job/main/tv/conf"
)
// d is the package-level Dao under test, built once in TestMain.
var (
	d *Dao
)
// TestMain bootstraps the test configuration: remote config service when
// DEPLOY_ENV is set (UAT), local toml otherwise, then builds the shared
// Dao used by every test in the package.
func TestMain(m *testing.M) {
	if os.Getenv("DEPLOY_ENV") != "" {
		flag.Set("app_id", "main.web-svr.tv-job")
		flag.Set("conf_token", "ab3e9801a77c076b997de0ac5cb21775")
		flag.Set("tree_id", "15260")
		flag.Set("conf_version", "docker-1")
		flag.Set("deploy_env", "uat")
		flag.Set("conf_host", "config.bilibili.co")
		flag.Set("conf_path", "/tmp")
		flag.Set("region", "sh")
		flag.Set("zone", "sh001")
	} else {
		flag.Set("conf", "../../cmd/tv-job-test.toml")
	}
	flag.Parse()
	if err := conf.Init(); err != nil {
		panic(err)
	}
	d = New(conf.Conf)
	os.Exit(m.Run())
}

View File

@@ -0,0 +1,119 @@
package report
import (
"context"
"time"
mdlpgc "go-common/app/job/main/tv/model/pgc"
"go-common/library/cache/memcache"
"go-common/library/log"
)
// memcache keys used by this dao.
const (
	_report = "_report"     // whole report payload
	_style  = "style_label" // season style mapping
	_label  = "label_data"  // label mapping
)
// SetReportCache stores the whole report map as JSON under the "_report"
// key, with the configured expiration.
func (d *Dao) SetReportCache(c context.Context, val map[string]interface{}) (err error) {
	conn := d.mc.Get(c)
	defer conn.Close()
	item := &memcache.Item{
		Key:        _report,
		Object:     val,
		Flags:      memcache.FlagJSON,
		Expiration: int32(time.Duration(d.conf.Report.Expire) / time.Second),
	}
	if err = conn.Set(item); err != nil {
		log.Error("conn.Set(%v) error(%v)", item, err)
	}
	return
}
// GetReportCache loads the report map from memcache; a cache miss is not
// an error and yields an empty map.
func (d *Dao) GetReportCache(c context.Context) (res map[string]interface{}, err error) {
	res = make(map[string]interface{})
	conn := d.mc.Get(c)
	defer conn.Close()
	rp, err := conn.Get(_report)
	if err != nil {
		if err == memcache.ErrNotFound {
			err = nil // miss: hand back the empty map
		} else {
			log.Error("mc.Get(%s) error(%v)", _report, err)
		}
		return
	}
	if err = conn.Scan(rp, &res); err != nil {
		log.Error("conn.Scan error(%v)", err)
	}
	return
}
// SetStyleCache caches the category -> style list mapping as JSON under
// the "style_label" key, with no expiration.
func (d *Dao) SetStyleCache(c context.Context, val map[int][]*mdlpgc.ParamStyle) (err error) {
	conn := d.mc.Get(c)
	defer conn.Close()
	item := &memcache.Item{
		Key:        _style,
		Object:     val,
		Flags:      memcache.FlagJSON,
		Expiration: 0, // never expires; refreshed by the job
	}
	if err = conn.Set(item); err != nil {
		log.Error("conn.Set(%v) error(%v)", item, err)
	}
	return
}
// SetLabelCache caches the category -> {name: value} label mapping as
// JSON under the "label_data" key, with no expiration.
func (d *Dao) SetLabelCache(c context.Context, val map[int]map[string]int) (err error) {
	conn := d.mc.Get(c)
	defer conn.Close()
	item := &memcache.Item{
		Key:        _label,
		Object:     val,
		Flags:      memcache.FlagJSON,
		Expiration: 0, // never expires; refreshed by the job
	}
	if err = conn.Set(item); err != nil {
		log.Error("conn.Set(%v) error(%v)", item, err)
	}
	return
}
// GetLabelCache loads the label mapping from memcache; a cache miss is
// not an error and leaves res nil.
func (d *Dao) GetLabelCache(c context.Context) (res map[int]map[string]int, err error) {
	conn := d.mc.Get(c)
	defer conn.Close()
	rp, err := conn.Get(_label)
	if err != nil {
		if err == memcache.ErrNotFound {
			err = nil // miss: res stays nil
		} else {
			log.Error("mc.Get(%s) error(%v)", _label, err)
		}
		return
	}
	if err = conn.Scan(rp, &res); err != nil {
		log.Error("conn.Scan error(%v)", err)
	}
	return
}

View File

@@ -0,0 +1,38 @@
package report
import (
"context"
"testing"
"github.com/smartystreets/goconvey/convey"
)
// TestReportSetReportCache stores an empty map and expects no error
// (requires a reachable memcache from the test config).
func TestReportSetReportCache(t *testing.T) {
	convey.Convey("SetReportCache", t, func(ctx convey.C) {
		var (
			c   = context.Background()
			val = make(map[string]interface{})
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			err := d.SetReportCache(c, val)
			ctx.Convey("Then err should be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
			})
		})
	})
}
// TestReportGetReportCache reads the report cache back; the dao returns
// an (empty) map even on a miss, so res is always non-nil.
func TestReportGetReportCache(t *testing.T) {
	convey.Convey("GetReportCache", t, func(ctx convey.C) {
		var (
			c = context.Background()
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			res, err := d.GetReportCache(c)
			ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
				ctx.So(res, convey.ShouldNotBeNil)
			})
		})
	})
}

View File

@@ -0,0 +1,227 @@
package report
import (
"bytes"
"context"
"crypto/md5"
"encoding/hex"
"errors"
"fmt"
xhttp "net/http"
"net/url"
"sort"
"strings"
"time"
mdlrep "go-common/app/job/main/tv/model/report"
"go-common/library/conf/env"
"go-common/library/log"
"go-common/library/net/metadata"
)
const (
	_httpOK = "0"  // data-platform "success" code (string-typed)
	_pid    = "73" // platform: 73 is tv
	_plat   = 5    // platform: 5 is tv
	_tv     = "android_tv_yst"
	// hive queries; formatted with (start, end, log_date, platform value)
	_actDur = `select request_uri,time_iso,ip,version,buvid,fts,proid,chid,pid,brand,deviceid,model,osver,ctime,mid,ver,net,oid,eid,start_time,end_time,duration,openudid,idfa,mac,is_coldstart,session_id,buvid_ext from ods.app_active_duration where unix_timestamp(ctime, 'yyyyMMddHHmmss')>=%d and unix_timestamp(ctime, 'yyyyMMddHHmmss')<%d and log_date=%s and pid=%s`
	_playDur = `select stime,build,buvid,mobi_app,platform,session,mid,aid,cid,sid,epid,type,sub_type,quality,total_time,paused_time,played_time,video_duration,play_type,network_type,last_play_progress_time,max_play_progress_time,device,epid_status,play_status,user_status,actual_played_time,auto_play,detail_play_time,list_play_time from ods.app_play_duration where stime>=%d and stime<%d and log_date=%s and mobi_app=%s`
	_visitEvent = `select request_uri,time_iso,ip,version,buvid,fts,proid,chid,pid,brand,deviceid,model,osver,mid,ctime,ver,net,oid,page_name,page_arg,ua,h5_chid,unix_timestamp(ctime, 'yyyyMMddHHmmss') from ods.app_visit_event where unix_timestamp(ctime, 'yyyyMMddHHmmss')>=%d and unix_timestamp(ctime, 'yyyyMMddHHmmss')<%d and log_date=%s and pid=%s`
	_arcClick = `select r_type,avid,cid,part,mid,stime,did,ip,ua,buvid,cookie_sid,refer,type,sub_type,sid,epid,platform,device from ods.ods_archive_click where stime>=%d and stime<%d and log_date=%s and plat=%d`
)
var (
	// signParams are the only keys folded into the MD5 signature (see encode).
	signParams = []string{"appKey", "timestamp", "version"}
	// _userAgent is the request header carrying the caller identity.
	_userAgent = "User-Agent"
)
// Report submits an offline query job to the data platform for the given
// report table (archive_click / active_duration / play_duration /
// visit_event) over the configured time window; on success it returns
// the job-status polling URL, otherwise an empty string.
func (d *Dao) Report(ctx context.Context, table string) (info string, err error) {
	var (
		v          = url.Values{}
		ip         = metadata.String(ctx, metadata.RemoteIP)
		logdata    = d.queryfmt() // log_date partition value (yyyyMMdd)
		start, end = d.dealDate() // unix window [start, end)
		query      string
		res        struct {
			Code      int    `json:"code"`
			Msg       string `json:"msg"`
			StatusURL string `json:"jobStatusUrl"`
		}
	)
	// the hive query expects the partition value single-quoted
	logdata = "'" + logdata + "'"
	switch table {
	case mdlrep.ArchiveClick:
		query = fmt.Sprintf(_arcClick, start, end, logdata, _plat)
	case mdlrep.ActiveDuration:
		query = fmt.Sprintf(_actDur, start, end, logdata, `"`+_pid+`"`)
	case mdlrep.PlayDuration:
		query = fmt.Sprintf(_playDur, start, end, logdata, "'"+_tv+"'")
	case mdlrep.VisitEvent:
		query = fmt.Sprintf(_visitEvent, start, end, logdata, `"`+_pid+`"`)
	default:
		// NOTE(review): "nill" is a typo for "nil" — left as-is in case
		// callers match on the message
		err = errors.New("table is nill")
		return
	}
	v.Set("appKey", d.conf.DpClient.Key)
	v.Set("signMethod", "md5")
	v.Set("timestamp", time.Now().Format("2006-01-02 15:04:05"))
	v.Set("version", "1.0")
	v.Set("query", query)
	if err = d.newRequest(ctx, d.conf.Report.ReportURI, ip, v, &res); err != nil {
		log.Error("newRequest url(%v), err(%v)", d.conf.Report.ReportURI+"?"+v.Encode(), err)
		return
	}
	// NOTE(review): a non-200 code or missing URL is not surfaced as an
	// error — the caller only sees info == "". Confirm this is intended.
	if res.Code == 200 && res.StatusURL != "" {
		info = res.StatusURL
	}
	return
}
// CheckJob polls the data-platform job-status endpoint (urls) with the
// standard signed params and returns the decoded result; any non-200
// code becomes an error carrying the platform status fields.
func (d *Dao) CheckJob(ctx context.Context, urls string) (res *mdlrep.DpCheckJobResult, err error) {
	res = &mdlrep.DpCheckJobResult{}
	ip := metadata.String(ctx, metadata.RemoteIP)
	v := url.Values{}
	v.Set("appKey", d.conf.DpClient.Key)
	v.Set("signMethod", "md5")
	v.Set("timestamp", time.Now().Format("2006-01-02 15:04:05"))
	v.Set("version", "1.0")
	if err = d.newRequest(ctx, urls, ip, v, &res); err != nil {
		log.Error("d.newRequest error(%v)", err)
		return
	}
	if res.Code != xhttp.StatusOK {
		log.Error("d.CheckJob newRequest error code:%d ; url(%s) ", res.Code, urls+"?"+v.Encode())
		err = fmt.Errorf("code(%d) msg(%s) statusID(%d) statusID(%s)", res.Code, res.Msg, res.StatusID, res.StatusMsg)
	}
	return
}
// PostRequest posts the report body (plain text) to the data-platform
// upload endpoint and maps any non-"0" response code to an error.
func (d *Dao) PostRequest(ctx context.Context, body string) (err error) {
	var res struct {
		Code    string `json:"code"`
		Message string `json:"message"`
	}
	req, err := xhttp.NewRequest(xhttp.MethodPost, d.conf.Report.UpDataURI, strings.NewReader(body))
	if err != nil {
		log.Error("xhttp.NewRequest url(%s) error (%v)", d.conf.Report.UpDataURI, err)
		return
	}
	req.Header.Add("Content-Type", "text/plain; charset=utf-8")
	if err = d.httpR.Do(ctx, req, &res); err != nil {
		log.Error("d.httpReq.Do error(%v) url(%s)", err, d.conf.Report.UpDataURI)
		return
	}
	if res.Code != _httpOK {
		log.Error("PostRequest error code:%s ; url(%s) ", res.Code, d.conf.Report.UpDataURI)
		err = fmt.Errorf("code(%s)", res.Code)
	}
	return
}
// newRequest builds a signed GET request (params plus MD5 sign, see
// d.sign) to the data platform, sets the identifying User-Agent and
// decodes the JSON response into res.
func (d *Dao) newRequest(c context.Context, url, realIP string, params url.Values, res interface{}) (err error) {
	enc, err := d.sign(params)
	if err != nil {
		log.Error("url:%s,params:%v", url, params)
		return
	}
	if enc != "" {
		url = url + "?" + enc
	}
	req, err := xhttp.NewRequest(xhttp.MethodGet, url, nil)
	if err != nil {
		log.Error("xhttp.NewRequest method:%s,url:%s", xhttp.MethodGet, url)
		return
	}
	req.Header.Set(_userAgent, "haoguanwei@bilibili.com "+env.AppID)
	// (removed a duplicated, dead `if err != nil` check: err cannot be
	// non-nil here — the request error was already handled above)
	return d.httpR.Do(c, req, res)
}
// sign produces the final urlencoded query: the encoded params (with '+'
// rewritten to '%20') followed by an uppercase-hex MD5 signature computed
// over secret + canonical-sign-params + secret.
func (d *Dao) sign(params url.Values) (query string, err error) {
	encoded := params.Encode()
	if strings.IndexByte(encoded, '+') > -1 {
		encoded = strings.Replace(encoded, "+", "%20", -1)
	}
	var sb bytes.Buffer
	sb.WriteString(d.conf.DpClient.Secret)
	sb.WriteString(d.encode(params))
	sb.WriteString(d.conf.DpClient.Secret)
	digest := md5.Sum(sb.Bytes())
	// query = encoded params + signature
	var out bytes.Buffer
	out.WriteString(encoded)
	out.WriteString("&sign=")
	out.WriteString(strings.ToUpper(hex.EncodeToString(digest[:])))
	query = out.String()
	return
}
// encode serialises only the whitelisted sign params ("appKey",
// "timestamp", "version"), sorted by key and concatenated as key+value
// with no separators — the canonical string the MD5 signature is built on.
func (d *Dao) encode(v url.Values) string {
	if v == nil {
		return ""
	}
	keys := make([]string, 0, len(v))
	for k := range v {
		keys = append(keys, k)
	}
	sort.Strings(keys)
	var buf bytes.Buffer
	for _, k := range keys {
		whitelisted := false
		for _, p := range signParams {
			if p == k {
				whitelisted = true
				break
			}
		}
		if !whitelisted {
			continue
		}
		for _, val := range v[k] {
			buf.WriteString(k)
			buf.WriteString(val)
		}
	}
	return buf.String()
}
// dealDate computes the report window (unix seconds):
// end = now - TimeDelay, start = end - SeTimeSpan.
func (d *Dao) dealDate() (start, end int64) {
	// parse errors are ignored on purpose: a bad config yields a zero offset
	delay, _ := time.ParseDuration("-" + d.conf.Report.TimeDelay)
	span, _ := time.ParseDuration("-" + time.Duration(d.conf.Report.SeTimeSpan).String())
	// the old code computed endTime.Add(delay) twice; compute it once
	endTime := time.Now().Add(delay)
	end = endTime.Unix()
	start = endTime.Add(span).Unix()
	return
}
// queryfmt returns the report date (now minus TimeDelay) formatted as yyyyMMdd.
func (d *Dao) queryfmt() (logdata string) {
	offset, _ := time.ParseDuration("-" + d.conf.Report.TimeDelay)
	logdata = time.Now().Add(offset).Format("20060102")
	return
}

View File

@@ -0,0 +1,32 @@
package report
import (
"context"
"testing"
"github.com/smartystreets/goconvey/convey"
)
// TestReportReport smoke-tests Report against the archive_click report type.
func TestReportReport(t *testing.T) {
	convey.Convey("Report", t, func(ctx convey.C) {
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			_, err := d.Report(context.Background(), "archive_click")
			println(err)
		})
	})
}
// TestReportCheckJob smoke-tests CheckJob with an empty url list.
func TestReportCheckJob(t *testing.T) {
	convey.Convey("CheckJob", t, func(ctx convey.C) {
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			d.CheckJob(context.Background(), "")
		})
	})
}

View File

@@ -0,0 +1,69 @@
package report
import (
"context"
mdlpgc "go-common/app/job/main/tv/model/pgc"
"go-common/library/database/sql"
"go-common/library/log"
)
const (
_styleSQL = "SELECT id,style,category FROM tv_ep_season WHERE is_deleted=0 AND `check`=1 AND valid=1"
_labelSQL = `SELECT name,value,category FROM tv_label WHERE deleted=0 AND param="style_id" AND valid=1`
)
// FindStyle loads id/style/category of all valid, checked, non-deleted seasons.
func (d *Dao) FindStyle(ctx context.Context) (res []*mdlpgc.StyleRes, err error) {
	rows, err := d.DB.Query(ctx, _styleSQL)
	if err != nil {
		log.Error("d.DB.Query sql(%s) error(%v)", _styleSQL, err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		item := &mdlpgc.StyleRes{}
		if err = rows.Scan(&item.ID, &item.Style, &item.Category); err != nil {
			log.Error("d.DB.QueryRow error(%v)", err)
			return
		}
		res = append(res, item)
	}
	if err = rows.Err(); err != nil {
		log.Error("rows.Err() error(%v)", err)
	}
	return
}
// FindLabelID loads all valid style labels and groups them as
// category -> label name -> style id.
//
// NOTE: the previous version dropped the first label of every category
// (it created the inner map without storing the current row) and reused a
// shared inner map, which could attach labels to the wrong category when
// rows of different categories interleaved.
func (d *Dao) FindLabelID(ctx context.Context) (res map[int]map[string]int, err error) {
	var rows *sql.Rows
	res = make(map[int]map[string]int)
	if rows, err = d.DB.Query(ctx, _labelSQL); err != nil {
		log.Error("d.DB.Query error(%v)", err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		r := &mdlpgc.LabelRes{}
		if err = rows.Scan(&r.Name, &r.Value, &r.Category); err != nil {
			log.Error("d.DB.Query Scan error(%v)", err)
			return
		}
		if _, ok := res[r.Category]; !ok {
			res[r.Category] = make(map[string]int)
		}
		res[r.Category][r.Name] = r.Value
	}
	if err = rows.Err(); err != nil {
		log.Error("rows.Err() error(%v)", err)
	}
	return
}

View File

@@ -0,0 +1,38 @@
package report
import (
"context"
"testing"
"github.com/smartystreets/goconvey/convey"
)
// TestReportFindStyle verifies FindStyle returns a non-nil result set.
func TestReportFindStyle(t *testing.T) {
	convey.Convey("FindStyle", t, func(ctx convey.C) {
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			res, err := d.FindStyle(context.Background())
			ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
				ctx.So(res, convey.ShouldNotBeNil)
			})
		})
	})
}
// TestReportFindLabelID verifies FindLabelID returns a non-nil map.
func TestReportFindLabelID(t *testing.T) {
	convey.Convey("FindLabelID", t, func(ctx convey.C) {
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			res, err := d.FindLabelID(context.Background())
			ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
				ctx.So(res, convey.ShouldNotBeNil)
			})
		})
	})
}

View File

@@ -0,0 +1,85 @@
# Bazel targets for app/job/main/tv/dao/ugc.
# NOTE: tags = ["automanaged"] marks these rules as regenerated by tooling;
# hand edits other than comments may be overwritten.
load(
    "@io_bazel_rules_go//go:def.bzl",
    "go_test",
    "go_library",
)

# Unit tests of the ugc dao package.
go_test(
    name = "go_default_test",
    srcs = [
        "dao_test.go",
        "databus_test.go",
        "del_arc_test.go",
        "del_upper_test.go",
        "del_video_test.go",
        "delete_test.go",
        "import_test.go",
        "manual_test.go",
        "media_cache_test.go",
        "passed_test.go",
        "search_con_test.go",
        "sync_video_test.go",
    ],
    embed = [":go_default_library"],
    rundir = ".",
    tags = ["automanaged"],
    deps = [
        "//app/job/main/tv/conf:go_default_library",
        "//app/job/main/tv/model/ugc:go_default_library",
        "//app/service/main/archive/api:go_default_library",
        "//library/database/sql:go_default_library",
        "//vendor/github.com/smartystreets/goconvey/convey:go_default_library",
    ],
)

# The ugc dao library itself.
go_library(
    name = "go_default_library",
    srcs = [
        "dao.go",
        "databus.go",
        "del_arc.go",
        "del_upper.go",
        "del_video.go",
        "delete.go",
        "import.go",
        "manual.go",
        "media_cache.go",
        "passed.go",
        "report_cid.go",
        "search_con.go",
        "sync_arc.go",
        "sync_video.go",
    ],
    importpath = "go-common/app/job/main/tv/dao/ugc",
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

# All files of this package (private helper for all-srcs).
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)

# Recursive source group consumed by parent packages.
filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,49 @@
package ugc
import (
"context"
"time"
"go-common/app/job/main/tv/conf"
"go-common/library/cache/memcache"
"go-common/library/cache/redis"
"go-common/library/database/sql"
httpx "go-common/library/net/http/blademaster"
)
// Dao groups the resources (MySQL, http client, caches) used by the ugc job dao.
type Dao struct {
	conf *conf.Config // job configuration
	DB *sql.DB // tv MySQL instance
	client *httpx.Client // http client for outgoing calls
	mc *memcache.Pool // memcache pool
	mcExpire int32 // expire for ugc cache, same as pgc auth, because it's daily refresh
	criCID int64 // critical cid for ugc video sync
	redis *redis.Pool // redis pool
}
// New creates an instance of Dao and returns it.
func New(c *conf.Config) (d *Dao) {
	d = &Dao{
		conf: c,
		DB:   sql.NewMySQL(c.Mysql),
		// use the passed-in config consistently; the old code reached for the
		// global conf.Conf here while every other field used c
		client:   httpx.NewClient(c.HTTPClient),
		mc:       memcache.NewPool(c.Memcache.Config),
		mcExpire: int32(time.Duration(c.Memcache.Expire) / time.Second),
		criCID:   c.UgcSync.Cfg.CriticalCid,
		redis:    redis.NewPool(c.Redis.Config),
	}
	return
}
// Close releases the MySQL connection held by the dao.
// NOTE(review): the old comment claimed redis/kafka were closed here, but only
// the DB is — confirm whether the memcache/redis pools should also be released
// on shutdown.
func (d *Dao) Close() {
	if d.DB != nil {
		d.DB.Close()
	}
}
// BeginTran opens a MySQL transaction on the dao's DB.
func (d *Dao) BeginTran(c context.Context) (tx *sql.Tx, err error) {
	tx, err = d.DB.Begin(c)
	return
}

View File

@@ -0,0 +1,70 @@
package ugc
import (
"context"
"flag"
"fmt"
"path/filepath"
"testing"
"go-common/app/job/main/tv/conf"
"go-common/app/job/main/tv/model/ugc"
. "github.com/smartystreets/goconvey/convey"
)
var (
	// ctx is the shared test context.
	ctx = context.TODO()
	// d is the dao under test, lazily initialized by WithDao.
	d *Dao
	// queryMid/queryAid pick a real mid/aid from the test DB so tests can run
	// against whatever data exists.
	queryMid = "SELECT mid FROM ugc_archive WHERE result=1 AND valid=1 AND deleted=0 LIMIT 1"
	queryAid = "SELECT aid FROM ugc_archive WHERE result=1 AND valid=1 AND deleted=0 LIMIT 1"
)
// WithDao wraps a test body, initializing the package-level dao once from the
// test configuration file.
func WithDao(f func(d *Dao)) func() {
	return func() {
		confPath, _ := filepath.Abs("../../cmd/tv-job-test.toml")
		flag.Set("conf", confPath)
		conf.Init()
		if d == nil {
			d = New(conf.Conf)
		}
		f(d)
	}
}
// TestDao_UpArcs picks a real mid from the DB and lists its archives.
func TestDao_UpArcs(t *testing.T) {
	Convey("TestDao_UpArcs", t, WithDao(func(d *Dao) {
		var mid int64
		d.DB.QueryRow(ctx, queryMid).Scan(&mid)
		if mid == 0 { // no usable data, skip
			return
		}
		aids, err := d.UpArcs(ctx, mid)
		So(err, ShouldBeNil)
		So(len(aids), ShouldBeGreaterThan, 0)
	}))
}
// TestDao_SetArcCMS picks a real aid and writes a test CMS record for it.
func TestDao_SetArcCMS(t *testing.T) {
	Convey("TestDao_SetArcCMS", t, WithDao(func(d *Dao) {
		var aid int64
		d.DB.QueryRow(ctx, queryAid).Scan(&aid)
		if aid == 0 { // no usable data, skip
			return
		}
		So(d.SetArcCMS(ctx, &ugc.ArcCMS{
			AID:   aid,
			Title: "test",
		}), ShouldBeNil)
		fmt.Println(aid)
	}))
}
// TestDao_CountArcs counts the archives of a known upper.
func TestDao_CountArcs(t *testing.T) {
	Convey("TestDao_CountArcs", t, WithDao(func(d *Dao) {
		count, err := d.CountUpArcs(ctx, 452156)
		So(err, ShouldBeNil)
		So(count, ShouldBeGreaterThan, 0)
		fmt.Println(count)
	}))
}

View File

@@ -0,0 +1,92 @@
package ugc
import (
"context"
ugcmdl "go-common/app/job/main/tv/model/ugc"
arccli "go-common/app/service/main/archive/api"
"go-common/library/database/sql"
"go-common/library/log"
)
const (
_updateArc = "UPDATE ugc_archive SET title = ?, cover = ?, content = ?, pubtime = ?, " +
"typeid = ?, submit = ?, state = ? WHERE aid = ? AND deleted = 0"
_updateVideo = "UPDATE ugc_video SET eptitle = ?, index_order = ?, submit = ? WHERE cid = ? AND deleted = 0"
_needSubmit = 1
_pickVideos = "SELECT id, eptitle, cid, index_order FROM ugc_video WHERE aid = ? AND deleted = 0"
_upInList = "SELECT mid FROM ugc_uploader WHERE mid = ? AND deleted = 0"
_setUploader = "REPLACE INTO ugc_uploader (mid, state) VALUES (?,?)"
)
// UpInList checks whether the upper is in our list; realID stays 0 (with a
// nil error) when the mid is absent.
func (d *Dao) UpInList(c context.Context, mid int64) (realID int64, err error) {
	err = d.DB.QueryRow(c, _upInList, mid).Scan(&realID)
	if err == sql.ErrNoRows { // absence is not an error
		return 0, nil
	}
	if err != nil {
		log.Error("d.UpInList.Query error(%v)", err)
	}
	return
}
// UpdateArc updates the key fields of an archive, used for databus monitoring.
func (d *Dao) UpdateArc(c context.Context, arc *ugcmdl.ArchDatabus) (err error) {
	_, err = d.DB.Exec(c, _updateArc,
		arc.Title, arc.Cover, arc.Content, arc.PubTime, arc.TypeID, _needSubmit, arc.State, arc.Aid)
	if err != nil {
		log.Error("UpdateArc, failed to update: (%v), Error: %v", arc, err)
	}
	return
}
// TxUpdateVideo updates the ugc video's title/order and flags it for
// submission, within the given transaction (databus update path).
func (d *Dao) TxUpdateVideo(tx *sql.Tx, video *arccli.Page) (err error) {
	_, err = tx.Exec(_updateVideo, video.Part, video.Page, _needSubmit, video.Cid)
	if err != nil {
		log.Error("TxUpdateVideo, failed to update: (%v), Error: %v", video, err)
	}
	return
}
// PickVideos picks the videos of an archive in one shot, keyed by cid.
func (d *Dao) PickVideos(c context.Context, aid int64) (res map[int64]*ugcmdl.SimpleVideo, err error) {
	var rows *sql.Rows
	res = make(map[int64]*ugcmdl.SimpleVideo)
	if rows, err = d.DB.Query(c, _pickVideos, aid); err != nil {
		log.Error("d.PickVideos.Query error(%v)", err) // was mislabeled "d.Import.Query"
		return
	}
	defer rows.Close()
	for rows.Next() {
		var r = &ugcmdl.SimpleVideo{}
		if err = rows.Scan(&r.ID, &r.Eptitle, &r.CID, &r.IndexOrder); err != nil {
			log.Error("PickVideos row.Scan() error(%v)", err)
			return
		}
		res[r.CID] = r
	}
	if err = rows.Err(); err != nil {
		log.Error("d.PickVideos.Query error(%v)", err)
	}
	return
}
// TxAddVideos inserts the archive's new videos within the given transaction.
func (d *Dao) TxAddVideos(tx *sql.Tx, pages []*arccli.Page, aid int64) (err error) {
	for _, v := range pages {
		if _, err = tx.Exec(_importVideo, aid, v.Cid, v.Part, v.Page, v.Duration, v.Desc); err != nil {
			// was mislabeled "_importArc" although this inserts videos
			log.Error("TxAddVideos, failed to insert: (%v), Error: %v", v, err)
			return
		}
	}
	return
}
// TxUpAdd adds the upper
func (d *Dao) TxUpAdd(tx *sql.Tx, mid int64) (err error) {
if _, err = tx.Exec(_setUploader, mid, 1); err != nil {
log.Error("UpAdd Error %v", err)
}
return
}

View File

@@ -0,0 +1,51 @@
package ugc
import (
"fmt"
"testing"
"go-common/app/service/main/archive/api"
. "github.com/smartystreets/goconvey/convey"
)
// TestDao_UpInList checks both an existing and a missing upper id.
func TestDao_UpInList(t *testing.T) {
	Convey("TestDao_UpInList", t, WithDao(func(d *Dao) {
		existing, err := d.UpInList(ctx, 27515615)
		So(err, ShouldBeNil)
		So(existing, ShouldBeGreaterThan, 0)
		fmt.Println(existing)
		missing, errMissing := d.UpInList(ctx, 100997637777)
		So(errMissing, ShouldBeNil)
		So(missing, ShouldEqual, 0)
	}))
}
// TestDao_PickVideos lists the videos of a known archive.
func TestDao_PickVideos(t *testing.T) {
	Convey("TestDao_PickVideos", t, WithDao(func(d *Dao) {
		videos, err := d.PickVideos(ctx, 10099763)
		So(err, ShouldBeNil)
		So(len(videos), ShouldBeGreaterThan, 0)
		for _, video := range videos {
			fmt.Println(video)
		}
	}))
}
// TestDao_InsertVideos inserts one video page inside a transaction.
func TestDao_InsertVideos(t *testing.T) {
	Convey("TestDao_InsertVideos", t, WithDao(func(d *Dao) {
		tx, err := d.BeginTran(ctx)
		So(err, ShouldBeNil)
		pages := []*api.Page{{
			Cid:      10126229,
			Part:     "test",
			Duration: 2333,
			Desc:     "test",
			Page:     999,
		}}
		err = d.TxAddVideos(tx, pages, 10098693)
		tx.Commit()
		So(err, ShouldBeNil)
	}))
}

View File

@@ -0,0 +1,37 @@
package ugc
import (
"context"
"go-common/library/log"
"time"
)
const (
_deletedArc = "SELECT aid FROM ugc_archive WHERE submit = 1 AND deleted = 1 AND retry < unix_timestamp(now()) LIMIT 1"
_finishDelArc = "UPDATE ugc_archive SET submit = 0 WHERE aid = ? AND deleted = 1"
_ppDelArc = "UPDATE ugc_archive SET retry = ? WHERE aid = ? AND deleted = 1"
)
// DeletedArc picks one deleted archive (submit=1, retry window elapsed) to
// sync to the licensee. Scan returns sql.ErrNoRows when there is nothing to do.
func (d *Dao) DeletedArc(c context.Context) (aid int64, err error) {
	err = d.DB.QueryRow(c, _deletedArc).Scan(&aid)
	return
}
// FinishDelArc updates the deleted archive's submit status from 1 to 0.
func (d *Dao) FinishDelArc(c context.Context, aid int64) (err error) {
	if _, err = d.DB.Exec(c, _finishDelArc, aid); err != nil {
		// old message had one %v verb for two arguments (go vet failure)
		log.Error("FinishDelArc aid(%d) Error: %v", aid, err)
	}
	return
}
// PpDelArc postpones the deleted archive's submit retry by ErrorWait seconds
// (~30 mins) after a sync failure.
func (d *Dao) PpDelArc(c context.Context, aid int64) (err error) {
	delay := time.Now().Unix() + int64(d.conf.UgcSync.Frequency.ErrorWait)
	if _, err = d.DB.Exec(c, _ppDelArc, delay, aid); err != nil {
		// was mislabeled "PostponeArc"
		log.Error("PpDelArc, failed to delay: (%v,%v), Error: %v", delay, aid, err)
	}
	return
}

View File

@@ -0,0 +1,35 @@
package ugc
import (
"fmt"
"go-common/library/database/sql"
"testing"
. "github.com/smartystreets/goconvey/convey"
)
// TestDao_DeletedArc checks DeletedArc when deletable data may or may not exist.
func TestDao_DeletedArc(t *testing.T) {
	Convey("TestDao_DeletedArc", t, WithDao(func(d *Dao) {
		aid, err := d.DeletedArc(ctx)
		if err == sql.ErrNoRows {
			fmt.Println("No to delete data")
			return
		}
		So(err, ShouldBeNil)
		So(aid, ShouldBeGreaterThan, 0)
	}))
}
// TestDao_PpDelArc verifies postponing a deleted archive does not error.
func TestDao_PpDelArc(t *testing.T) {
	Convey("TestDao_PpDelArc", t, WithDao(func(d *Dao) {
		So(d.PpDelArc(ctx, 333), ShouldBeNil)
	}))
}
// TestDao_FinishDelArc verifies finishing a deleted archive does not error.
func TestDao_FinishDelArc(t *testing.T) {
	Convey("TestDao_FinishDelArc", t, WithDao(func(d *Dao) {
		So(d.FinishDelArc(ctx, 333), ShouldBeNil)
	}))
}

View File

@@ -0,0 +1,70 @@
package ugc
import (
"context"
"time"
"go-common/library/database/sql"
"go-common/library/log"
)
const (
_deletedUp = "SELECT mid FROM ugc_uploader WHERE toinit = 2 AND deleted = 1 AND retry < unix_timestamp(now()) LIMIT 1"
_finishDelUp = "UPDATE ugc_uploader SET toinit = 0 WHERE mid = ? AND deleted = 1"
_ppDelUp = "UPDATE ugc_uploader SET retry = ? WHERE mid = ? AND deleted = 1"
_upArcs = "SELECT aid FROM ugc_archive WHERE mid = ? AND deleted = 0 LIMIT 50"
_upCountArc = "SELECT count(1) FROM ugc_archive WHERE mid = ? AND deleted = 0"
)
// DeletedUp picks one deleted upper (toinit = 2 and deleted = 1) whose retry
// window has elapsed. Scan returns sql.ErrNoRows when there is none.
func (d *Dao) DeletedUp(c context.Context) (mid int64, err error) {
	err = d.DB.QueryRow(c, _deletedUp).Scan(&mid)
	return
}
// FinishDelUp resets the deleted upper's toinit flag from 2 to 0.
func (d *Dao) FinishDelUp(c context.Context, mid int64) (err error) {
	if _, err = d.DB.Exec(c, _finishDelUp, mid); err != nil {
		// old message had one %v verb for two arguments (go vet failure)
		log.Error("FinishDelUp mid(%d) Error: %v", mid, err)
	}
	return
}
// PpDelUp postpones the deleted upper's retry by ErrorWait seconds (~30 mins).
func (d *Dao) PpDelUp(c context.Context, mid int64) (err error) {
	delay := time.Now().Unix() + int64(d.conf.UgcSync.Frequency.ErrorWait)
	if _, err = d.DB.Exec(c, _ppDelUp, delay, mid); err != nil {
		// was mislabeled "PostponeArc" — this postpones an upper, not an archive
		log.Error("PpDelUp, failed to delay: (%v,%v), Error: %v", delay, mid, err)
	}
	return
}
// CountUpArcs counts the upper's non-deleted archives.
func (d *Dao) CountUpArcs(c context.Context, mid int64) (count int64, err error) {
	err = d.DB.QueryRow(c, _upCountArc, mid).Scan(&count)
	if err != nil {
		log.Error("d.CountUpArcs.Query error(%v)", err)
	}
	return
}
// UpArcs picks up to 50 non-deleted archive aids of the given upper.
func (d *Dao) UpArcs(c context.Context, mid int64) (aids []int64, err error) {
	var rows *sql.Rows
	if rows, err = d.DB.Query(c, _upArcs, mid); err != nil {
		log.Error("d.UpArcs.Query error(%v)", err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		var aid int64
		if err = rows.Scan(&aid); err != nil {
			// was mislabeled "ParseVideos"
			log.Error("UpArcs row.Scan() error(%v)", err)
			return
		}
		aids = append(aids, aid)
	}
	if err = rows.Err(); err != nil {
		log.Error("d.UpArcs.Query error(%v)", err)
	}
	return
}

View File

@@ -0,0 +1,80 @@
package ugc
import (
"context"
"testing"
"database/sql"
"fmt"
"github.com/smartystreets/goconvey/convey"
)
// TestUgcDeletedUp checks DeletedUp when deletable uppers may or may not exist.
func TestUgcDeletedUp(t *testing.T) {
	convey.Convey("DeletedUp", t, func(ctx convey.C) {
		mid, err := d.DeletedUp(context.Background())
		if err == sql.ErrNoRows {
			fmt.Println("No to delete data")
			return
		}
		ctx.Convey("Then err should be nil.mid should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(mid, convey.ShouldNotBeNil)
		})
	})
}
// TestUgcFinishDelUp verifies FinishDelUp does not error on a zero mid.
func TestUgcFinishDelUp(t *testing.T) {
	convey.Convey("FinishDelUp", t, func(ctx convey.C) {
		err := d.FinishDelUp(context.Background(), int64(0))
		ctx.Convey("Then err should be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}
// TestUgcPpDelUp verifies PpDelUp does not error on a zero mid.
func TestUgcPpDelUp(t *testing.T) {
	convey.Convey("PpDelUp", t, func(ctx convey.C) {
		err := d.PpDelUp(context.Background(), int64(0))
		ctx.Convey("Then err should be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}
// TestUgcCountUpArcs verifies CountUpArcs returns without error for mid 0.
func TestUgcCountUpArcs(t *testing.T) {
	convey.Convey("CountUpArcs", t, func(ctx convey.C) {
		count, err := d.CountUpArcs(context.Background(), int64(0))
		ctx.Convey("Then err should be nil.count should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(count, convey.ShouldNotBeNil)
		})
	})
}
// TestUgcUpArcs verifies UpArcs returns without error for mid 0.
func TestUgcUpArcs(t *testing.T) {
	convey.Convey("UpArcs", t, func(ctx convey.C) {
		aids, err := d.UpArcs(context.Background(), int64(0))
		ctx.Convey("Then err should be nil.aids should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(aids, convey.ShouldNotBeNil)
		})
	})
}

View File

@@ -0,0 +1,59 @@
package ugc
import (
"context"
"time"
"go-common/library/database/sql"
"go-common/library/log"
)
const (
_deletedVideos = "SELECT cid FROM ugc_video WHERE submit = 1 AND deleted = 1 AND retry < unix_timestamp(now()) LIMIT 5"
_finishDelVideo = "UPDATE ugc_video SET submit = 0 WHERE cid = ? AND deleted = 1"
_ppDelVideos = "UPDATE ugc_video SET retry = ? WHERE cid = ? AND deleted = 1"
)
// DeletedVideos picks up to 5 deleted video cids whose retry window has
// elapsed, ready to sync.
func (d *Dao) DeletedVideos(c context.Context) (delIds []int, err error) {
	rows, err := d.DB.Query(c, _deletedVideos)
	if err != nil {
		return
	}
	defer rows.Close()
	for rows.Next() {
		var cid int
		if err = rows.Scan(&cid); err != nil {
			log.Error("ParseVideos row.Scan() error(%v)", err)
			return
		}
		delIds = append(delIds, cid)
	}
	if err = rows.Err(); err != nil {
		log.Error("d.deletedVideos.Query error(%v)", err)
	}
	return
}
// FinishDelVideos updates the submit status from 1 to 0 for the given cids.
func (d *Dao) FinishDelVideos(c context.Context, delIds []int) (err error) {
	for _, cid := range delIds {
		if _, err = d.DB.Exec(c, _finishDelVideo, cid); err != nil {
			// old message had one %v verb for two arguments (go vet failure)
			log.Error("FinishDelVideos cid(%d) Error: %v", cid, err)
			return
		}
	}
	return
}
// PpDelVideos postpones the deleted videos' submit retry by ErrorWait seconds
// (~30 mins).
func (d *Dao) PpDelVideos(c context.Context, delIds []int) (err error) {
	retryAt := time.Now().Unix() + int64(d.conf.UgcSync.Frequency.ErrorWait)
	for _, cid := range delIds {
		if _, err = d.DB.Exec(c, _ppDelVideos, retryAt, cid); err != nil {
			log.Error("PpDelVideos, failed to delay: (%v,%v), Error: %v", retryAt, cid, err)
			return
		}
	}
	return
}

View File

@@ -0,0 +1,23 @@
package ugc
import (
"testing"
"fmt"
. "github.com/smartystreets/goconvey/convey"
)
// TestDao_DeletedVideos seeds a deleted video if needed and checks the picker.
func TestDao_DeletedVideos(t *testing.T) {
	Convey("TestDao_DeletedVideos", t, WithDao(func(d *Dao) {
		cids, err := d.DeletedVideos(ctx)
		if err == nil && len(cids) == 0 {
			fmt.Println("No Delete Data")
			d.DB.Exec(ctx, "UPDATE ugc_video SET deleted = 1, submit =1 WHERE deleted = 0 LIMIT 1")
		}
		cids, err = d.DeletedVideos(ctx)
		So(err, ShouldBeNil)
		So(len(cids), ShouldBeGreaterThan, 0)
		fmt.Println(cids)
	}))
}

View File

@@ -0,0 +1,78 @@
package ugc
import (
"context"
"fmt"
"go-common/app/job/main/tv/model/ugc"
"go-common/library/database/sql"
"go-common/library/log"
"go-common/library/xstr"
)
const (
_delArc = "UPDATE ugc_archive SET deleted = 1, submit = 1 WHERE aid = ? AND deleted = 0"
_delVideos = "UPDATE ugc_video SET deleted = 1, submit = 1 WHERE aid = ? AND deleted = 0"
_delVideo = "UPDATE ugc_video SET deleted = 1, submit = 1 WHERE cid = ? AND deleted = 0"
_delVideoCids = "UPDATE ugc_video SET deleted = 1, submit = 1 WHERE cid IN (%s) AND deleted = 0"
_checkVideos = "SELECT cid FROM ugc_video WHERE aid = ? AND deleted = 0 LIMIT 1"
)
// TxDelArc soft-deletes an archive and flags it for sync submission.
func (d *Dao) TxDelArc(tx *sql.Tx, aid int64) (err error) {
	_, err = tx.Exec(_delArc, aid)
	if err != nil {
		log.Error("TxDelArc, failed to update: (%v), Error: %v", aid, err)
	}
	return
}
// DelVideos soft-deletes all videos of one archive and flags them for sync.
func (d *Dao) DelVideos(ctx context.Context, aid int64) (err error) {
	_, err = d.DB.Exec(ctx, _delVideos, aid)
	if err != nil {
		log.Error("DelVideos, failed to update: (%v), Error: %v", aid, err)
		return
	}
	log.Info("Aid %d is deleted, delete its videos", aid)
	return
}
// TxDelVideos soft-deletes the archive's videos within the given transaction.
func (d *Dao) TxDelVideos(tx *sql.Tx, aid int64) (err error) {
	_, err = tx.Exec(_delVideos, aid)
	if err != nil {
		log.Error("TxDelVideos, failed to update: (%v), Error: %v", aid, err)
	}
	return
}
// TxDelVideo soft-deletes a single video by cid within the given transaction.
func (d *Dao) TxDelVideo(tx *sql.Tx, cid int64) (err error) {
	_, err = tx.Exec(_delVideo, cid)
	if err != nil {
		log.Error("TxDelVideo, failed to update: (%v), Error: %v", cid, err)
	}
	return
}
// DelVideoArc soft-deletes the given videos of an archive; when no active
// video is left afterwards, the archive itself is also soft-deleted.
// arcValid reports whether the archive still has at least one active video.
func (d *Dao) DelVideoArc(ctx context.Context, req *ugc.DelVideos) (arcValid bool, err error) {
	var cid int64
	arcValid = true
	if _, err = d.DB.Exec(ctx, fmt.Sprintf(_delVideoCids, xstr.JoinInts(req.CIDs))); err != nil {
		log.Error("DelVideos Cids %v, Aid %d, Error: %v", req.CIDs, req.AID, err)
		return
	}
	if err = d.DB.QueryRow(ctx, _checkVideos, req.AID).Scan(&cid); err != nil { // if no active videos, delete the arc
		if err == sql.ErrNoRows {
			// ErrNoRows means every video of the archive is gone: clear the
			// error and cascade the deletion to the archive record itself
			err = nil
			arcValid = false
			if _, err = d.DB.Exec(ctx, _delArc, req.AID); err != nil {
				log.Error("DelVideos DelArc Cids %v, Aid %d, Error: %v", req.CIDs, req.AID, err)
				return
			}
			log.Info("DelArc Aid %d Because No Active Video", req.AID)
		} else {
			log.Error("DelVideos Cids %v, Aid %d, Error: %v", req.CIDs, req.AID, err)
			return
		}
	}
	return
}

View File

@@ -0,0 +1,127 @@
package ugc
import (
"fmt"
"testing"
"go-common/app/job/main/tv/model/ugc"
arccli "go-common/app/service/main/archive/api"
. "github.com/smartystreets/goconvey/convey"
)
// TestUgcTxDelArc verifies TxDelArc does not error on a zero aid.
func TestUgcTxDelArc(t *testing.T) {
	tx, _ := d.DB.Begin(ctx)
	Convey("TxDelArc", t, func(c C) {
		c.Convey("Then err should be nil.", func(c C) {
			c.So(d.TxDelArc(tx, int64(0)), ShouldBeNil)
		})
	})
}
// TestUgcTxDelVideos verifies TxDelVideos does not error on a zero aid.
func TestUgcTxDelVideos(t *testing.T) {
	tx, _ := d.DB.Begin(ctx)
	Convey("TxDelVideos", t, func(c C) {
		c.Convey("Then err should be nil.", func(c C) {
			c.So(d.TxDelVideos(tx, int64(0)), ShouldBeNil)
		})
	})
}
// TestUgcTxDelVideo verifies TxDelVideo does not error on a zero cid.
func TestUgcTxDelVideo(t *testing.T) {
	tx, _ := d.DB.Begin(ctx)
	Convey("TxDelVideo", t, func(c C) {
		c.Convey("Then err should be nil.", func(c C) {
			c.So(d.TxDelVideo(tx, int64(0)), ShouldBeNil)
		})
	})
}
// TestDao_DelVideoArc is an end-to-end DB test: it seeds one archive with two
// videos, deletes them one by one through DelVideoArc, and checks that the
// archive is cascade-deleted once its last active video goes away.
func TestDao_DelVideoArc(t *testing.T) {
	Convey("TestDao_DelVideoArc", t, WithDao(func(d *Dao) {
		var (
			aid   = int64(99998888)
			cid1  = int64(999988881)
			cid2  = int64(999988882)
			tx, _ = d.DB.Begin(ctx)
			arc   = &arccli.Arc{Aid: aid}
			// counters for the archive's active videos / archive rows
			countVQ        = "SELECT COUNT(1) FROM ugc_video WHERE aid = ? AND deleted = 0"
			countAQ        = "SELECT COUNT(1) FROM ugc_archive WHERE aid = ? AND deleted = 0"
			countV, countA int
			arcValid       bool
		)
		// add archive and two videos
		d.TxImportArc(tx, &arccli.Arc{Aid: aid})
		d.TxMnlVideos(tx, &arccli.ViewReply{
			Arc: arc,
			Pages: []*arccli.Page{
				{
					Cid: cid1,
				},
				{
					Cid: cid2,
				},
			},
		})
		tx.Commit()
		d.DB.QueryRow(ctx, countVQ, aid).Scan(&countV)
		So(countV, ShouldEqual, 2)
		// delete one video, still one active video under the archive, we keep the archive
		_, err := d.DelVideoArc(ctx, &ugc.DelVideos{
			AID:  aid,
			CIDs: []int64{cid1},
		})
		So(err, ShouldBeNil)
		d.DB.QueryRow(ctx, countVQ, aid).Scan(&countV)
		d.DB.QueryRow(ctx, countAQ, aid).Scan(&countA)
		So(countV, ShouldEqual, 1)
		So(countA, ShouldEqual, 1)
		So(err, ShouldBeNil)
		// delete the last video, the archive should also be deleted
		arcValid, err = d.DelVideoArc(ctx, &ugc.DelVideos{
			AID:  aid,
			CIDs: []int64{cid2},
		})
		So(err, ShouldBeNil)
		So(arcValid, ShouldBeFalse)
		d.DB.QueryRow(ctx, countVQ, aid).Scan(&countV)
		d.DB.QueryRow(ctx, countAQ, aid).Scan(&countA)
		So(countV, ShouldEqual, 0)
		So(countA, ShouldEqual, 0)
	}))
}
// TestDao_DelVideos seeds two videos for one archive and verifies DelVideos
// soft-deletes them all (the active count drops from 2 to 0).
func TestDao_DelVideos(t *testing.T) {
	Convey("TestDao_DelVideos", t, WithDao(func(d *Dao) {
		var (
			aid  = int64(99998888)
			cid1 = 99998887
			cid2 = 99998886
			// REPLACE keeps the fixture idempotent across repeated runs
			insertSQL = "REPLACE INTO ugc_video (aid, cid) VALUES (%d, %d)"
		)
		ress, err2 := d.DB.Exec(ctx, fmt.Sprintf(insertSQL, aid, cid1))
		fmt.Println(fmt.Sprintf(insertSQL, aid, cid1))
		fmt.Println(err2)
		fmt.Println(ress.RowsAffected())
		d.DB.Exec(ctx, fmt.Sprintf(insertSQL, aid, cid2))
		var count int
		d.DB.QueryRow(ctx, "SELECT COUNT(1) FROM ugc_video WHERE aid = ? AND deleted = 0", aid).Scan(&count)
		So(count, ShouldEqual, 2)
		err := d.DelVideos(ctx, aid)
		So(err, ShouldBeNil)
		d.DB.QueryRow(ctx, "SELECT COUNT(1) FROM ugc_video WHERE aid = ? AND deleted = 0", aid).Scan(&count)
		So(count, ShouldEqual, 0)
	}))
}

View File

@@ -0,0 +1,96 @@
package ugc
import (
"context"
"fmt"
"time"
ugcmdl "go-common/app/job/main/tv/model/ugc"
"go-common/app/service/main/archive/api"
"go-common/library/database/sql"
"go-common/library/log"
"go-common/library/xstr"
)
const (
_import = "SELECT mid FROM ugc_uploader WHERE toinit = 1 AND retry < UNIX_TIMESTAMP(now()) AND deleted = 0 LIMIT "
_postponeUp = "UPDATE ugc_uploader SET retry = ? WHERE mid = ? AND deleted = 0"
_finishUp = "UPDATE ugc_uploader SET toinit = 0 WHERE mid = ? AND deleted = 0"
_filterAids = "SELECT aid FROM ugc_archive WHERE aid IN (%s) AND deleted = 0"
_importArc = "REPLACE INTO ugc_archive(aid, videos, mid, typeid, title, cover, content, duration, copyright, pubtime, state) VALUES (?,?,?,?,?,?,?,?,?,?,?)"
)
// TxImportArc writes an archive fetched from the archive RPC into ugc_archive
// within the given transaction.
func (d *Dao) TxImportArc(tx *sql.Tx, arc *api.Arc) (err error) {
	args := []interface{}{
		arc.Aid, arc.Videos, arc.Author.Mid, arc.TypeID, arc.Title, arc.Pic,
		arc.Desc, arc.Duration, arc.Copyright, arc.PubDate, arc.State,
	}
	if _, err = tx.Exec(_importArc, args...); err != nil {
		log.Error("_importArc, failed to update: (%v), Error: %v", arc, err)
	}
	return
}
// Import picks the uppers (toinit=1, retry elapsed) to initialize with RPC data.
func (d *Dao) Import(c context.Context) (res []*ugcmdl.Upper, err error) {
	query := fmt.Sprintf("%s%d", _import, d.conf.UgcSync.Batch.ImportNum)
	rows, err := d.DB.Query(c, query)
	if err != nil {
		log.Error("d.Import.Query error(%v)", err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		upper := &ugcmdl.Upper{}
		if err = rows.Scan(&upper.MID); err != nil {
			log.Error("Manual row.Scan() error(%v)", err)
			return
		}
		res = append(res, upper)
	}
	if err = rows.Err(); err != nil {
		log.Error("d.Import.Query error(%v)", err)
	}
	return
}
// PpUpper postpones the upper's init retry by ErrorWait seconds after an error.
func (d *Dao) PpUpper(c context.Context, mid int64) (err error) {
	retryAt := time.Now().Unix() + int64(d.conf.UgcSync.Frequency.ErrorWait)
	if _, err = d.DB.Exec(c, _postponeUp, retryAt, mid); err != nil {
		log.Error("PpUpper, failed to delay: (%v,%v), Error: %v", retryAt, mid, err)
	}
	return
}
// FinishUpper resets the upper's toinit flag to 0 once the import is done.
func (d *Dao) FinishUpper(c context.Context, mid int64) (err error) {
	_, err = d.DB.Exec(c, _finishUp, mid)
	if err != nil {
		log.Error("FinishUpper, failed to Update: (%v,%v), Error: %v", _finishUp, mid, err)
	}
	return
}
// FilterExist removes from res the aids that already exist in ugc_archive,
// leaving only the archives that still need to be inserted.
func (d *Dao) FilterExist(c context.Context, res *map[int64]*api.Arc, aids []int64) (err error) {
	var rows *sql.Rows
	if rows, err = d.DB.Query(c, fmt.Sprintf(_filterAids, xstr.JoinInts(aids))); err != nil {
		// NOTE: Query never returns sql.ErrNoRows (only QueryRow.Scan does),
		// so the old ErrNoRows special-case here was dead code and was removed.
		log.Error("d._filterAids.Query error(%v)", err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		var aidEx int64
		if err = rows.Scan(&aidEx); err != nil {
			// was mislabeled "Manual"
			log.Error("FilterExist row.Scan() error(%v)", err)
			return
		}
		delete(*res, aidEx) // remove existing data from the map
	}
	if err = rows.Err(); err != nil {
		log.Error("d.FilterExist.Query error(%v)", err)
	}
	return
}

View File

@@ -0,0 +1,48 @@
package ugc
import (
"go-common/app/service/main/archive/api"
"testing"
. "github.com/smartystreets/goconvey/convey"
)
// TestDao_FinishUpper verifies FinishUpper does not error.
func TestDao_FinishUpper(t *testing.T) {
	Convey("TestDao_FinishUpper", t, WithDao(func(d *Dao) {
		So(d.FinishUpper(ctx, 1), ShouldBeNil)
	}))
}
// TestDao_Import verifies Import returns pending uppers.
func TestDao_Import(t *testing.T) {
	Convey("TestDao_Import", t, WithDao(func(d *Dao) {
		uppers, err := d.Import(ctx)
		So(err, ShouldBeNil)
		So(len(uppers), ShouldBeGreaterThan, 0)
	}))
}
// TestDao_PpUpper verifies PpUpper does not error.
func TestDao_PpUpper(t *testing.T) {
	Convey("TestDao_PpUpper", t, WithDao(func(d *Dao) {
		So(d.PpUpper(ctx, 1), ShouldBeNil)
	}))
}
// TestDao_BeginTran opens and rolls back a transaction.
func TestDao_BeginTran(t *testing.T) {
	Convey("TestDao_BeginTran", t, WithDao(func(d *Dao) {
		tx, err := d.BeginTran(ctx)
		So(err, ShouldBeNil)
		tx.Rollback() // nothing written, just release the tx
	}))
}
// TestDao_FilterExist filters a list of known aids against the DB.
func TestDao_FilterExist(t *testing.T) {
	Convey("TestDao_FilterExist", t, WithDao(func(d *Dao) {
		arcs := make(map[int64]*api.Arc)
		aids := []int64{
			10106351, 10106309, 10106308, 10106307, 10106306, 10106284, 10105807,
			10101484, 10100856, 10100855, 10100486, 10100146, 10100145, 10100144,
			10100044, 10099332, 10099167, 10099150, 10099149, 10098970,
		}
		So(d.FilterExist(ctx, &arcs, aids), ShouldBeNil)
	}))
}

View File

@@ -0,0 +1,93 @@
package ugc
import (
"context"
"fmt"
"time"
ugcmdl "go-common/app/job/main/tv/model/ugc"
arccli "go-common/app/service/main/archive/api"
"go-common/library/database/sql"
"go-common/library/log"
)
const (
_manual = "SELECT id,aid FROM ugc_archive WHERE manual = 1 AND retry < UNIX_TIMESTAMP(now()) AND deleted = 0 LIMIT "
_postpone = "UPDATE ugc_archive SET retry = ? WHERE aid = ? AND deleted = 0"
_importFinish = "UPDATE ugc_archive SET manual = 0 WHERE aid = ? AND deleted = 0"
_manualArc = "UPDATE ugc_archive SET videos = ?, mid = ?, typeid = ?, title = ?, cover = ?, content = ?, duration = ?, " +
"copyright = ?, pubtime = ?, state = ?, submit = ? WHERE aid = ? AND deleted = 0"
_autoArc = "REPLACE INTO ugc_archive (videos, mid, typeid, title, cover, content, duration, copyright, pubtime, state, submit, aid) VALUES " +
"(?,?,?,?,?,?,?,?,?,?,?,?)"
_importVideo = "REPLACE INTO ugc_video (aid,cid,eptitle,index_order,duration,description) VALUES (?,?,?,?,?,?)"
)
// TxMnlArc updates an existing archive row with the latest API data and flags
// it for submission, within the given transaction.
func (d *Dao) TxMnlArc(tx *sql.Tx, arc *ugcmdl.Archive) (err error) {
	if _, err = tx.Exec(_manualArc,
		arc.Videos, arc.MID, arc.TypeID, arc.Title, arc.Cover, arc.Content, arc.Duration,
		arc.Copyright, arc.Pubtime, arc.State, _needSubmit, arc.AID); err != nil {
		// was mislabeled "_importArc" although this executes _manualArc
		log.Error("TxMnlArc, failed to update: (%v), Error: %v", arc, err)
	}
	return
}
// TxAutoArc inserts (or replaces) an archive row with the latest API data and
// flags it for submission, within the given transaction.
func (d *Dao) TxAutoArc(tx *sql.Tx, arc *ugcmdl.Archive) (err error) {
	args := []interface{}{
		arc.Videos, arc.MID, arc.TypeID, arc.Title, arc.Cover, arc.Content,
		arc.Duration, arc.Copyright, arc.Pubtime, arc.State, _needSubmit, arc.AID,
	}
	if _, err = tx.Exec(_autoArc, args...); err != nil {
		log.Error("TxAutoArc, failed to update: (%v), Error: %v", arc, err)
	}
	return
}
// TxMnlVideos replaces the archive's video rows with the pages fetched from
// the archive API, within the given transaction.
// (The old doc comment was truncated mid-sentence.)
func (d *Dao) TxMnlVideos(tx *sql.Tx, view *arccli.ViewReply) (err error) {
	for _, v := range view.Pages {
		if _, err = tx.Exec(_importVideo, view.Arc.Aid, v.Cid, v.Part, v.Page, v.Duration, v.Desc); err != nil {
			// was mislabeled "_importArc" although this inserts videos
			log.Error("TxMnlVideos, failed to insert: (%v), Error: %v", v, err)
			return
		}
	}
	return
}
// TxMnlStatus resets the manual flag of the given aid to 0 within the
// transaction, marking the manual import as finished.
func (d *Dao) TxMnlStatus(tx *sql.Tx, aid int64) (err error) {
	_, err = tx.Exec(_importFinish, aid)
	if err != nil {
		log.Error("_importFinish, failed to update: (%v), Error: %v", aid, err)
	}
	return
}
// Manual picks a batch of archives that were added manually and are due for
// retry. The batch size comes from conf.UgcSync.Batch.ManualNum.
func (d *Dao) Manual(c context.Context) (res []*ugcmdl.Archive, err error) {
	var rows *sql.Rows
	if rows, err = d.DB.Query(c, _manual+fmt.Sprintf("%d", d.conf.UgcSync.Batch.ManualNum)); err != nil {
		// log tag fixed: previously said "d.Import.Query"
		log.Error("d.Manual.Query error(%v)", err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		r := &ugcmdl.Archive{}
		if err = rows.Scan(&r.ID, &r.AID); err != nil {
			log.Error("Manual row.Scan() error(%v)", err)
			return
		}
		res = append(res, r)
	}
	// surface any iteration error that Next() swallowed
	if err = rows.Err(); err != nil {
		log.Error("d.Manual.Query error(%v)", err)
	}
	return
}
// Ppmnl postpones the manual import of an archive after an error by pushing its
// retry timestamp forward by the configured error-wait interval.
func (d *Dao) Ppmnl(c context.Context, aid int64) (err error) {
	delay := time.Now().Unix() + int64(d.conf.UgcSync.Frequency.ErrorWait)
	if _, err = d.DB.Exec(c, _postpone, delay, aid); err != nil {
		log.Error("Ppmnl, failed to delay: (%v,%v), Error: %v", delay, aid, err)
	}
	return
}

View File

@@ -0,0 +1,47 @@
package ugc
import (
"fmt"
"testing"
"go-common/app/job/main/tv/model/ugc"
. "github.com/smartystreets/goconvey/convey"
)
// TestDao_Manual expects at least one manually-added archive pending import.
func TestDao_Manual(t *testing.T) {
	Convey("TestDao_Manual", t, WithDao(func(d *Dao) {
		arcs, err := d.Manual(ctx)
		for _, arc := range arcs {
			fmt.Println(arc)
		}
		So(err, ShouldBeNil)
		So(len(arcs), ShouldBeGreaterThan, 0)
	}))
}
// TestDao_Ppmnl checks that postponing a known aid succeeds.
func TestDao_Ppmnl(t *testing.T) {
	Convey("TestDao_Ppmnl", t, WithDao(func(d *Dao) {
		So(d.Ppmnl(ctx, 10099763), ShouldBeNil)
	}))
}
// TestDao_UpdateArc updates an archive row from a fixed databus message.
func TestDao_UpdateArc(t *testing.T) {
	Convey("TestDao_UpdateArc", t, WithDao(func(d *Dao) {
		msg := &ugc.ArchDatabus{
			Aid:       10099763,
			Mid:       452156,
			Videos:    1,
			TypeID:    174,
			Title:     "test",
			Cover:     "testPic",
			Content:   "testDesc",
			Duration:  300,
			Copyright: 1,
			PubTime:   "2018-06-05",
			State:     5,
		}
		So(d.UpdateArc(ctx, msg), ShouldBeNil)
	}))
}

View File

@@ -0,0 +1,176 @@
package ugc
import (
"context"
"fmt"
ugcmdl "go-common/app/job/main/tv/model/ugc"
"go-common/library/cache/memcache"
"go-common/library/database/sql"
"go-common/library/log"
)
const (
	// memcache key templates, parameterized by aid and cid respectively.
	_mcArcCMSKey   = "arc_cms_%d"
	_mcVideoCMSKey = "video_cms_%d"
	// _totalArcs counts every archive of an uploader, deleted rows included.
	_totalArcs = "SELECT count(1) FROM ugc_archive WHERE mid = ?"
	// _totalVideos counts every ugc_video row, deleted rows included.
	_totalVideos = "SELECT count(1) FROM ugc_video"
	// _activeVideos counts the non-deleted videos of one archive.
	_activeVideos = "SELECT count(1) FROM ugc_video WHERE aid = ? AND deleted = 0"
	// _cntArcVideo counts all videos of one archive, deleted rows included.
	_cntArcVideo = "SELECT count(1) FROM ugc_video WHERE aid = ?"
	// _pickArcMC pages through an uploader's archives; fmt args: mid, offset, limit.
	_pickArcMC = "SELECT title, aid, content, cover, typeid, pubtime, videos, valid, deleted, result,copyright, state, mid,duration FROM ugc_archive " +
		"WHERE mid = %d LIMIT %d,%d"
	// _pickArcVideoMC pages through one archive's videos by ascending cid; fmt arg: limit.
	_pickArcVideoMC = "SELECT cid, eptitle, aid, index_order, valid, deleted, result FROM ugc_video " +
		"WHERE aid = ? AND cid > ? ORDER BY cid LIMIT 0,%d"
	// _transFailVideos selects transcode-failed (transcoded = 2) cids above the critical cid (fmt arg).
	_transFailVideos = "SELECT cid FROM ugc_video WHERE aid = ? AND cid > %d AND transcoded = 2 and deleted = 0"
)
// arcCMSCacheKey builds the memcache key holding an archive's CMS data.
func arcCMSCacheKey(aid int64) string {
	return fmt.Sprintf(_mcArcCMSKey, aid)
}
// videoCMSCacheKey builds the memcache key holding a video's CMS data.
func videoCMSCacheKey(cid int) string {
	return fmt.Sprintf(_mcVideoCMSKey, cid)
}
// SetArcCMS caches the archive's CMS data in memcache as JSON, keyed by aid.
func (d *Dao) SetArcCMS(ctx context.Context, res *ugcmdl.ArcCMS) (err error) {
	conn := d.mc.Get(ctx)
	defer conn.Close()
	key := arcCMSCacheKey(res.AID)
	item := &memcache.Item{Key: key, Object: res, Expiration: d.mcExpire, Flags: memcache.FlagJSON}
	if err = conn.Set(item); err != nil {
		log.Error("conn.Set(%s,%v) error(%v)", key, res, err)
	}
	return
}
// SetVideoCMS caches the video's CMS data in memcache as JSON, keyed by cid.
func (d *Dao) SetVideoCMS(ctx context.Context, res *ugcmdl.VideoCMS) (err error) {
	conn := d.mc.Get(ctx)
	defer conn.Close()
	key := videoCMSCacheKey(res.CID)
	item := &memcache.Item{Key: key, Object: res, Expiration: d.mcExpire, Flags: memcache.FlagJSON}
	if err = conn.Set(item); err != nil {
		log.Error("conn.Set(%s,%v) error(%v)", key, res, err)
	}
	return
}
// UpArcsCnt counts all the archives of an uploader, deleted rows included.
func (d *Dao) UpArcsCnt(c context.Context, mid int64) (count int, err error) {
	row := d.DB.QueryRow(c, _totalArcs, mid)
	if err = row.Scan(&count); err != nil {
		log.Error("d.UpArcsCnt.Query error(%v)", err)
	}
	return
}
// TotalVideos counts the total number of videos, including the deleted ones.
// (Comment corrected: this counts ugc_video rows, not archives.)
func (d *Dao) TotalVideos(c context.Context) (count int, err error) {
	if err = d.DB.QueryRow(c, _totalVideos).Scan(&count); err != nil {
		log.Error("d.TotalVideos.Query error(%v)", err)
	}
	return
}
// ActVideos reports whether the archive still has at least one non-deleted video.
func (d *Dao) ActVideos(c context.Context, aid int64) (has bool, err error) {
	var count int
	if err = d.DB.QueryRow(c, _activeVideos, aid).Scan(&count); err != nil {
		// log tag fixed: previously copied from TotalVideos
		log.Error("d.ActVideos.Query error(%v)", err)
		return
	}
	has = count > 0
	return
}
// ArcVideoCnt counts the videos of one archive, deleted rows included.
func (d *Dao) ArcVideoCnt(c context.Context, aid int64) (count int, err error) {
	if err = d.DB.QueryRow(c, _cntArcVideo, aid).Scan(&count); err != nil {
		// log tag fixed: previously copied from TotalVideos
		log.Error("d.ArcVideoCnt.Query error(%v)", err)
	}
	return
}
// PickUpArcs loads one page of an uploader's archives for syncing into MC.
// nbPiece is the zero-based page number and nbData the page size.
func (d *Dao) PickUpArcs(ctx context.Context, mid, nbPiece, nbData int) (res []*ugcmdl.ArcFull, err error) {
	query := fmt.Sprintf(_pickArcMC, mid, nbPiece*nbData, nbData)
	rows, err := d.DB.Query(ctx, query)
	if err != nil {
		log.Error("d.PickUpArcs.Query: %s error(%v)", query, err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		item := new(ugcmdl.ArcFull)
		// column order mirrors _pickArcMC's SELECT list
		if err = rows.Scan(&item.Title, &item.AID, &item.Content, &item.Cover, &item.TypeID, &item.Pubtime,
			&item.Videos, &item.Valid, &item.Deleted, &item.Result, &item.Copyright, &item.State, &item.MID, &item.Duration); err != nil {
			log.Error("PickUpArcs row.Scan() error(%v)", err)
			return
		}
		res = append(res, item)
	}
	if err = rows.Err(); err != nil {
		log.Error("d.PickUpArcs.Query error(%v)", err)
	}
	return
}
// PickArcVideo loads up to nbData videos of the archive whose cid is greater
// than lastID, ordered by ascending cid. myLast is the last cid read, which the
// caller feeds back in as lastID to fetch the next page.
func (d *Dao) PickArcVideo(ctx context.Context, aid int64, lastID int, nbData int) (res []*ugcmdl.VideoCMS, myLast int, err error) {
	var (
		rows  *sql.Rows
		query = fmt.Sprintf(_pickArcVideoMC, nbData)
	)
	if rows, err = d.DB.Query(ctx, query, aid, lastID); err != nil {
		// log tag fixed: previously said "d._pickArcVideoMC.Query"
		log.Error("d.PickArcVideo.Query: %s error(%v)", query, err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		li := new(ugcmdl.VideoCMS)
		if err = rows.Scan(&li.CID, &li.Title, &li.AID, &li.IndexOrder, &li.Valid, &li.Deleted, &li.Result); err != nil {
			// log tag fixed: previously said "PickVideoMC"
			log.Error("PickArcVideo row.Scan() error(%v)", err)
			return
		}
		res = append(res, li)
		myLast = li.CID
	}
	if err = rows.Err(); err != nil {
		log.Error("d.PickArcVideo.Query error(%v)", err)
	}
	return
}
// TransFailVideos returns the cids of the archive's videos that failed
// transcoding (transcoded = 2), restricted to cids above d.criCID.
func (d *Dao) TransFailVideos(ctx context.Context, aid int64) (cids []int64, err error) {
	var (
		rows  *sql.Rows
		query = fmt.Sprintf(_transFailVideos, d.criCID)
	)
	if rows, err = d.DB.Query(ctx, query, aid); err != nil {
		// log message fixed: the value logged is an aid, not a cid
		log.Error("d.TransFailVideos.Query: Aid %d error(%v)", aid, err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		var cid int64
		if err = rows.Scan(&cid); err != nil {
			// log tag fixed: previously copied from PickVideoMC
			log.Error("TransFailVideos row.Scan() error(%v)", err)
			return
		}
		cids = append(cids, cid)
	}
	if err = rows.Err(); err != nil {
		log.Error("d.TransFailVideos.Query error(%v)", err)
	}
	return
}

View File

@@ -0,0 +1,132 @@
package ugc
import (
"context"
"encoding/json"
"fmt"
"testing"
ugcmdl "go-common/app/job/main/tv/model/ugc"
. "github.com/smartystreets/goconvey/convey"
)
// TestDao_TotalVideos expects at least one ugc_video row in the test DB.
func TestDao_TotalVideos(t *testing.T) {
	Convey("TestDao_TotalVideos", t, WithDao(func(d *Dao) {
		total, err := d.TotalVideos(ctx)
		So(err, ShouldBeNil)
		So(total, ShouldBeGreaterThan, 0)
		fmt.Println(total)
	}))
}
// TestDao_ArcVideoCnt verifies the per-archive video counter and the paged
// video loader against a live archive picked from the test DB.
func TestDao_ArcVideoCnt(t *testing.T) {
	Convey("TestDao_ArcVideoCnt", t, WithDao(func(d *Dao) {
		var aid int64
		d.DB.QueryRow(ctx, "select aid from ugc_video where deleted = 0 limit 1").Scan(&aid)
		if aid == 0 {
			fmt.Println("empty arc")
			return
		}
		count, errCnt := d.ArcVideoCnt(ctx, aid)
		So(errCnt, ShouldBeNil)
		fmt.Println(aid)
		So(count, ShouldBeGreaterThan, 0)
		fmt.Println(count)
		videos, lastID, errVideo := d.PickArcVideo(ctx, aid, 0, 10)
		So(errVideo, ShouldBeNil)
		So(len(videos), ShouldBeGreaterThan, 0)
		So(lastID, ShouldBeGreaterThan, 0)
		data, _ := json.Marshal(videos)
		fmt.Println(string(data))
	}))
}
// TestDao_SetArc caches a fixed archive CMS record and expects no error.
func TestDao_SetArc(t *testing.T) {
	Convey("TestDao_SetArc", t, WithDao(func(d *Dao) {
		arc := &ugcmdl.ArcCMS{
			Title: "testtest",
			AID:   777,
		}
		So(d.SetArcCMS(ctx, arc), ShouldBeNil)
	}))
}
// TestDao_UpArcsCnt checks the all-rows counter, then soft-deletes one row and
// expects the non-deleted counter to fall below it.
func TestDao_UpArcsCnt(t *testing.T) {
	Convey("TestDao_UpArcsCnt", t, WithDao(func(d *Dao) {
		var mid int64
		d.DB.QueryRow(ctx, "select mid from ugc_archive where deleted = 0 limit 1").Scan(&mid)
		if mid == 0 {
			fmt.Println("empty arc")
			return
		}
		total, err := d.UpArcsCnt(ctx, mid)
		So(err, ShouldBeNil)
		So(total, ShouldBeGreaterThan, 0)
		fmt.Println("mid ", mid, " cnt", total)
		if total > 1 {
			d.DB.Exec(context.Background(), "update ugc_archive set deleted = 1 where mid = ? and deleted = 0 limit 1", mid)
			alive, errAlive := d.CountUpArcs(context.Background(), mid)
			So(errAlive, ShouldBeNil)
			So(total, ShouldBeGreaterThan, alive)
			fmt.Println("all: ", total, " non-deleted: ", alive)
		}
	}))
}
// TestDao_TransFailVideos picks an archive with transcode-failed videos and
// expects TransFailVideos to return at least one cid for it.
func TestDao_TransFailVideos(t *testing.T) {
	Convey("TestDao_TransFailVideos", t, WithDao(func(d *Dao) {
		const pickAid = "SELECT aid FROM ugc_video WHERE cid > 12780000 AND transcoded = 2 and deleted = 0 limit 1"
		var aid int64
		d.DB.QueryRow(context.Background(), pickAid).Scan(&aid)
		if aid == 0 {
			fmt.Println("Empty archives")
			return
		}
		failed, err := d.TransFailVideos(ctx, aid)
		So(err, ShouldBeNil)
		So(len(failed), ShouldBeGreaterThan, 0)
		fmt.Println("aid ", aid, " cids ", failed)
	}))
}
// TestDao_ActVideos plants a deleted video, expects ActVideos false, then
// revives it and expects true.
func TestDao_ActVideos(t *testing.T) {
	Convey("TestDao_ActVideos", t, WithDao(func(d *Dao) {
		const (
			aid = int64(88888888)
			cid = 99999999
		)
		d.DB.Exec(ctx, fmt.Sprintf("REPLACE INTO ugc_video (aid, cid, deleted) VALUES (%d, %d, 1)", aid, cid))
		active, err := d.ActVideos(ctx, aid)
		So(err, ShouldBeNil)
		So(active, ShouldBeFalse)
		d.DB.Exec(ctx, "UPDATE ugc_video SET deleted = 0 WHERE cid = ?", cid)
		active, err = d.ActVideos(ctx, aid)
		So(err, ShouldBeNil)
		So(active, ShouldBeTrue)
	}))
}
// TestDao_PickArcMC loads two different pages of the most prolific uploader's
// archives and expects both to be non-empty.
func TestDao_PickArcMC(t *testing.T) {
	Convey("TestDao_PickArcMC", t, WithDao(func(d *Dao) {
		var mid int
		d.DB.QueryRow(ctx, "select mid from ugc_archive where deleted = 0 group by mid order by count(aid) desc limit 1").Scan(&mid)
		if mid == 0 {
			fmt.Println("empty archive")
			return
		}
		fmt.Println("mid ", mid)
		page0, errPage0 := d.PickUpArcs(ctx, mid, 0, 5)
		So(errPage0, ShouldBeNil)
		So(len(page0), ShouldBeGreaterThan, 0)
		page7, errPage7 := d.PickUpArcs(ctx, mid, 7, 5)
		So(errPage7, ShouldBeNil)
		So(len(page7), ShouldBeGreaterThan, 0)
		json0, _ := json.Marshal(page0)
		json7, _ := json.Marshal(page7)
		fmt.Println(string(json0))
		fmt.Println(string(json7))
	}))
}

View File

@@ -0,0 +1,44 @@
package ugc
import (
"context"
"fmt"
"go-common/app/job/main/tv/model/common"
"go-common/library/database/sql"
"go-common/library/log"
"go-common/library/xstr"
)
const (
	// _passedArc picks aid/ctime of passed (result = 1), valid, non-deleted
	// archives; the fmt arg is the comma-joined list of type ids.
	_passedArc = "SELECT aid, ctime FROM ugc_archive WHERE typeid IN (%s) AND result = 1 AND valid = 1 and deleted = 0"
)
// PassedArcs picks the aid and ctime of passed, valid, non-deleted archives
// under the given type ids, used to build the index-page ranking.
// An empty tids slice yields a nil result with no query issued.
func (d *Dao) PassedArcs(c context.Context, tids []int64) (res []*common.IdxRank, err error) {
	if len(tids) == 0 {
		return
	}
	query := fmt.Sprintf(_passedArc, xstr.JoinInts(tids))
	rows, err := d.DB.Query(c, query)
	if err != nil {
		log.Error("d.PassedArcs.Query error(%v)", err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		rank := new(common.IdxRank)
		if err = rows.Scan(&rank.ID, &rank.Ctime); err != nil {
			log.Error("PassedArcs row.Scan() error(%v)", err)
			return
		}
		res = append(res, rank)
	}
	if err = rows.Err(); err != nil {
		log.Error("d.PassedArcs.Query error(%v)", err)
	}
	return
}

Some files were not shown because too many files have changed in this diff Show More