Create & Init Project...

This commit is contained in:
2019-04-22 18:49:16 +08:00
commit fc4fa37393
25440 changed files with 4054998 additions and 0 deletions

View File

@@ -0,0 +1,41 @@
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [
":package-srcs",
"//app/admin/main/videoup/cmd:all-srcs",
"//app/admin/main/videoup/conf:all-srcs",
"//app/admin/main/videoup/dao/archive:all-srcs",
"//app/admin/main/videoup/dao/data:all-srcs",
"//app/admin/main/videoup/dao/databus:all-srcs",
"//app/admin/main/videoup/dao/manager:all-srcs",
"//app/admin/main/videoup/dao/monitor:all-srcs",
"//app/admin/main/videoup/dao/music:all-srcs",
"//app/admin/main/videoup/dao/oversea:all-srcs",
"//app/admin/main/videoup/dao/search:all-srcs",
"//app/admin/main/videoup/dao/staff:all-srcs",
"//app/admin/main/videoup/dao/tag:all-srcs",
"//app/admin/main/videoup/dao/task:all-srcs",
"//app/admin/main/videoup/dao/track:all-srcs",
"//app/admin/main/videoup/http:all-srcs",
"//app/admin/main/videoup/model/archive:all-srcs",
"//app/admin/main/videoup/model/manager:all-srcs",
"//app/admin/main/videoup/model/message:all-srcs",
"//app/admin/main/videoup/model/monitor:all-srcs",
"//app/admin/main/videoup/model/music:all-srcs",
"//app/admin/main/videoup/model/oversea:all-srcs",
"//app/admin/main/videoup/model/search:all-srcs",
"//app/admin/main/videoup/model/track:all-srcs",
"//app/admin/main/videoup/model/up:all-srcs",
"//app/admin/main/videoup/model/utils:all-srcs",
"//app/admin/main/videoup/service:all-srcs",
],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,531 @@
#### Archive Review Admin API
##### Version 1.29.4
> 1. Change the API URL for monitoring data
> 2. Fix PGC data being hidden in the archive monitoring list
##### Version 1.29.3
> 1. Remove the app config
##### Version 1.29.2
> 1. Use the up-service gRPC
##### Version 1.29.1
> 1. Remove unused archive reports
##### Version 1.29.0
> 1. Generic batch tag API for adding or deleting tags: x/admin/videoup/archive/batch/tag
> 1.1 Support channel recheck with form_list = channel_review
> 1.2 Support adminBind/upBind (adminBind by default) via is_up_bind=true
> 1.3 Support syncing hidden tags via sync_hidden_tag=true
> 1.4 Since the archive service does not cache tags, no force_sync is needed; dropping the review DB column archive.tag is planned
##### Version 1.28.1
> 1. Fix account gRPC errors in archive and video search
##### Version 1.28.0
> 1. Joint submission admin
##### Version 1.27.8
> 1. Upgrade the account API to gRPC and simplify the config
##### Version 1.27.7
> 1. Batch category change supports a data-backfill mode that only notifies the archive service (force_sync)
##### Version 1.27.6
> 1. Expand login logs from 1,000 to 10,000 entries
##### Version 1.27.5
> 1. Remove the default cover in the archive list
##### Version 1.27.4
> 1. Fix the wrong cover URL path in the search list
##### Version 1.27.3
> 1. Stay compatible with the videoup-task-admin parameter used to detect re-review; the task code in this project will be removed once the frontend fully migrates to v2
##### Version 1.27.2
> 1. Fix duplicate data caused by removing entries from the archive recheck list
##### Version 1.27.1
> 1. Fix multiple states being passed as a string to the archive search API
> 2. Fix incorrect recheck-list fields passed to the archive search API
> 3. Add keyword-relevance-first ranking to the archive search API
##### Version 1.27.0
> 1. UGC paid archive workflow
##### Version 1.26.3
> 1. Fix pagination when batch-searching aids in the all-archives list
> 2. Fix the monitoring list
##### Version 1.26.2
> 1. The second_round message supports toggling email notification; email is sent by default
##### Version 1.26.1
> 1. Migrate archive search over from manager-v4
##### Version 1.26.0
> 1. Add attribute bit 21 for parental-control mode
##### Version 1.25.8
> 1. The first_round message carries the uploader's fans count so the video cloud can prioritize transcoding
##### Version 1.25.7
> 1. Restore setting attribute bit 18 (AttrBitBadgepay) for PGC
##### Version 1.25.6
> 1. Remove setting attribute bit 18 (AttrBitBadgepay) for PGC
##### Version 1.25.5
> 1. Fix a nil pointer caused by an incorrect copyright-search response struct
##### Version 1.25.4
> 1. Fix auth being applied to non-video lists
##### Version 1.25.3
> 1. Exclude archives with state -100 from video monitoring stats
> 2. Fix pagination in the video monitoring result list
##### Version 1.25.2
> 1. Stop using the redis KEYS command
##### Version 1.25.1
> 1. Move weight data to a dedicated redis so it does not affect the main archive flow
##### Version 1.25.0
> 1. Add an API for counting archives pending in each state
##### Version 1.24.7
> 1. Remove the manual comment switch; it is now fully driven by videoup-report-job state linkage
##### Version 1.24.6
> 1. Adjust copyright-search error messages
##### Version 1.24.5
> 1. Add a commercial-order path from second to third review (supports commercial-order and special-category third review)
##### Version 1.24.4
> 1. hbase v2
##### Version 1.24.3
> 1. Fix a non-special-category archive with round 10 and state -6 being moved into third review when submitted in second review
##### Version 1.24.2
> 1. Adapt to the changed channel RPC response struct
##### Version 1.24.1
> 1. Adding QA for first-review video tasks no longer supports async, to avoid racing with task_utime reporting
##### Version 1.24.0
> 1. First-review video task auditing supports adding task QA
##### Version 1.23.22
> 1. Isolate the pre-release redis queue
##### Version 1.23.21
> 1. Add the soft-delete column deleted_at to the scheduled-publish table
##### Version 1.23.20
> 1. Add copyright API config to mitigate the frequent 504s from the copyright API
##### Version 1.23.19
> 1. Add the aitrack API: fetch aids of similar archives
##### Version 1.23.18
> 1. Fix writing video operation logs when the cid does not exist
> 2. Fix ParseInt errors when the id parameter contains whitespace in video queries
##### Version 1.23.17
> 1. Unify the search keyword level to low
> 2. The copyright API returns the first 30 records
##### Version 1.23.16
> 1. Migrate the video and copyright search APIs from v2 to v3 together with 谷安; the frontend-facing APIs also move from manager-v4 to Videoup-admin.
##### Version 1.23.15
> 1. Add a channel info query API
##### Version 1.23.14
> 1. Sync region policy groups with policy_id > 1 (restricted) to archive attribute bit 13 (and likewise when the restriction is removed)
##### Version 1.23.13
> 1. Support report emails for contracted uploaders
##### Version 1.23.12
> 1. Add a batch channel review API: adding or deleting tags may trigger channel recheck
##### Version 1.23.11
> 1. Report archive-side data for uploader credit score (upcredit pub)
> 2. Remove the audio library business
##### Version 1.23.10
> 1. Tag sync binds first- and second-level categories
> 2. Single-archive review submission no longer persists tags or writes tag change logs
##### Version 1.23.9
> 1. Add hot-list recheck
##### Version 1.23.8
> 1. Remove the large redis key task_weight
##### Version 1.23.7
> 1. Remove net/http/parse
##### Version 1.23.6
> 1. Channel recheck, single-archive review submission, and batch attribute changes support channel forbid
> 2. The tag API's unconditional recheck now uses the archive_recheck table instead of an attribute bit
##### Version 1.23.5
> 1. Tag API: archives submitted from the channel recheck list reset the attribute bit regardless of whether the live query marks them as channel recheck
##### Version 1.23.4
> 1. Remove the transitional first-pass code
##### Version 1.23.3
> 1. Fix a wrong version number in the changelog
##### Version 1.23.2
> 1. Fix taskweight redis expiration not taking effect
> 2. Sync the TimeFormat change from videoup-job
##### Version 1.23.1
> 1. Add a dedicated API for saving archive tags that triggers channel recheck under certain conditions
##### Version 1.23.0
> 1. Add policy group APIs
##### Version 1.22.11
> 1. Fix the wrong publish time in weight change logs
##### Version 1.22.10
> 1. Remove the large transaction when releasing tasks
> 2. Simplify time handling with timeformat
> 3. Read uid and uname from manager-admin instead of the database
##### Version 1.22.9
> 1. Archive private-order logs now include old and new private-order data
##### Version 1.22.8
> 1. Fix missing usernames in logs; the cookie stores only the session id, not uid and uname
##### Version 1.22.7
> 1. Fix a task_dispatch deadlock
##### Version 1.22.6
> 1. Hook review login logs up to the search platform
##### Version 1.22.5
> 1. The user-edit area of archive tracking shows only changed titles, covers, descriptions, and parts
##### Version 1.22.4
> 1. Logout no longer releases the first task immediately; it is released after a 5-minute delay
> 2. Stop using FOR UPDATE statements
##### Version 1.22.3
> 1. Stop logging private-order edits (private-order counting moved to up-service)
##### Version 1.22.2
> 1. Add the scheduled publish time to weight logs
##### Version 1.22.1
> 1. The online user list no longer queries the last logout time
> 2. Weight config supports an effective time range
##### Version 1.22.0
> 1. Archive BGM management
##### Version 1.21.5
> 1. Adjust the archive private-order log format
##### Version 1.21.4
> 1. Logout no longer checks whether the user is logged in
##### Version 1.21.3
> 1. Ignore errors from repeated logins and logouts
##### Version 1.21.2
> 1. Use auth.permit to set the uid instead of reading it from the cookie
##### Version 1.21.1
> 1. Archive commercial orders hooked up to the logging platform
##### Version 1.21.0
> 1. Weight config can now be set by category and submission source
> 2. Port the task release, task delay, and user login/logout APIs
> 3. Add APIs for viewing and configuring weight scores
##### Version 1.20.5
> 1. Add PolicyID logic
> 2. Add ApplyID logic
##### Version 1.20.4
> 1. Optimize log reporting
##### Version 1.20.2
> 1. Archives and videos hooked up to the logging platform
##### Version 1.20.1
> 1. Archive cover editing now handles relative-path truncation
##### Version 1.20.0
> 1. Add task weights
##### Version 1.19.5
> 1. Change the path
##### Version 1.19.4
> 1. Archive edits carry a modify flag for the message assistant
##### Version 1.19.3
> 1. archive/batch supports toggling copyright via the flag_copyright switch
##### Version 1.19.2
> 1. Use account-service v7
##### Version 1.19.1
> 1. Add contributor
##### Version 1.19.0
> 1. Write operation logs for all private-order changes
##### Version 1.18.6
> 1. Remove statsd
##### Version 1.18.5
> 1. Refactor the first-pass logic; dual write with query
##### Version 1.18.4
> 1. Private-order plus activity archives enter private-order fourth review
##### Version 1.18.3
> 1. New archive and video info tracking
##### Version 1.18.2
> 1. Integrate with the dynamic feed (B博)
##### Version 1.18.1
> 1. Non-special-category private-order archives with scheduled publish enter fourth review
##### Version 1.18.0
> 1. Write operation logs for second-review system notifications
> 2. The first_round message carries the archive category ID
> 3. Write operation logs for private-order and TAG traffic
##### Version 1.17.2
> 1. Fill gaps in error returns
##### Version 1.17.1
> 1. Introduce blademaster
##### Version 1.17.0
> 1. Rate-limit the PGC API
##### Version 1.16.4
> 1. First-review submission sets archive_video_relation state to 0, allowing deleted videos to be restored, consistent with archive_video behavior
##### Version 1.16.3
> 1. Repurpose attribute bit 13 as "region restricted"
> 2. Add goconvey tests
##### Version 1.16.2
> 1. Video review triggers a report email when attributes change
##### Version 1.16.1
> 1. Fix: private-order phase 2 flow_design traffic can be edited on the archive detail page
##### Version 1.16.0
> 1. Add the private-order phase 2 business
##### Version 1.15.2
> 1. Add a data API for the tracking table
##### Version 1.15.1
> 1. Switch to the new video table
##### Version 1.15.0
> 1. Add operations on the new video table
##### Version 1.14.10
> 1. Change the workflow for partner-embedded archives with up_from=6
##### Version 1.14.9
> 1. Deduplicate archive tags to avoid triggering report emails.
##### Version 1.14.8
> 1. First-review reject/lock sends a report email when adminChange=true.
##### Version 1.14.7
> 1. Fix known bugs in video auto-locking; exclude state=2 tasks when looking up 399-assigned tasks.
##### Version 1.14.6
> 1. Add video auto-locking for the Western movies, Japanese movies, other countries, HK/TW dramas, and overseas dramas categories
##### Version 1.14.5
> 1. Archives uploaded via the upos lab send the ugc_first_round message
##### Version 1.14.4
> 1. Review workflow change
##### Version 1.14.3
> 1. Use archive attribute bit 9 as the is_pgc field
##### Version 1.14.2
> 1. Restore the HTTP track video API /va/track/video
##### Version 1.14.1
> 1. Restore the passed logic to query the archive_track table
##### Version 1.14.0
> 1. Migrate archive_track to hbase
> 2. Change the passed logic to query the archive_oper table
##### Version 1.13.3
> 1. Archives support the dynamic feature
##### Version 1.13.2
> 1. Remove the APPkey parameter
##### Version 1.13.1
> 1. Remove archive_video_track logic
##### Version 1.13.0
> 1. Merge 天马 traffic with private orders
> 2. Only open-preview and orange-pass archives can enter private-order recheck
##### Version 1.12.5
> 1. Remove whitespace from the readme
##### Version 1.12.4
> 1. Add the ctime field when syncing new_video
##### Version 1.12.3
> 1. Fix the allowtag attribute being wrongly changed by batch attr on second-review submission
> 2. Fix redirectURL not being editable
##### Version 1.12.2
> 1. Fix the allowtag attribute being wrongly changed by batch attr on second-review submission
> 2. Fix redirectURL not being editable
##### Version 1.12.1
> 1. Second review updates mtime by default
##### Version 1.12.0
> 1. Dual-write logic for splitting the archive_video table
> 2. Remove the old batch API logic
##### Version 1.11.0
> 1. Batch submission for first and second review: change archive attr, move archive category, and write first/second review logs for archives and videos
##### Version 1.10.2
> 1. The commercial platform can update the scheduled publish time
##### Version 1.10.1
> 1. Fix scheduled publish being deleted during batch review
##### Version 1.10.0
> 1. Description-related features
##### Version 1.9.5
> 1. Remove the delayed publish logic
> 2. Fix the private-order reject workflow
> 3. Remove level-2/level-3 banned-user attr changes in first review
##### Version 1.9.4
> 1. Fix private-order workflow issues for activity archives
##### Version 1.9.3
> 1. Work around a bug with activity private-order archives
##### Version 1.9.2
> 1. Work around a bug in the activity databus
##### Version 1.9.1
> 1. Fix a private-order activity archive case
##### Version 1.9.0
> 1. Private-order features
> 2. First review adds dynamic/recommendation restrictions for level-2 banned users
> 3. Fix redirectURL not being clearable
##### Version 1.8.4
> 1. Second-review video operations (add/delete/edit)
##### Version 1.8.3
> 1. Second review: adjust the logic that decides whether the job sends email
##### Version 1.8.2
> 1. Second review: decide whether email is needed and notify the job via databus
##### Version 1.8.1
> 1. Add an API to fetch first-review task wait-time metrics
##### Version 1.8.0
> 1. Adjust the log description when changing the attr redirect link
##### Version 1.7.9
> 1. Fix a log-agent bug
##### Version 1.7.8
> 1. Watermark migration (compatible with old and new)
##### Version 1.7.7
> 1. Fix publish time editing
##### Version 1.7.6
> 1. Add the note field missing from second review
##### Version 1.7.5
> 1. Unify input parameters to use ap
##### Version 1.7.4
> 1. Fix a third-review/third-check bug
##### Version 1.7.3
> 1. Clean up second-review code
##### Version 1.7.2
> 1. Member-only videos drive member-only archives
##### Version 1.7.1
> 1. Add the category ID to first-review messages
##### Version 1.7.0
> 1. Add second-review APIs
##### Version 1.6.2
> 1. Change the key of the message-resend queue
##### Version 1.6.1
> 1. new_identify
##### Version 1.6.0
> 1. First-review submission can change three user attr bits
##### Version 1.5.0
> 1. Remove old logic for the dede database
##### Version 1.4.0
> 1. Add a PGC attribute modification API
> 2. Add a PGC ban API
##### Version 1.3.1
> 1. Fix bug: add a retry goroutine
##### Version 1.3.0
> 1. Integrate the new config center
##### Version 1.2.0
> 1. First-review video
> 2. Tracking info
##### Version 1.1.0
> 1. Add attr modification logic
> 2. Add an attr modification API for commercial products
##### Version 1.0.1
> 1. Adjust the PGC auto log table
##### Version 1.0.0
> 1. PGC auto pass

View File

@@ -0,0 +1,16 @@
# Owner
shencen
wangzhe01
# Author
hejianbing
gaopeng
hejianrong
dengwei
liusiming
chenxi01
# Reviewer
chenxi01
dengwei
liusiming

View File

@@ -0,0 +1,24 @@
# See the OWNERS docs at https://go.k8s.io/owners
approvers:
- chenxi01
- dengwei
- gaopeng
- hejianbing
- hejianrong
- liusiming
- shencen
- wangzhe01
labels:
- admin
- admin/main/videoup
- main
options:
no_parent_owners: true
reviewers:
- chenxi01
- dengwei
- gaopeng
- hejianbing
- hejianrong
- liusiming

View File

@@ -0,0 +1,10 @@
#### videoup-admin
##### Overview
> 1. Provides the anti-cheat service
##### Build Environment
> Please build and run with Go v1.7.x or later.
##### Dependencies
> 1. Shared package go-common

View File

@@ -0,0 +1,45 @@
package(default_visibility = ["//visibility:public"])
load(
"@io_bazel_rules_go//go:def.bzl",
"go_binary",
"go_library",
)
go_binary(
name = "cmd",
embed = [":go_default_library"],
tags = ["automanaged"],
)
go_library(
name = "go_default_library",
srcs = ["main.go"],
data = ["videoup-admin.toml"],
importpath = "go-common/app/admin/main/videoup/cmd",
tags = ["automanaged"],
visibility = ["//visibility:public"],
deps = [
"//app/admin/main/videoup/conf:go_default_library",
"//app/admin/main/videoup/http:go_default_library",
"//app/admin/main/videoup/service:go_default_library",
"//library/ecode/tip:go_default_library",
"//library/log:go_default_library",
"//library/net/trace:go_default_library",
"//library/queue/databus/report:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,54 @@
package main
import (
"flag"
"os"
"os/signal"
"syscall"
"time"
"go-common/app/admin/main/videoup/conf"
"go-common/app/admin/main/videoup/http"
"go-common/app/admin/main/videoup/service"
ecode "go-common/library/ecode/tip"
"go-common/library/log"
"go-common/library/net/trace"
"go-common/library/queue/databus/report"
)
func main() {
flag.Parse()
if err := conf.Init(); err != nil {
log.Error("conf.Init() error(%v)", err)
panic(err)
}
// init log
log.Init(conf.Conf.Xlog)
trace.Init(conf.Conf.Tracer)
defer trace.Close()
defer log.Close()
log.Info("videoup-admin start")
report.InitManager(conf.Conf.ManagerReport)
ecode.Init(conf.Conf.Ecode)
// service init
svr := service.New(conf.Conf)
http.Init(conf.Conf, svr)
// init signal
c := make(chan os.Signal, 1)
signal.Notify(c, syscall.SIGHUP, syscall.SIGQUIT, syscall.SIGTERM, syscall.SIGINT)
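// note: syscall.SIGSTOP in the switch below is never delivered here; it is not registered above and cannot be caught by a process anyway.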
for {
s := <-c
log.Info("videoup-admin get a signal %s", s.String())
switch s {
case syscall.SIGQUIT, syscall.SIGTERM, syscall.SIGSTOP, syscall.SIGINT:
svr.Close()
log.Info("videoup-admin exit")
time.Sleep(1 * time.Second)
return
case syscall.SIGHUP:
// TODO reload
default:
return
}
}
}

View File

@@ -0,0 +1,342 @@
# This is a TOML document. Boom.
[host]
api = "http://uat-api.bilibili.com"
mission = "http://172.16.33.104"
account = "http://account.bilibili.co"
manager = "http://manager.bilibili.co"
mngsearch = "http://bili-search.bilibili.co"
task = "http://archive.api.bilibili.co"
archive = "http://archive.api.bilibili.co"
data = "http://data.bilibili.co"
[xlog]
dir = "/data/log/videoup-admin/"
[xlog.elk]
project = "videoup-admin"
addr = "172.18.20.17:8520"
chanSize = 10240
[statsd]
project = "videoup-admin"
addr = "172.18.20.15:8200"
chanSize = 10240
[app]
key = "c05dd4e1638a8af0"
secret = "7daa7f8c06cd33c5c3067063c746fdcb"
[httpClient]
[httpClient.read]
key = "f265dcfa28272742"
secret = "437facc22dc8698b5544669bcc12348d"
dial = "1s"
timeout = "1s"
keepAlive = "60s"
timer = 1000
[httpClient.read.breaker]
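# circuit breaker params (assumed semantics): window = stats window, bucket = stat buckets per window, sleep = how long the breaker stays open before a probe, ratio = failure ratio that trips it, request = minimum requests before it can trip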
window = "10s"
sleep = "100ms"
bucket = 10
ratio = 0.5
request = 100
[httpClient.write]
key = "6aa4286456d16b97"
secret = "351cf022e1ae8296109c3c524faafcc8"
dial = "1s"
timeout = "3s"
keepAlive = "60s"
timer = 1000
[httpClient.write.breaker]
window = "10s"
sleep = "100ms"
bucket = 10
ratio = 0.5
request = 100
[httpClient.search]
key = "6aa4286456d16b97"
secret = "351cf022e1ae8296109c3c524faafcc8"
dial = "1s"
timeout = "3s"
keepAlive = "60s"
timer = 1000
[httpClient.search.breaker]
window = "10s"
sleep = "100ms"
bucket = 10
ratio = 0.5
request = 100
[bm]
addr = "0.0.0.0:6324"
maxListen = 1000
timeout = "1s"
#db for fat 172.16.33.205:3308
#db for uat 172.22.34.101:3306
[ormArchive]
dsn = "test:test@tcp(172.16.33.205:3308)/bilibili_archive?timeout=5s&readTimeout=5s&writeTimeout=5s&parseTime=true&loc=Local&charset=utf8,utf8mb4"
active = 5
idle = 2
idleTimeout = "4h"
[db]
[db.archive]
name = "172.16.33.205:3308"
dsn = "test:test@tcp(172.16.33.205:3308)/bilibili_archive?timeout=5s&readTimeout=5s&writeTimeout=5s&parseTime=true&loc=Local&charset=utf8,utf8mb4"
active = 5
idle = 2
idleTimeout ="4h"
queryTimeout = "5s"
execTimeout = "5s"
tranTimeout = "10s"
[db.archive.breaker]
window = "3s"
sleep = "100ms"
bucket = 10
ratio = 0.5
request = 100
[db.archiveRead]
name = "172.16.33.205:3308"
dsn = "test:test@tcp(172.16.33.205:3308)/bilibili_archive?timeout=5s&readTimeout=5s&writeTimeout=5s&parseTime=true&loc=Local&charset=utf8,utf8mb4"
active = 5
idle = 2
idleTimeout ="4h"
queryTimeout = "5s"
execTimeout = "5s"
tranTimeout = "10s"
[db.archiveRead.breaker]
window = "3s"
sleep = "100ms"
bucket = 10
ratio = 0.5
request = 100
[db.dede]
name = "172.16.33.54:3307"
dsn = "dede_test:dede_test@tcp(172.16.33.54:3307)/bilibili?timeout=5s&readTimeout=5s&writeTimeout=5s&parseTime=true&loc=Local&charset=utf8"
active = 5
idle = 2
idleTimeout ="4h"
queryTimeout = "5s"
execTimeout = "5s"
tranTimeout = "10s"
[db.dede.breaker]
window = "3s"
sleep = "100ms"
bucket = 10
ratio = 0.5
request = 100
[db.manager]
name = "172.16.33.205:3308"
dsn = "test:test@tcp(172.16.33.205:3308)/bilibili_manager?timeout=5s&readTimeout=5s&writeTimeout=5s&parseTime=true&loc=Local&charset=utf8"
active = 5
idle = 2
idleTimeout ="4h"
queryTimeout = "5s"
execTimeout = "5s"
tranTimeout = "10s"
[db.manager.breaker]
window = "3s"
sleep = "100ms"
bucket = 10
ratio = 0.5
request = 100
[db.oversea]
name = "172.16.33.205:3308"
dsn = "test:test@tcp(172.16.33.205:3308)/bilibili_oversea?timeout=5s&readTimeout=5s&writeTimeout=5s&parseTime=true&loc=Local&charset=utf8"
active = 5
idle = 2
idleTimeout ="4h"
queryTimeout = "5s"
execTimeout = "5s"
tranTimeout = "10s"
[db.oversea.breaker]
window = "3s"
sleep = "100ms"
bucket = 10
ratio = 0.5
request = 100
[db.creative]
name = "172.16.33.205:3308"
dsn = "test:test@tcp(172.16.33.205:3308)/bilibili_creative?timeout=5s&readTimeout=5s&writeTimeout=5s&parseTime=true&loc=Local&charset=utf8"
active = 5
idle = 2
idleTimeout ="4h"
queryTimeout = "5s"
execTimeout = "5s"
tranTimeout = "10s"
[db.creative.breaker]
window = "3s"
sleep = "100ms"
bucket = 10
ratio = 0.5
request = 100
[identify]
whiteAccessKey = ""
whiteMid = 0
csrf = true
[identify.app]
key = "53e2fa226f5ad348"
secret = "3cf6bd1b0ff671021da5f424fea4b04a"
[identify.memcache]
name = "go-business/identify"
proto = "tcp"
addr = "172.16.33.54:11211"
active = 5
idle = 2
dialTimeout = "1s"
readTimeout = "1s"
writeTimeout = "1s"
idleTimeout = "80s"
[identify.host]
auth = "http://passport.bilibili.com"
secret = "http://open.bilibili.com"
[identify.httpClient]
key = "53e2fa226f5ad348"
secret = "3cf6bd1b0ff671021da5f424fea4b04a"
dial = "1s"
timeout = "1s"
keepAlive = "60s"
[identify.httpClient.breaker]
window = "10s"
sleep = "100ms"
bucket = 10
ratio = 0.5
request = 100
[identify.httpClient.url]
"http://passport.bilibili.co/intranet/auth/tokenInfo" = {timeout = "100ms"}
"http://passport.bilibili.co/intranet/auth/cookieInfo" = {timeout = "100ms"}
"http://open.bilibili.co/api/getsecret" = {timeout = "500ms"}
[auth]
managerHost = "http://uat-manager.bilibili.co"
dashboardHost = "http://dashboard-mng.bilibili.co"
dashboardCaller = "manager-go"
[auth.DsHTTPClient]
key = "manager-go"
secret = "949bbb2dd3178252638c2407578bc7ad"
dial = "1s"
timeout = "1s"
keepAlive = "60s"
[auth.DsHTTPClient.breaker]
window = "3s"
sleep = "100ms"
bucket = 10
ratio = 0.5
request = 100
[auth.MaHTTPClient]
key = "f6433799dbd88751"
secret = "36f8ddb1806207fe07013ab6a77a3935"
dial = "1s"
timeout = "1s"
keepAlive = "60s"
[auth.MaHTTPClient.breaker]
window = "3s"
sleep = "100ms"
bucket = 10
ratio = 0.5
request = 100
[auth.session]
sessionIDLength = 32
cookieLifeTime = 1800
cookieName = "mng-go"
domain = ".bilibili.co"
[auth.session.Memcache]
name = "go-business/auth"
proto = "unix"
addr = "/tmp/uat-manager-auth-mc.sock"
active = 5
idle = 2
dialTimeout = "1s"
readTimeout = "1s"
writeTimeout = "1s"
idleTimeout = "80s"
[accountRPC]
timeout = "10s"
[videoupPub]
key = "0PtMsLLxWyyvoTgAyLCD"
secret = "0PtMsLLxWyyvoTgAyLCE"
group = "Videoup-Videoup2Bvc-P"
topic = "Videoup2Bvc"
action = "pub"
name = "videoup-admin/databus"
proto = "tcp"
addr = "172.16.33.158:6205"
active = 1
idle = 1
dialTimeout = "1s"
readTimeout = "1s"
writeTimeout = "1s"
idleTimeout = "10s"
[upCreditPub]
key = "4c76cbb7a985ac90"
secret = "43bb22ce34a6b13e7814f09cb8116522"
group = "UpCreditLog-MainArchive-P"
topic = "UpCreditLog-T"
action = "pub"
name = "videoup-admin/databus"
proto = "tcp"
addr = "172.16.33.158:6205"
active = 1
idle = 1
dialTimeout = "1s"
readTimeout="1s"
writeTimeout="1s"
idleTimeout="10s"
[redis]
[redis.track]
name = "video-admin/track"
proto = "tcp"
addr = "172.18.33.61:6807"
idle = 10
active = 10
dialTimeout = "1s"
readTimeout = "1s"
writeTimeout = "1s"
idleTimeout = "10s"
expire = "10s"
[redis.secondary]
name = "videoup-admin/secondary"
proto = "tcp"
addr = "172.22.33.137:6835"
active = 10
idle = 2
dialTimeout = "1s"
readTimeout = "1s"
writeTimeout = "1s"
idleTimeout = "80s"
[redis.task]
name = "video-admin/task"
proto = "tcp"
addr = "172.18.33.61:6807"
idle = 10
active = 10
dialTimeout = "1s"
readTimeout = "1s"
writeTimeout = "1s"
idleTimeout = "10s"
[hbase]
master = ""
meta = ""
dialTimeout = "1s"
readTimeout = "10s"
readsTimeout = "10s"
writeTimeout = "10s"
writesTimeout = "10s"
[hbase.zookeeper]
root = ""
addrs = ["172.18.33.131:2181","172.18.33.168:2181","172.18.33.169:2181"]
timeout = "30s"
[tagDisRPC]
timeout="1s"

View File

@@ -0,0 +1,45 @@
package(default_visibility = ["//visibility:public"])
load(
"@io_bazel_rules_go//go:def.bzl",
"go_library",
)
go_library(
name = "go_default_library",
srcs = ["conf.go"],
importpath = "go-common/app/admin/main/videoup/conf",
tags = ["automanaged"],
visibility = ["//visibility:public"],
deps = [
"//library/cache/redis:go_default_library",
"//library/conf:go_default_library",
"//library/database/hbase.v2:go_default_library",
"//library/database/orm:go_default_library",
"//library/database/sql:go_default_library",
"//library/ecode/tip:go_default_library",
"//library/log:go_default_library",
"//library/net/http/blademaster:go_default_library",
"//library/net/http/blademaster/middleware/permit:go_default_library",
"//library/net/rpc:go_default_library",
"//library/net/rpc/warden:go_default_library",
"//library/net/trace:go_default_library",
"//library/queue/databus:go_default_library",
"//library/time:go_default_library",
"//vendor/github.com/BurntSushi/toml:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,168 @@
package conf
import (
"errors"
"flag"
"go-common/library/net/rpc/warden"
"go-common/library/cache/redis"
"go-common/library/conf"
"go-common/library/database/orm"
"go-common/library/database/sql"
ecode "go-common/library/ecode/tip"
"go-common/library/log"
bm "go-common/library/net/http/blademaster"
"go-common/library/net/http/blademaster/middleware/permit"
"go-common/library/net/rpc"
"go-common/library/net/trace"
"go-common/library/queue/databus"
"go-common/library/time"
"go-common/library/database/hbase.v2"
"github.com/BurntSushi/toml"
)
var (
confPath string
//Conf .
Conf = &Config{}
client *conf.Client
)
//Config .
type Config struct {
Env string
// base
// host
Host *Host
// channal len
ChanSize int64
// log
Xlog *log.Config
// http
BM *bm.ServerConfig
// Auth
Auth *permit.Config
// tracer
Tracer *trace.Config
// tick load pgc
Tick time.Duration
// db
DB *DB
// db
ORMArchive *orm.Config
// databus
VideoupPub *databus.Config
UpCreditPub *databus.Config
// redis
Redis *Redis
// hbase
HBase *hbaseConf
// http client test
HTTPClient HTTPClient
// rpc
AccountRPC *warden.ClientConfig
UpsRPC *warden.ClientConfig
TagDisRPC *rpc.ClientConfig
Ecode *ecode.Config
ManagerReport *databus.Config
}
type hbaseConf struct {
hbase.Config
ReadTimeout time.Duration
ReadsTimeout time.Duration
WriteTimeout time.Duration
WritesTimeout time.Duration
}
//Host .
type Host struct {
API string
MngSearch string
Manager string
Data string
Account string
Task string
Archive string
}
//DB .
type DB struct {
Archive *sql.Config
ArchiveRead *sql.Config
Manager *sql.Config
Oversea *orm.Config
Creative *sql.Config
}
//Redis .
type Redis struct {
Track *struct {
*redis.Config
Expire time.Duration
}
Secondary *struct {
*redis.Config
Expire time.Duration
}
}
// HTTPClient test
type HTTPClient struct {
Read *bm.ClientConfig
Write *bm.ClientConfig
Search *bm.ClientConfig
}
func init() {
flag.StringVar(&confPath, "conf", "", "default config path")
}
//Init .
func Init() (err error) {
if confPath != "" {
return local()
}
return remote()
}
func local() (err error) {
_, err = toml.DecodeFile(confPath, &Conf)
return
}
func remote() (err error) {
if client, err = conf.New(); err != nil {
return
}
if err = load(); err != nil {
return
}
go func() {
for range client.Event() {
log.Info("config reload")
if e := load(); e != nil {
log.Error("config reload error (%v)", e)
}
}
}()
return
}
func load() (err error) {
var (
s string
ok bool
tmpConf *Config
)
if s, ok = client.Toml2(); !ok {
return errors.New("load config center error")
}
if _, err = toml.Decode(s, &tmpConf); err != nil {
return errors.New("could not decode config")
}
*Conf = *tmpConf
return
}

View File

@@ -0,0 +1,115 @@
package(default_visibility = ["//visibility:public"])
load(
"@io_bazel_rules_go//go:def.bzl",
"go_test",
"go_library",
)
go_test(
name = "go_default_test",
srcs = [
"addit_test.go",
"archive_test.go",
"config_test.go",
"dao_test.go",
"delay_test.go",
"first_pass_test.go",
"flow_test.go",
"forbid_test.go",
"hbase_test.go",
"history_test.go",
"mosaic_test.go",
"new_video_test.go",
"oper_test.go",
"porder_test.go",
"recheck_test.go",
"stats_test.go",
"tag_test.go",
"task_consumer_test.go",
"task_dispatch_test.go",
"task_state_test.go",
"task_test.go",
"task_weight_config_test.go",
"type_test.go",
"user_card_test.go",
"video_test.go",
"watermark_test.go",
],
embed = [":go_default_library"],
rundir = ".",
tags = ["automanaged"],
deps = [
"//app/admin/main/videoup/conf:go_default_library",
"//app/admin/main/videoup/model/archive:go_default_library",
"//vendor/github.com/smartystreets/goconvey/convey:go_default_library",
"//vendor/gopkg.in/h2non/gock.v1:go_default_library",
],
)
go_library(
name = "go_default_library",
srcs = [
"addit.go",
"archive.go",
"config.go",
"dao.go",
"delay.go",
"first_pass.go",
"flow.go",
"forbid.go",
"hbase.go",
"history.go",
"mosaic.go",
"new_video.go",
"oper.go",
"porder.go",
"recheck.go",
"redis.go",
"stats.go",
"tag.go",
"task.go",
"task_consumer.go",
"task_dispatch.go",
"task_oper_history.go",
"task_qa_video.go",
"task_state.go",
"task_weight_config.go",
"type.go",
"user_card.go",
"video.go",
"video_audit.go",
"video_history.go",
"watermark.go",
],
importpath = "go-common/app/admin/main/videoup/dao/archive",
tags = ["automanaged"],
visibility = ["//visibility:public"],
deps = [
"//app/admin/main/videoup/conf:go_default_library",
"//app/admin/main/videoup/model/archive:go_default_library",
"//app/admin/main/videoup/model/utils:go_default_library",
"//library/cache/redis:go_default_library",
"//library/database/hbase.v2:go_default_library",
"//library/database/sql:go_default_library",
"//library/log:go_default_library",
"//library/net/http/blademaster:go_default_library",
"//library/time:go_default_library",
"//library/xstr:go_default_library",
"//vendor/github.com/tsuna/gohbase/hrpc:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,89 @@
package archive
import (
"context"
"fmt"
"go-common/app/admin/main/videoup/model/archive"
xsql "go-common/library/database/sql"
"go-common/library/log"
"go-common/library/xstr"
)
const (
_inAddRdrSQL = "INSERT INTO archive_addit (aid,redirect_url) VALUES (?,?) ON DUPLICATE KEY UPDATE redirect_url=?"
_upAdditSQL = "UPDATE archive_addit SET mission_id=?,source=?,description=?,dynamic=? WHERE aid=?"
_AdditSQL = "SELECT aid,mission_id,from_ip,up_from,recheck_reason,redirect_url,source,order_id,desc_format_id,dynamic,inner_attr FROM archive_addit WHERE aid=?"
_additBatch = "SELECT aid,mission_id,from_ip,up_from,recheck_reason,redirect_url,source,order_id,desc_format_id,dynamic,inner_attr FROM archive_addit WHERE aid IN (%s)"
_inAdditInnerAttrSQL = "INSERT INTO archive_addit (aid, inner_attr) VALUES (?,?) ON DUPLICATE KEY UPDATE inner_attr=?"
)
// TxUpAdditRedirect update archive redirect url.
func (d *Dao) TxUpAdditRedirect(tx *xsql.Tx, aid int64, redirectURL string) (rows int64, err error) {
res, err := tx.Exec(_inAddRdrSQL, aid, redirectURL, redirectURL)
if err != nil {
log.Error("d._inAdditRedirect.Exec() error(%v)", err)
return
}
rows, err = res.RowsAffected()
return
}
// TxUpAddit update archive_addit mission_id && source by aid.
func (d *Dao) TxUpAddit(tx *xsql.Tx, aid, missionID int64, source, description, dynamic string) (rows int64, err error) {
res, err := tx.Exec(_upAdditSQL, missionID, source, description, dynamic, aid)
if err != nil {
log.Error("d.TxUpAddit.tx.Exec error(%v)", err)
return
}
rows, err = res.RowsAffected()
return
}
// Addit gets an archive addit by aid.
func (d *Dao) Addit(c context.Context, aid int64) (ad *archive.Addit, err error) {
row := d.rddb.QueryRow(c, _AdditSQL, aid)
ad = &archive.Addit{}
if err = row.Scan(&ad.Aid, &ad.MissionID, &ad.FromIP, &ad.UpFrom, &ad.RecheckReason, &ad.RedirectURL, &ad.Source, &ad.OrderID, &ad.DescFormatID, &ad.Dynamic, &ad.InnerAttr); err != nil {
if err == xsql.ErrNoRows {
err = nil
} else {
log.Error("row.Scan error(%v)", err)
}
}
return
}
//TxUpInnerAttr update archive inner_attr
func (d *Dao) TxUpInnerAttr(tx *xsql.Tx, aid int64, attr int64) (id int64, err error) {
res, err := tx.Exec(_inAdditInnerAttrSQL, aid, attr, attr)
if err != nil {
log.Error("TxUpInnerAttr tx.Exec error(%v) aid(%d) attr(%d)", err, aid, attr)
return 0, err
}
return res.LastInsertId()
}
// AdditBatch gets archive addit records by aids.
func (d *Dao) AdditBatch(c context.Context, aids []int64) (sMap map[int64]*archive.Addit, err error) {
sMap = make(map[int64]*archive.Addit)
if len(aids) == 0 {
return
}
rows, err := d.rddb.Query(c, fmt.Sprintf(_additBatch, xstr.JoinInts(aids)))
if err != nil {
log.Error("db.Query() error(%v)", err)
return
}
defer rows.Close()
for rows.Next() {
ad := &archive.Addit{}
if err = rows.Scan(&ad.Aid, &ad.MissionID, &ad.FromIP, &ad.UpFrom, &ad.RecheckReason, &ad.RedirectURL, &ad.Source, &ad.OrderID, &ad.DescFormatID, &ad.Dynamic, &ad.InnerAttr); err != nil {
log.Error("rows.Scan error(%v)", err)
return
}
sMap[ad.Aid] = ad
}
return
}

View File

@@ -0,0 +1,44 @@
package archive
import (
"context"
. "github.com/smartystreets/goconvey/convey"
"testing"
)
func TestAddit(t *testing.T) {
Convey("test archive", t, WithDao(func(d *Dao) {
aid := int64(10098814)
a, err := d.Addit(context.Background(), aid)
So(err, ShouldBeNil)
So(a, ShouldNotBeNil)
}))
}
func TestUpAdditRedirect(t *testing.T) {
Convey("UpAdditRedirect", t, WithDao(func(d *Dao) {
var c = context.TODO()
tx, _ := d.BeginTran(c)
_, err := d.TxUpAdditRedirect(tx, 0, "")
tx.Commit()
So(err, ShouldBeNil)
}))
}
func TestTxUpAddit(t *testing.T) {
Convey("TxUpAddit", t, WithDao(func(d *Dao) {
var c = context.TODO()
tx, _ := d.BeginTran(c)
_, err := d.TxUpAddit(tx, 0, 0, "", "", "")
tx.Commit()
So(err, ShouldBeNil)
}))
}
func TestAdditBatch(t *testing.T) {
Convey("AdditBatch", t, WithDao(func(d *Dao) {
var c = context.TODO()
_, err := d.AdditBatch(c, []int64{1, 2, 3})
So(err, ShouldBeNil)
}))
}

View File

@@ -0,0 +1,250 @@
package archive
import (
"context"
"database/sql"
"fmt"
"time"
"go-common/app/admin/main/videoup/model/archive"
xsql "go-common/library/database/sql"
"go-common/library/log"
xtime "go-common/library/time"
"go-common/library/xstr"
)
const (
_upArcSQL = "UPDATE archive SET title=?,content=?,copyright=?,cover=?,note=?,pubtime=?,mtime=? WHERE id=?"
_upArcTpSQL = "UPDATE archive SET typeid=? WHERE id=?"
_upArcRound = "UPDATE archive SET round=? WHERE id=?"
_upArcState = "UPDATE archive SET state=? WHERE id=?"
_upAccessSQL = "UPDATE archive SET access=? WHERE id=?"
_upAuthorSQL = "UPDATE archive SET mid=?,author=? WHERE id=?"
_upPTimeSQL = "UPDATE archive SET pubtime=? WHERE id=?"
_upArcReasonSQL = "UPDATE archive SET reject_reason=?,forward=? WHERE id=?"
_upArcAttrSQL = "UPDATE archive SET attribute=attribute&(~(1<<?))|(?<<?) WHERE id=?"
_upArcNote = "UPDATE archive SET note=? WHERE id=?"
_upArcCopyright = "UPDATE archive SET copyright=? WHERE id=?"
_upArcMtime = "UPDATE archive SET mtime=? WHERE id=?"
_arcSQL = "SELECT id,mid,title,access,attribute,reject_reason,tag,forward,round,state,copyright,cover,content,typeid,pubtime,ctime,mtime FROM archive WHERE id=?"
_arcsSQL = "SELECT id,mid,title,access,attribute,reject_reason,tag,forward,round,state,copyright,cover,content,typeid,pubtime,ctime,mtime FROM archive WHERE id in (%s)"
_arcStatesSQL = "SELECT id,state FROM archive WHERE id IN (%s)"
_upArcTagSQL = "UPDATE archive SET tag=? WHERE id=?"
)
// TxUpArchive update archive by aid.
func (d *Dao) TxUpArchive(tx *xsql.Tx, aid int64, title, content, cover, note string, copyright int8, pTime xtime.Time) (rows int64, err error) {
res, err := tx.Exec(_upArcSQL, title, content, copyright, cover, note, pTime, time.Now(), aid)
if err != nil {
log.Error("d.TxUpArchive.tx.Exec error(%v)", err)
return
}
rows, err = res.RowsAffected()
return
}
// TxUpArcTypeID update archive type_id by aid
func (d *Dao) TxUpArcTypeID(tx *xsql.Tx, aid int64, typeID int16) (rows int64, err error) {
res, err := tx.Exec(_upArcTpSQL, typeID, aid)
if err != nil {
log.Error("d.TxUpArcTypeID.tx.Exec error(%v) ", err)
return
}
rows, err = res.RowsAffected()
return
}
// TxUpArcRound update archive round by aid
func (d *Dao) TxUpArcRound(tx *xsql.Tx, aid int64, round int8) (rows int64, err error) {
res, err := tx.Exec(_upArcRound, round, aid)
if err != nil {
log.Error("d.TxUpArcRound.tx.Exec error(%v) ", err)
return
}
rows, err = res.RowsAffected()
return
}
// TxUpArcState update archive state by aid
func (d *Dao) TxUpArcState(tx *xsql.Tx, aid int64, state int8) (rows int64, err error) {
res, err := tx.Exec(_upArcState, state, aid)
if err != nil {
log.Error("d.TxUpArcState.tx.Exec error(%v)", err)
}
rows, err = res.RowsAffected()
return
}
// TxUpArcAccess update archive by aid.
func (d *Dao) TxUpArcAccess(tx *xsql.Tx, aid int64, access int16) (rows int64, err error) {
res, err := tx.Exec(_upAccessSQL, access, aid)
if err != nil {
log.Error("d.TxUpArcAccess.tx.Exec error(%v)", err)
return
}
rows, err = res.RowsAffected()
return
}
// TxUpArcAuthor update archive mid && author by aid.
func (d *Dao) TxUpArcAuthor(tx *xsql.Tx, aid, mid int64, author string) (rows int64, err error) {
res, err := tx.Exec(_upAuthorSQL, mid, author, aid)
if err != nil {
log.Error("d.TxUpArcAuthor.tx.Exec error(%v)", err)
return
}
rows, err = res.RowsAffected()
return
}
// TxUpArcPTime update ptime by aid
func (d *Dao) TxUpArcPTime(tx *xsql.Tx, aid int64, pTime xtime.Time) (rows int64, err error) {
res, err := tx.Exec(_upPTimeSQL, pTime, aid)
if err != nil {
log.Error("tx.Exec(%s, %d, %v) error(%v)", _upPTimeSQL, pTime, aid, err)
return
}
rows, err = res.RowsAffected()
return
}
// TxUpArcReason update archive reject_reason && forward_id by aid
func (d *Dao) TxUpArcReason(tx *xsql.Tx, aid, forward int64, reason string) (rows int64, err error) {
res, err := tx.Exec(_upArcReasonSQL, reason, forward, aid)
if err != nil {
log.Error("d.TxUpArcReason.tx.Exec error(%v)", err)
return
}
rows, err = res.RowsAffected()
return
}
// TxUpArcAttr update attribute by aid.
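// The SQL expression attribute&(~(1<<?))|(?<<?) clears the target bit and then ORs in the new value,
// i.e. attr = attr&^(1<<bit) | (val<<bit) in Go terms.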
func (d *Dao) TxUpArcAttr(tx *xsql.Tx, aid int64, bit uint, val int32) (rows int64, err error) {
res, err := tx.Exec(_upArcAttrSQL, bit, val, bit, aid)
if err != nil {
log.Error("d.upArcAttr.Exec() error(%v)", err)
return
}
rows, err = res.RowsAffected()
return
}
// TxUpArcNote update note by aid.
func (d *Dao) TxUpArcNote(tx *xsql.Tx, aid int64, note string) (rows int64, err error) {
res, err := tx.Exec(_upArcNote, note, aid)
if err != nil {
log.Error("d.upArcNote.Exec() error(%v)", err)
return
}
rows, err = res.RowsAffected()
return
}
// TxUpArcCopyRight update copyright by aid.
func (d *Dao) TxUpArcCopyRight(tx *xsql.Tx, aid int64, copyright int8) (rows int64, err error) {
res, err := tx.Exec(_upArcCopyright, copyright, aid)
if err != nil {
log.Error("d.TxUpArcCopyRight.Exec() error(%v)", err)
return
}
rows, err = res.RowsAffected()
return
}
// TxUpArcMtime update mtime by aid.
func (d *Dao) TxUpArcMtime(tx *xsql.Tx, aid int64) (rows int64, err error) {
res, err := tx.Exec(_upArcMtime, time.Now(), aid)
if err != nil {
log.Error("d.upArcNote.Exec() error(%v)", err)
return
}
rows, err = res.RowsAffected()
return
}
// Archive get archive by aid
func (d *Dao) Archive(c context.Context, aid int64) (a *archive.Archive, err error) {
var (
row = d.rddb.QueryRow(c, _arcSQL, aid)
reason, tag sql.NullString
)
a = &archive.Archive{}
if err = row.Scan(&a.Aid, &a.Mid, &a.Title, &a.Access, &a.Attribute, &reason, &tag, &a.Forward, &a.Round, &a.State,
&a.Copyright, &a.Cover, &a.Desc, &a.TypeID, &a.PTime, &a.CTime, &a.MTime); err != nil {
if err == xsql.ErrNoRows {
a = nil
err = nil
} else {
log.Error("row.Scan error(%v)", err)
}
return
}
a.RejectReason = reason.String
a.Tag = tag.String
return
}
// Archives get archives by aids
func (d *Dao) Archives(c context.Context, aids []int64) (am map[int64]*archive.Archive, err error) {
am = make(map[int64]*archive.Archive)
if len(aids) == 0 {
return
}
var reason, tag sql.NullString
rows, err := d.rddb.Query(c, fmt.Sprintf(_arcsSQL, xstr.JoinInts(aids)))
if err != nil {
log.Error("db.Query() error(%v)", err)
return
}
defer rows.Close()
for rows.Next() {
a := &archive.Archive{}
if err = rows.Scan(&a.Aid, &a.Mid, &a.Title, &a.Access, &a.Attribute, &reason, &tag, &a.Forward, &a.Round, &a.State,
&a.Copyright, &a.Cover, &a.Desc, &a.TypeID, &a.PTime, &a.CTime, &a.MTime); err != nil {
log.Error("rows.Scan error(%v)", err)
return
}
a.RejectReason = reason.String
a.Tag = tag.String
am[a.Aid] = a
}
return
}
// ArcStateMap get archive id and state map
func (d *Dao) ArcStateMap(c context.Context, aids []int64) (sMap map[int64]int, err error) {
sMap = make(map[int64]int)
if len(aids) == 0 {
return
}
rows, err := d.rddb.Query(c, fmt.Sprintf(_arcStatesSQL, xstr.JoinInts(aids)))
if err != nil {
log.Error("db.Query() error(%v)", err)
return
}
defer rows.Close()
for rows.Next() {
a := struct {
ID int64
State int
}{}
if err = rows.Scan(&a.ID, &a.State); err != nil {
log.Error("rows.Scan error(%v)", err)
return
}
sMap[a.ID] = a.State
}
return
}
//TxUpTag update archive tag
func (d *Dao) TxUpTag(tx *xsql.Tx, aid int64, tags string) (id int64, err error) {
res, err := tx.Exec(_upArcTagSQL, tags, aid)
if err != nil {
log.Error("TxUpTag tx.Exec error(%v) aid(%d) tags(%s)", err, aid, tags)
return
}
return res.LastInsertId()
}

View File

@@ -0,0 +1,183 @@
package archive
import (
"context"
. "github.com/smartystreets/goconvey/convey"
"go-common/app/admin/main/videoup/model/archive"
"testing"
)
func TestArchive(t *testing.T) {
Convey("test archive", t, WithDao(func(d *Dao) {
aid := int64(10098814)
a, err := d.Archive(context.Background(), aid)
So(err, ShouldBeNil)
So(a, ShouldNotBeNil)
t.Logf("resp: %v", a)
}))
}
func TestArchives(t *testing.T) {
Convey("Archives", t, WithDao(func(d *Dao) {
_, err := d.Archives(context.Background(), []int64{1, 2})
So(err, ShouldBeNil)
}))
}
func TestDao_TxUpArcNote(t *testing.T) {
Convey("TxUpArcNote", t, WithDao(func(d *Dao) {
var c = context.TODO()
tx, _ := d.BeginTran(c)
_, err := d.TxUpArcNote(tx, 111, "2")
tx.Commit()
So(err, ShouldBeNil)
}))
}
func TestDao_TxUpArcMtime(t *testing.T) {
Convey("TxUpArcMtime", t, WithDao(func(d *Dao) {
var c = context.TODO()
tx, _ := d.BeginTran(c)
_, err := d.TxUpArcMtime(tx, 111)
tx.Commit()
So(err, ShouldBeNil)
}))
}
func TestDao_TxUpArcAuthor(t *testing.T) {
Convey("TxUpArcAuthor", t, WithDao(func(d *Dao) {
var c = context.TODO()
tx, _ := d.BeginTran(c)
_, err := d.TxUpArcAuthor(tx, 111, 222, "222")
tx.Commit()
So(err, ShouldBeNil)
}))
}
func TestDao_TxUpArcState(t *testing.T) {
Convey("TxUpArcState", t, WithDao(func(d *Dao) {
var c = context.TODO()
tx, _ := d.BeginTran(c)
_, err := d.TxUpArcState(tx, 111, 0)
tx.Commit()
So(err, ShouldBeNil)
}))
}
func TestDao_TxUpArcAccess(t *testing.T) {
Convey("TxUpArcAccess", t, WithDao(func(d *Dao) {
var c = context.TODO()
tx, _ := d.BeginTran(c)
_, err := d.TxUpArcAccess(tx, 111, 0)
tx.Commit()
So(err, ShouldBeNil)
}))
}
func TestDao_TxUpArcReason(t *testing.T) {
Convey("TxUpArcReason", t, WithDao(func(d *Dao) {
var c = context.TODO()
tx, _ := d.BeginTran(c)
_, err := d.TxUpArcReason(tx, 111, 0, "")
tx.Commit()
So(err, ShouldBeNil)
}))
}
func TestDao_TxUpArcAttr(t *testing.T) {
Convey("TxUpArcAttr", t, WithDao(func(d *Dao) {
var c = context.TODO()
tx, _ := d.BeginTran(c)
_, err := d.TxUpArcAttr(tx, 111, 0, 1)
tx.Commit()
So(err, ShouldBeNil)
}))
}
func TestDao_TxUpTag(t *testing.T) {
Convey("TxUpTag", t, WithDao(func(d *Dao) {
c := context.TODO()
aid := int64(2880441)
tx, _ := d.BeginTran(c)
_, err := d.TxUpTag(tx, aid, "haha1,haha2,haha3")
tx.Commit()
So(err, ShouldBeNil)
}))
}
func TestDao_TxUpInnerAttr(t *testing.T) {
Convey("TxUpInnerAttr", t, WithDao(func(d *Dao) {
c := context.TODO()
addit := &archive.Addit{
Aid: 3,
}
addit.InnerAttrSet(1, archive.InnerAttrChannelReview)
tx, _ := d.BeginTran(c)
_, err := d.TxUpInnerAttr(tx, addit.Aid, addit.InnerAttr)
tx.Commit()
So(err, ShouldBeNil)
}))
}
func TestDao_TxUpArchive(t *testing.T) {
Convey("TxUpArchive", t, WithDao(func(d *Dao) {
c := context.TODO()
tx, _ := d.BeginTran(c)
a, err := d.Archive(c, 10098217)
t.Logf("archive(%+v)", a)
So(err, ShouldBeNil)
if err == nil {
_, err = d.TxUpArchive(tx, a.Aid, a.Title, "随便一个内容啦", a.Cover, "随意一个note", a.Copyright, a.PTime)
So(err, ShouldBeNil)
}
tx.Commit()
}))
}
func TestDao_TxUpArcTypeID(t *testing.T) {
Convey("TxUpArcTypeID", t, WithDao(func(d *Dao) {
c := context.TODO()
tx, _ := d.BeginTran(c)
_, err := d.TxUpArcTypeID(tx, 0, 0)
So(err, ShouldBeNil)
tx.Commit()
}))
}
func TestDao_TxUpArcRound(t *testing.T) {
Convey("TxUpArcRound", t, WithDao(func(d *Dao) {
c := context.TODO()
tx, _ := d.BeginTran(c)
_, err := d.TxUpArcRound(tx, 0, 0)
So(err, ShouldBeNil)
tx.Commit()
}))
}
func TestDao_TxUpArcPTime(t *testing.T) {
Convey("TxUpArcPTime", t, WithDao(func(d *Dao) {
c := context.TODO()
tx, _ := d.BeginTran(c)
_, err := d.TxUpArcPTime(tx, 0, 0)
So(err, ShouldBeNil)
tx.Commit()
}))
}
func TestDao_TxUpArcCopyRight(t *testing.T) {
Convey("TxUpArcCopyRight", t, WithDao(func(d *Dao) {
c := context.TODO()
tx, _ := d.BeginTran(c)
_, err := d.TxUpArcCopyRight(tx, 0, 0)
So(err, ShouldBeNil)
tx.Commit()
}))
}
func TestDao_ArcStateMap(t *testing.T) {
Convey("ArcStateMap", t, WithDao(func(d *Dao) {
c := context.TODO()
_, err := d.ArcStateMap(c, []int64{1, 2, 3})
So(err, ShouldBeNil)
}))
}

View File

@@ -0,0 +1,167 @@
package archive
import (
"context"
"database/sql"
"encoding/json"
"strconv"
"strings"
"go-common/app/admin/main/videoup/model/archive"
"go-common/library/log"
"go-common/library/xstr"
)
const (
_confSQL = "SELECT value FROM archive_config WHERE state=0 AND name=?"
_upconfSQL = "UPDATE archive_config SET value=?,remark=? WHERE name=?"
_inconfSQL = "INSERT archive_config(value,remark,name,state) VALUE (?,?,?,0)"
)
// FansConf gets the fans-count threshold config used for round check.
func (d *Dao) FansConf(c context.Context) (fans int64, err error) {
row := d.rddb.QueryRow(c, _confSQL, archive.ConfForClick)
var val string
if err = row.Scan(&val); err != nil {
if err == sql.ErrNoRows {
err = nil
} else {
log.Error("row.Scan error(%v)", err)
}
return
}
if fans, err = strconv.ParseInt(val, 10, 64); err != nil {
log.Error("strconv.ParseInt(%s) error(%v)", val, err)
}
return
}
// RoundTypeConf gets the type ids whose archives require round check.
func (d *Dao) RoundTypeConf(c context.Context) (roundTypes map[int16]struct{}, err error) {
roundTypes = map[int16]struct{}{}
row := d.rddb.QueryRow(c, _confSQL, archive.ConfForRoundType)
var (
val string
tids []string
tid int64
)
if err = row.Scan(&val); err != nil {
if err == sql.ErrNoRows {
err = nil
} else {
log.Error("row.Scan error(%v)", err)
}
return
}
tids = strings.Split(val, ",")
for _, tidStr := range tids {
if tid, err = strconv.ParseInt(tidStr, 10, 64); err != nil {
log.Error("strconv.ParseInt(%s) error(%v)", tid, err)
return
}
roundTypes[int16(tid)] = struct{}{}
}
return
}
// ThresholdConf gets the per-type threshold config (type id -> threshold).
func (d *Dao) ThresholdConf(c context.Context) (tpThr map[int16]int, err error) {
row := d.rddb.QueryRow(c, _confSQL, archive.ConfForThreshold)
var value string
if err = row.Scan(&value); err != nil {
if err == sql.ErrNoRows {
err = nil
} else {
log.Error("row.Scan() error(%v)", err)
}
return
}
if err = json.Unmarshal([]byte(value), &tpThr); err != nil {
log.Error("json.Unmarshal(%s) error(%v)", value, err)
return
}
return
}
// AuditTypesConf gets the type ids that must wait for audit.
func (d *Dao) AuditTypesConf(c context.Context) (atps map[int16]struct{}, err error) {
row := d.rddb.QueryRow(c, _confSQL, archive.ConfForWaitAudit)
var (
value string
typeIDs []int64
)
if err = row.Scan(&value); err != nil {
if err == sql.ErrNoRows {
err = nil
} else {
log.Error("row.Scan error(%v)", err)
}
return
}
typeIDs, err = xstr.SplitInts(value)
if err != nil {
log.Error("archive_config value(%s) xstr.SplitInts error(%v)", value, err)
return
}
atps = map[int16]struct{}{}
for _, typeid := range typeIDs {
atps[int16(typeid)] = struct{}{}
}
return
}
// WeightVC gets the weight score config.
func (d *Dao) WeightVC(c context.Context) (wvc *archive.WeightVC, err error) {
var value []byte
row := d.rddb.QueryRow(c, _confSQL, archive.ConfForWeightVC)
if err = row.Scan(&value); err != nil {
if err == sql.ErrNoRows {
err = nil
} else {
log.Error("row.Scan error(%v)", err)
}
return
}
wvc = new(archive.WeightVC)
if err = json.Unmarshal(value, wvc); err != nil {
log.Error("json.Unmarshal error(%v)", err)
wvc = nil
}
return
}
// SetWeightVC updates the weight score config.
func (d *Dao) SetWeightVC(c context.Context, wvc *archive.WeightVC, desc string) (rows int64, err error) {
var (
valueb []byte
res sql.Result
)
if valueb, err = json.Marshal(wvc); err != nil {
log.Error("json.Marshal(%+v) error(%v)", wvc, err)
return
}
if res, err = d.db.Exec(c, _upconfSQL, string(valueb), desc, archive.ConfForWeightVC); err != nil {
log.Error("d.db.Exec(%s, %s, %s, %s) error(%v)", _upconfSQL, string(valueb), desc, archive.ConfForWeightVC, err)
return
}
return res.RowsAffected()
}
// InWeightVC inserts the weight score config.
func (d *Dao) InWeightVC(c context.Context, wvc *archive.WeightVC, desc string) (rows int64, err error) {
var (
valueb []byte
res sql.Result
)
if valueb, err = json.Marshal(wvc); err != nil {
log.Error("json.Marshal(%+v) error(%v)", wvc, err)
return
}
if res, err = d.db.Exec(c, _inconfSQL, string(valueb), desc, archive.ConfForWeightVC); err != nil {
log.Error("d.db.Exec(%s, %s, %s, %s) error(%v)", _inconfSQL, string(valueb), desc, archive.ConfForWeightVC, err)
return
}
return res.LastInsertId()
}

View File

@@ -0,0 +1,64 @@
package archive
import (
"context"
"testing"
"go-common/app/admin/main/videoup/model/archive"
. "github.com/smartystreets/goconvey/convey"
)
func TestFansConf(t *testing.T) {
Convey("test archive", t, WithDao(func(d *Dao) {
fans, err := d.FansConf(context.Background())
So(err, ShouldBeNil)
So(fans, ShouldNotBeNil)
}))
}
func TestDao_RoundTypeConf(t *testing.T) {
Convey("test archive", t, WithDao(func(d *Dao) {
r, err := d.RoundTypeConf(context.Background())
So(err, ShouldBeNil)
So(r, ShouldNotBeNil)
}))
}
func TestDao_ThresholdConf(t *testing.T) {
Convey("test archive", t, WithDao(func(d *Dao) {
r, err := d.ThresholdConf(context.Background())
So(err, ShouldBeNil)
So(r, ShouldNotBeNil)
}))
}
func TestDao_AuditTypesConf(t *testing.T) {
Convey("test archive", t, WithDao(func(d *Dao) {
r, err := d.AuditTypesConf(context.Background())
So(err, ShouldBeNil)
So(r, ShouldNotBeNil)
}))
}
func TestDao_WeightVC(t *testing.T) {
Convey("test archive", t, WithDao(func(d *Dao) {
r, err := d.WeightVC(context.Background())
So(err, ShouldBeNil)
So(r, ShouldNotBeNil)
}))
}
func TestDao_SetWeightVC(t *testing.T) {
Convey("SetWeightVC", t, WithDao(func(d *Dao) {
_, err := d.SetWeightVC(context.TODO(), &archive.WeightVC{}, "desc")
So(err, ShouldBeNil)
}))
}
func TestDao_InWeightVC(t *testing.T) {
Convey("InWeightVC", t, WithDao(func(d *Dao) {
_, err := d.InWeightVC(context.TODO(), &archive.WeightVC{}, "desc")
So(err, ShouldBeNil)
}))
}

View File

@@ -0,0 +1,65 @@
package archive
import (
"context"
"go-common/app/admin/main/videoup/conf"
"go-common/library/cache/redis"
"go-common/library/database/hbase.v2"
"go-common/library/database/sql"
bm "go-common/library/net/http/blademaster"
)
// Dao is the archive dao.
type Dao struct {
c *conf.Config
// db
db *sql.DB
rddb *sql.DB
// redis
redis *redis.Pool
// hbase
hbase *hbase.Client
userCardURL string
addQAVideoURL string
clientW, clientR *bm.Client
creativeDB *sql.DB
}
// New new a dao.
func New(c *conf.Config) (d *Dao) {
d = &Dao{
c: c,
db: sql.NewMySQL(c.DB.Archive),
rddb: sql.NewMySQL(c.DB.ArchiveRead),
redis: redis.NewPool(c.Redis.Track.Config),
hbase: hbase.NewClient(&c.HBase.Config),
userCardURL: c.Host.Account + "/api/member/getCardByMid",
addQAVideoURL: c.Host.Task + "/vt/video/add",
clientW: bm.NewClient(c.HTTPClient.Write),
clientR: bm.NewClient(c.HTTPClient.Read),
creativeDB: sql.NewMySQL(c.DB.Creative),
}
return d
}
// BeginTran begins a transaction.
func (d *Dao) BeginTran(c context.Context) (tx *sql.Tx, err error) {
return d.db.Begin(c)
}
// Close close dao.
func (d *Dao) Close() {
if d.db != nil {
d.db.Close()
}
if d.creativeDB != nil {
d.creativeDB.Close()
}
d.redis.Close()
}
// Ping pings the archive db.
func (d *Dao) Ping(c context.Context) (err error) {
return d.db.Ping(c)
}
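A minimal usage sketch of this dao, composed from methods defined in this package (illustrative only; it assumes conf.Conf has already been populated via conf.Init() as in cmd/main.go, and error handling is trimmed):

```go
package main

import (
	"context"

	"go-common/app/admin/main/videoup/conf"
	arcdao "go-common/app/admin/main/videoup/dao/archive"
)

func main() {
	// assumes flags are parsed and conf.Init() has loaded conf.Conf (see cmd/main.go)
	d := arcdao.New(conf.Conf)
	defer d.Close()

	c := context.Background()
	tx, err := d.BeginTran(c)
	if err != nil {
		panic(err)
	}
	// update state and bump mtime for one archive inside a single transaction
	if _, err = d.TxUpArcState(tx, 10098814, 0); err != nil {
		tx.Rollback()
		panic(err)
	}
	if _, err = d.TxUpArcMtime(tx, 10098814); err != nil {
		tx.Rollback()
		panic(err)
	}
	tx.Commit()
}
```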

View File

@@ -0,0 +1,66 @@
package archive
import (
"context"
"flag"
"go-common/app/admin/main/videoup/conf"
"testing"
. "github.com/smartystreets/goconvey/convey"
"gopkg.in/h2non/gock.v1"
"os"
"strings"
)
var (
d *Dao
)
func WithDao(f func(d *Dao)) func() {
return func() {
Reset(func() {})
f(d)
}
}
func TestWeightLog(t *testing.T) {
Convey("WeightLog", t, WithDao(func(d *Dao) {
d.WeightLog(context.TODO(), 2604)
}))
}
func TestGetNextTask(t *testing.T) {
Convey("GetNextTask", t, WithDao(func(d *Dao) {
_, err := d.GetNextTask(context.TODO(), 102)
So(err, ShouldBeNil)
}))
}
func httpMock(method, url string) *gock.Request {
r := gock.New(url)
r.Method = strings.ToUpper(method)
d.clientR.SetTransport(gock.DefaultTransport)
return r
}
func TestMain(m *testing.M) {
if os.Getenv("DEPLOY_ENV") != "" {
flag.Set("app_id", "main.archive.videoup-admin")
flag.Set("conf_token", "gRSfeavV7kJdY9875Gf29pbd2wrdKZ1a")
flag.Set("tree_id", "2307")
flag.Set("conf_version", "docker-1")
flag.Set("deploy_env", "uat")
flag.Set("conf_host", "config.bilibili.co")
flag.Set("conf_path", "/tmp")
flag.Set("region", "sh")
flag.Set("zone", "sh001")
} else {
flag.Set("conf", "../../cmd/videoup-admin.toml")
}
flag.Parse()
if err := conf.Init(); err != nil {
panic(err)
}
d = New(conf.Conf)
os.Exit(m.Run())
}

View File

@@ -0,0 +1,80 @@
package archive
import (
"context"
"database/sql"
"go-common/app/admin/main/videoup/model/archive"
xsql "go-common/library/database/sql"
"go-common/library/log"
xtime "go-common/library/time"
"time"
)
const (
_upDelaySQL = "INSERT INTO archive_delay (aid,type,mid,state,dtime) VALUES(?,?,?,?,?) ON DUPLICATE KEY UPDATE mid=?,state=?,dtime=?,deleted_at='0000-00-00 00:00:00'"
_upDelStateSQL = "UPDATE archive_delay SET state=? WHERE aid=? AND type=?"
_upDelayDtimeSQL = "UPDATE archive_delay SET dtime=? WHERE aid=? AND type=?"
_delDelaySQL = "UPDATE archive_delay SET deleted_at = ? WHERE aid=? AND type=?"
_DelaySQL = "SELECT aid,dtime,state,mid FROM archive_delay WHERE aid=? AND type=? AND deleted_at = 0"
)
// TxUpDelay update delay
func (d *Dao) TxUpDelay(tx *xsql.Tx, mid, aid int64, state, tp int8, dTime xtime.Time) (rows int64, err error) {
res, err := tx.Exec(_upDelaySQL, aid, tp, mid, state, dTime, mid, state, dTime)
if err != nil {
log.Error("d.TxUpDelay.Exec() error(%v)", err)
return
}
rows, err = res.RowsAffected()
return
}
// TxUpDelState update delay state
func (d *Dao) TxUpDelState(tx *xsql.Tx, aid int64, state, tp int8) (rows int64, err error) {
res, err := tx.Exec(_upDelStateSQL, state, aid, tp)
if err != nil {
log.Error("d.TxUpDelState.Exec() error(%v)", err)
return
}
rows, err = res.RowsAffected()
return
}
// TxUpDelayDtime update archive delaytime by aid.
func (d *Dao) TxUpDelayDtime(tx *xsql.Tx, aid int64, tp int8, dtime xtime.Time) (rows int64, err error) {
res, err := tx.Exec(_upDelayDtimeSQL, dtime, aid, tp)
if err != nil {
log.Error("d.TxUpDelayDtime.Exec() error(%v)", err)
return
}
rows, err = res.RowsAffected()
return
}
// TxDelDelay delete delay
func (d *Dao) TxDelDelay(tx *xsql.Tx, aid int64, tp int8) (rows int64, err error) {
res, err := tx.Exec(_delDelaySQL, time.Now(), aid, tp)
if err != nil {
log.Error("d.TxDelDelay.Exec() error(%v)", err)
return
}
rows, err = res.RowsAffected()
return
}
// Delay gets a delay record by aid and type.
func (d *Dao) Delay(c context.Context, aid int64, tp int8) (dl *archive.Delay, err error) {
row := d.rddb.QueryRow(c, _DelaySQL, aid, tp)
dl = &archive.Delay{}
if err = row.Scan(&dl.Aid, &dl.DTime, &dl.State, &dl.Mid); err != nil {
if err == sql.ErrNoRows {
dl = nil
err = nil
} else {
log.Error("row.Scan error(%v)", err)
}
return
}
return
}

View File

@@ -0,0 +1,70 @@
package archive
import (
"context"
"testing"
. "github.com/smartystreets/goconvey/convey"
)
func Test_Delay(t *testing.T) {
var (
err error
)
Convey("PopMsgCache", t, WithDao(func(d *Dao) {
_, err = d.Delay(context.Background(), 10098814, 1)
So(err, ShouldBeNil)
}))
}
func Test_TxUpDelay(t *testing.T) {
var (
err error
c = context.Background()
)
Convey("TxUpDelay", t, WithDao(func(d *Dao) {
tx, _ := d.BeginTran(c)
_, err = d.TxUpDelay(tx, 0, 0, 0, 0, 0)
So(err, ShouldBeNil)
tx.Commit()
}))
}
func Test_TxUpDelState(t *testing.T) {
var (
err error
c = context.Background()
)
Convey("TxUpDelState", t, WithDao(func(d *Dao) {
tx, _ := d.BeginTran(c)
_, err = d.TxUpDelState(tx, 0, 0, 0)
So(err, ShouldBeNil)
tx.Commit()
}))
}
func Test_TxUpDelayDtime(t *testing.T) {
var (
err error
c = context.Background()
)
Convey("TxUpDelayDtime", t, WithDao(func(d *Dao) {
tx, _ := d.BeginTran(c)
_, err = d.TxUpDelayDtime(tx, 0, 0, 0)
So(err, ShouldBeNil)
tx.Commit()
}))
}
func Test_TxDelDelay(t *testing.T) {
var (
err error
c = context.Background()
)
Convey("TxDelDelay", t, WithDao(func(d *Dao) {
tx, _ := d.BeginTran(c)
_, err = d.TxDelDelay(tx, 0, 0)
So(err, ShouldBeNil)
tx.Commit()
}))
}

View File

@@ -0,0 +1,38 @@
package archive
import (
"context"
"go-common/library/database/sql"
"go-common/library/log"
"time"
)
const (
_slByAID = "SELECT `id` FROM `archive_first_pass` WHERE `aid`=? LIMIT 1;"
_inFirstPass = "INSERT INTO `archive_first_pass`(`aid`, `ctime`, `mtime`) VALUES(?,?,?);"
)
//GetFirstPassByAID gets the first-pass (first approval) record by aid.
func (d *Dao) GetFirstPassByAID(c context.Context, aid int64) (id int64, err error) {
row := d.db.QueryRow(c, _slByAID, aid)
if err = row.Scan(&id); err != nil {
if err == sql.ErrNoRows {
err = nil
} else {
log.Error("GetFirstPassByAID error(%v) aid(%d)", err, aid)
}
return
}
return
}
//AddFirstPass adds a first-pass record.
func (d *Dao) AddFirstPass(tx *sql.Tx, aid int64) (err error) {
now := time.Now()
if _, err = tx.Exec(_inFirstPass, aid, now, now); err != nil {
log.Error("AddFirstPass error(%v) aid(%d)", err)
}
return
}

View File

@@ -0,0 +1,15 @@
package archive
import (
"context"
. "github.com/smartystreets/goconvey/convey"
"testing"
)
func Test_GetFirstPassByAID(t *testing.T) {
Convey("test archive", t, WithDao(func(d *Dao) {
id, err := d.GetFirstPassByAID(context.Background(), 10098814)
So(err, ShouldBeNil)
So(id, ShouldNotBeNil)
}))
}

View File

@@ -0,0 +1,181 @@
package archive
import (
"context"
"fmt"
"go-common/app/admin/main/videoup/model/archive"
"go-common/library/database/sql"
"go-common/library/log"
"go-common/library/xstr"
)
const (
_upFlowSQL = "UPDATE flow_design SET group_id=?,uid=? WHERE id=?"
_inFlowLogSQL = "INSERT into flow_design_log(pool,oid,group_id,uid,action,remark) VALUES (?,?,?,?,?,?)"
_inFlowSQL = "INSERT into flow_design(pool,oid,group_id,uid,remark) VALUES (?,?,?,?,?)"
_flowsSQL = "SELECT id,name FROM flow_group WHERE state=0"
_flowPoolSQL = "SELECT id FROM flow_design WHERE pool=? AND oid=? AND state=0 order by id desc limit 1"
_findGroupIDByScopeSQL = "SELECT group_id FROM flow_scope WHERE pool= ? AND industry_id=? AND brand_id=? AND official=? AND state=0 order by id desc limit 1;"
_upFlowStateSQL = "UPDATE flow_design SET state=? WHERE id=?"
_flowsByOIDSQL = "SELECT fd.id,fd.pool,fd.oid,fd.group_id,fd.parent,fd.state,fg.value FROM flow_design fd LEFT JOIN flow_group fg ON fd.group_id=fg.id WHERE fd.oid=? AND fd.state=0 AND fg.state=0"
_flowUniqueSQL = "SELECT id,pool,oid,group_id,parent,state FROM flow_design WHERE oid=? AND pool=? AND group_id=? LIMIT 1"
_flowGroupPool = "SELECT id, pool FROM flow_group WHERE id IN (%s)"
)
// TxUpFlow tx up flow_design.
func (d *Dao) TxUpFlow(tx *sql.Tx, flowID, groupID, UID int64) (rows int64, err error) {
res, err := tx.Exec(_upFlowSQL, groupID, UID, flowID)
if err != nil {
log.Error("d.TxUpFlow.Exec() error(%v)", err)
return
}
rows, err = res.RowsAffected()
return
}
// TxAddFlow tx add flow_design.
func (d *Dao) TxAddFlow(tx *sql.Tx, pool int8, oid, uid, groupID int64, remark string) (id int64, err error) {
res, err := tx.Exec(_inFlowSQL, pool, oid, groupID, uid, remark)
if err != nil {
log.Error("d.TxAddFlow.Exec() error(%v)", err)
return
}
id, err = res.LastInsertId()
return
}
//FindGroupIDByScope .
func (d *Dao) FindGroupIDByScope(c context.Context, pool int8, IndustryID, brandID int64, official int8) (groupID int64, err error) {
row := d.rddb.QueryRow(c, _findGroupIDByScopeSQL, pool, IndustryID, brandID, official)
if err = row.Scan(&groupID); err != nil {
if err == sql.ErrNoRows {
err = nil
} else {
log.Error("row.Scan error(%v)", err)
}
groupID = 1
log.Info("FindGroupIDByScope match no scope AND hit default scope (%v)", groupID)
}
return
}
// TxAddFlowLog tx add flow_design log.
func (d *Dao) TxAddFlowLog(tx *sql.Tx, pool, action int8, oid, uid, groupID int64, remark string) (id int64, err error) {
res, err := tx.Exec(_inFlowLogSQL, pool, oid, groupID, uid, action, remark)
if err != nil {
log.Error("d._inFlowLog.Exec() error(%v)", err)
return
}
id, err = res.LastInsertId()
return
}
// Flows get flow_control id and remark.
func (d *Dao) Flows(c context.Context) (fs map[int64]string, err error) {
rows, err := d.rddb.Query(c, _flowsSQL)
if err != nil {
log.Error("d.db.Query(%s) error(%v)", err)
return
}
defer rows.Close()
fs = make(map[int64]string)
for rows.Next() {
var (
id int64
name string
)
if err = rows.Scan(&id, &name); err != nil {
log.Error("rows.Scan error(%v)", err)
return
}
fs[id] = name
}
return
}
// FlowByPool returns the latest active flow_design id for the given pool and oid.
func (d *Dao) FlowByPool(pool int8, oid int64) (id int64, err error) {
row := d.rddb.QueryRow(context.TODO(), _flowPoolSQL, pool, oid)
if err = row.Scan(&id); err != nil {
if err == sql.ErrNoRows {
err = nil
} else {
log.Error("row.Scan error(%v)", err)
}
}
return
}
// TxUpFlowState updates the state of a flow_design record whose pool != 1.
func (d *Dao) TxUpFlowState(tx *sql.Tx, id int64, state int8) (rows int64, err error) {
res, err := tx.Exec(_upFlowStateSQL, state, id)
if err != nil {
log.Error("TxUpFlowState.Exec() error(%v)", err)
return
}
rows, err = res.RowsAffected()
return
}
// FlowsByOID returns all active flow_design records hit by the given oid.
func (d *Dao) FlowsByOID(c context.Context, oid int64) (res []*archive.FlowData, err error) {
var (
rows *sql.Rows
)
res = []*archive.FlowData{}
if rows, err = d.rddb.Query(c, _flowsByOIDSQL, oid); err != nil {
log.Error("FlowsByOID d.rddb.Query error(%v) oid(%d)", err, oid)
return
}
defer rows.Close()
for rows.Next() {
f := &archive.FlowData{}
if err = rows.Scan(&f.ID, &f.Pool, &f.OID, &f.GroupID, &f.Parent, &f.State, &f.GroupValue); err != nil {
log.Error("FlowsByOID rows.Scan error(%v) oid(%d)", err, oid)
return
}
res = append(res, f)
}
return
}
// FlowUnique returns the flow_design record matching the given oid, pool and group, if any.
func (d *Dao) FlowUnique(c context.Context, oid, groupID int64, pool int8) (f *archive.FlowData, err error) {
f = &archive.FlowData{}
if err = d.rddb.QueryRow(c, _flowUniqueSQL, oid, pool, groupID).Scan(&f.ID, &f.Pool, &f.OID, &f.GroupID, &f.Parent, &f.State); err != nil {
if err == sql.ErrNoRows {
err = nil
f = nil
} else {
log.Error("row.Scan error(%v)", err)
}
}
return
}
// FlowGroupPools returns the pool of each given flow group id.
func (d *Dao) FlowGroupPools(c context.Context, ids []int64) (res map[int64]int8, err error) {
var (
rows *sql.Rows
id int64
pool int8
)
res = map[int64]int8{}
idstr := xstr.JoinInts(ids)
if rows, err = d.rddb.Query(c, fmt.Sprintf(_flowGroupPool, idstr)); err != nil {
log.Error("FlowGroupPools d.rddb.Query error(%v) ids(%s)", err, idstr)
return
}
defer rows.Close()
for rows.Next() {
if err = rows.Scan(&id, &pool); err != nil {
log.Error("FlowGroupPools rows.Scan error(%v) ids(%d)", err, idstr)
return
}
res[id] = pool
}
return
}

View File

@@ -0,0 +1,122 @@
package archive
import (
"context"
. "github.com/smartystreets/goconvey/convey"
"testing"
"go-common/app/admin/main/videoup/model/archive"
)
func TestDao_TxAddFlowLog(t *testing.T) {
var (
id int64
err error
)
Convey("TxAddFlowLog", t, WithDao(func(d *Dao) {
c := context.TODO()
tx, _ := d.BeginTran(c)
id, err = d.TxAddFlowLog(tx, archive.PoolPrivateOrder, archive.FlowLogAdd, 10, 421, 1, "测试添加流量日志-私单-其他")
tx.Commit()
So(id, ShouldBeGreaterThan, 0)
So(err, ShouldBeNil)
tx, _ = d.BeginTran(c)
id, err = d.TxAddFlowLog(tx, archive.PoolArcForbid, archive.FlowLogAdd, 10, 421, archive.FlowGroupNoChannel, "测试添加流量日志-回查-频道禁止")
tx.Commit()
So(id, ShouldBeGreaterThan, 0)
So(err, ShouldBeNil)
}))
}
func TestDao_TxUpFlowState(t *testing.T) {
var (
id, rows int64
err1, err2 error
)
Convey("TxUpFlowState", t, WithDao(func(d *Dao) {
c := context.TODO()
tx, _ := d.BeginTran(c)
id, err1 = d.TxAddFlow(tx, archive.PoolArcForbid, 1, 421, archive.FlowGroupNoChannel, "测试添加频道禁止流量套餐")
rows, err2 = d.TxUpFlowState(tx, id, archive.FlowOpen)
tx.Commit()
So(err1, ShouldBeNil)
So(id, ShouldBeGreaterThan, 0)
So(err2, ShouldBeNil)
So(rows, ShouldEqual, 0)
tx, _ = d.BeginTran(c)
rows, err2 = d.TxUpFlowState(tx, id, archive.FlowDelete)
tx.Commit()
So(err2, ShouldBeNil)
So(rows, ShouldBeGreaterThan, 0)
}))
}
func TestDao_FlowsByOID(t *testing.T) {
var (
flows []*archive.FlowData
err error
)
Convey("FlowsByOID", t, WithDao(func(d *Dao) {
c := context.TODO()
flows, err = d.FlowsByOID(c, 1)
So(err, ShouldBeNil)
}))
}
func TestDao_FlowUnique(t *testing.T) {
var (
err error
)
Convey("FlowUnique", t, WithDao(func(d *Dao) {
c := context.TODO()
_, err = d.FlowUnique(c, 1, archive.FlowGroupNoChannel, archive.PoolArcForbid)
So(err, ShouldBeNil)
}))
}
func TestDao_FlowGroupPools(t *testing.T) {
Convey("FlowGroupPools", t, WithDao(func(d *Dao) {
c := context.TODO()
pools, err := d.FlowGroupPools(c, []int64{23, 24, 1})
So(err, ShouldBeNil)
So(pools, ShouldNotBeNil)
t.Logf("pools(%+v)", pools)
}))
}
func TestDao_TxUpFlow(t *testing.T) {
var (
c = context.Background()
)
Convey("TxUpFlow", t, WithDao(func(d *Dao) {
tx, _ := d.BeginTran(c)
_, err := d.TxUpFlow(tx, 0, 0, 0)
So(err, ShouldBeNil)
tx.Commit()
}))
}
func TestDao_FindGroupIDByScope(t *testing.T) {
Convey("FindGroupIDByScope", t, WithDao(func(d *Dao) {
c := context.TODO()
_, err := d.FindGroupIDByScope(c, 0, 0, 0, 0)
So(err, ShouldBeNil)
}))
}
func TestDao_Flows(t *testing.T) {
Convey("Flows", t, WithDao(func(d *Dao) {
c := context.TODO()
_, err := d.Flows(c)
So(err, ShouldBeNil)
}))
}
func TestDao_FlowByPool(t *testing.T) {
Convey("FlowByPool", t, WithDao(func(d *Dao) {
_, err := d.FlowByPool(0, 0)
So(err, ShouldBeNil)
}))
}

View File

@@ -0,0 +1,55 @@
package archive
import (
"context"
"go-common/app/admin/main/videoup/model/archive"
"go-common/library/database/sql"
"go-common/library/log"
)
const (
_inForbidSQL = `INSERT INTO archive_forbid (aid,rank_attr,recommend_attr,dynamic_attr,show_attr) VALUES (?,?,?,?,?) ON DUPLICATE KEY UPDATE
rank_attr=?,recommend_attr=?,dynamic_attr=?,show_attr=?`
_upFlowIDSQL = "INSERT INTO archive_forbid (aid,on_flow_id) VALUES (?,?) ON DUPLICATE KEY UPDATE on_flow_id=?"
_forbidSQL = `SELECT aid,rank_attr,recommend_attr,dynamic_attr,show_attr,on_flow_id FROM archive_forbid WHERE aid=?`
)
// TxUpForbid insert archive forbid.
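// _inForbidSQL is an upsert: the four *_attr columns are listed twice because the
// ON DUPLICATE KEY UPDATE branch repeats them, which is why the same af values are
// passed to Exec twice below.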
func (d *Dao) TxUpForbid(tx *sql.Tx, af *archive.ForbidAttr) (rows int64, err error) {
res, err := tx.Exec(_inForbidSQL, af.Aid, af.RankV, af.RecommendV, af.DynamicV, af.ShowV, af.RankV, af.RecommendV, af.DynamicV, af.ShowV)
if err != nil {
log.Error("d.inForbid.Exec error(%v)", err)
return
}
rows, err = res.RowsAffected()
return
}
// TxUpFlowID insert archive flowid.
func (d *Dao) TxUpFlowID(tx *sql.Tx, aid, onFlowID int64) (rows int64, err error) {
res, err := tx.Exec(_upFlowIDSQL, aid, onFlowID, onFlowID)
if err != nil {
log.Error("d.upFlowID.Exec error(%v)", err)
return
}
rows, err = res.RowsAffected()
return
}
// Forbid gets an archive's forbid attributes.
func (d *Dao) Forbid(c context.Context, aid int64) (af *archive.ForbidAttr, err error) {
row := d.rddb.QueryRow(c, _forbidSQL, aid)
af = &archive.ForbidAttr{Aid: aid}
if err = row.Scan(&af.Aid, &af.RankV, &af.DynamicV, &af.RecommendV, &af.ShowV, &af.OnFlowID); err != nil {
if err == sql.ErrNoRows {
// af = nil // NOTE: for init
err = nil
} else {
log.Error("row.Scan error(%v)", err)
}
return
}
af.Convert()
return
}

View File

@@ -0,0 +1,38 @@
package archive
import (
"context"
"testing"
"go-common/app/admin/main/videoup/model/archive"
. "github.com/smartystreets/goconvey/convey"
)
func Test_Forbid(t *testing.T) {
Convey("test archive", t, WithDao(func(d *Dao) {
af, err := d.Forbid(context.Background(), 10098814)
So(err, ShouldBeNil)
So(af, ShouldNotBeNil)
}))
}
func Test_TxUpForbid(t *testing.T) {
var c = context.Background()
Convey("TxUpForbid", t, WithDao(func(d *Dao) {
tx, _ := d.BeginTran(c)
_, err := d.TxUpForbid(tx, &archive.ForbidAttr{})
So(err, ShouldBeNil)
tx.Commit()
}))
}
func Test_TxUpFlowID(t *testing.T) {
var c = context.Background()
Convey("TxUpFlowID", t, WithDao(func(d *Dao) {
tx, _ := d.BeginTran(c)
_, err := d.TxUpFlowID(tx, 0, 0)
So(err, ShouldBeNil)
tx.Commit()
}))
}

View File

@@ -0,0 +1,57 @@
package archive
import (
"bytes"
"context"
"crypto/md5"
"encoding/binary"
"encoding/json"
"fmt"
"time"
"github.com/tsuna/gohbase/hrpc"
"go-common/app/admin/main/videoup/model/archive"
"go-common/library/log"
)
var (
tableInfo = "ugc:ArchiveTaskWeight"
family = "weightlog"
familyB = []byte(family)
)
// hashRowKey creates the rowkey (first two bytes of md5(tid), hex-encoded, followed by the decimal tid) used to look up weight logs by task id.
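// For example, tid 4575 is serialized as 8 little-endian bytes, hashed with md5, and the
// first two hash bytes become a four-character hex prefix, yielding a key of the form
// "ab124575" (hash prefix illustrative only).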
func hashRowKey(tid int64) string {
var bs = make([]byte, 8)
binary.LittleEndian.PutUint64(bs, uint64(tid))
rk := md5.Sum(bs)
return fmt.Sprintf("%x%d", rk[:2], tid)
}
// WeightLog get weight log.
func (d *Dao) WeightLog(c context.Context, taskid int64) (ls []*archive.TaskWeightLog, err error) {
var (
result *hrpc.Result
key = hashRowKey(taskid)
ctx, cancel = context.WithTimeout(c, time.Duration(d.c.HBase.ReadTimeout))
)
defer cancel()
if result, err = d.hbase.Get(ctx, []byte(tableInfo), []byte(key)); err != nil {
log.Error("d.hbase.Get error(%v)", err)
return
}
for _, c := range result.Cells {
if c == nil || !bytes.Equal(c.Family, familyB) {
return
}
aLog := &archive.TaskWeightLog{}
if err = json.Unmarshal(c.Value, aLog); err != nil {
log.Warn("json.Unmarshal(%s) error(%v)", string(c.Value), err)
err = nil
continue
}
ls = append(ls, aLog)
}
return
}

View File

@@ -0,0 +1,13 @@
package archive
import (
"context"
. "github.com/smartystreets/goconvey/convey"
"testing"
)
func TestDao_WeightLog(t *testing.T) {
Convey("WeightLog", t, WithDao(func(d *Dao) {
d.WeightLog(context.TODO(), 4575)
}))
}

View File

@@ -0,0 +1,51 @@
package archive
import (
"context"
"go-common/app/admin/main/videoup/model/archive"
"go-common/library/database/sql"
"go-common/library/log"
"time"
)
const (
_archistoryByAIDSQL = "SELECT `id`, `aid`, `title`, `tag`, `content`, `cover`, `mid`, `ctime` FROM `archive_edit_history` WHERE `aid`=? AND `ctime`>? ORDER BY `id` DESC;"
_archistoryByIDSQL = "SELECT `id`, `aid`, `title`, `tag`, `content`, `cover`, `mid`, `ctime` FROM `archive_edit_history` WHERE `id`=? LIMIT 1"
)
// HistoryByAID returns an archive's user edit history by aid, limited to records newer than stime.
func (d *Dao) HistoryByAID(c context.Context, aid int64, stime time.Time) (hs []*archive.ArcHistory, err error) {
hs = []*archive.ArcHistory{}
rows, err := d.db.Query(c, _archistoryByAIDSQL, aid, stime)
if err != nil {
log.Error("HistoryByAID d.db.Query(aid(%d)) error(%v)", aid, err)
return
}
defer rows.Close()
for rows.Next() {
h := &archive.ArcHistory{}
if err = rows.Scan(&h.ID, &h.AID, &h.Title, &h.Tag, &h.Content, &h.Cover, &h.MID, &h.CTime); err != nil {
log.Error("HistoryByAID rows.Scan(aid(%d)) error(%v)", aid, err)
return
}
hs = append(hs, h)
}
return
}
// HistoryByID returns a single archive edit-history record by id.
func (d *Dao) HistoryByID(c context.Context, id int64) (h *archive.ArcHistory, err error) {
h = &archive.ArcHistory{}
row := d.db.QueryRow(c, _archistoryByIDSQL, id)
if err = row.Scan(&h.ID, &h.AID, &h.Title, &h.Tag, &h.Content, &h.Cover, &h.MID, &h.CTime); err != nil {
if err == sql.ErrNoRows {
return nil, nil
}
log.Error("HistoryByID row.Scan(id(%d)) error(%v)", id, err)
}
return
}

View File

@@ -0,0 +1,35 @@
package archive
import (
"context"
"github.com/smartystreets/goconvey/convey"
"testing"
"time"
)
func Test_historyByAID(t *testing.T) {
convey.Convey("根据aid获取稿件编辑历史", t, WithDao(func(d *Dao) {
aid := int64(10107879)
h, err := d.HistoryByAID(context.TODO(), aid, time.Now().Add(720*-1*time.Hour))
for _, o := range h {
t.Logf("%+v", o)
}
convey.So(err, convey.ShouldBeNil)
}))
}
func Test_historyByID(t *testing.T) {
convey.Convey("根据id获取稿件编辑历史", t, WithDao(func(d *Dao) {
hid := int64(1)
_, err := d.HistoryByID(context.TODO(), hid)
convey.So(err, convey.ShouldBeNil)
}))
}
func Test_videoHistoryByHID(t *testing.T) {
convey.Convey("根据id获取分p编辑历史", t, WithDao(func(d *Dao) {
hid := int64(1)
_, err := d.VideoHistoryByHID(context.TODO(), hid)
convey.So(err, convey.ShouldBeNil)
}))
}

View File

@@ -0,0 +1,32 @@
package archive
import (
"context"
"go-common/app/admin/main/videoup/model/archive"
"go-common/library/database/sql"
"go-common/library/log"
)
const _mosaic = "SELECT id, aid, cid, coordinate,ctime FROM archive_video_mosaic WHERE cid=?"
//Mosaic get mosaic
func (d *Dao) Mosaic(c context.Context, cid int64) (m []*archive.Mosaic, err error) {
var rows *sql.Rows
m = []*archive.Mosaic{}
if rows, err = d.rddb.Query(c, _mosaic, cid); err != nil {
log.Error("Mosaic d.rddb.Query error(%v) cid(%d)", err, cid)
return
}
defer rows.Close()
for rows.Next() {
ms := new(archive.Mosaic)
if err = rows.Scan(&ms.ID, &ms.AID, &ms.CID, &ms.Coordinate, &ms.CTime); err != nil {
log.Error("Mosaic rows.Scan error(%v) cid(%d)", err, cid)
return
}
m = append(m, ms)
}
return
}

View File

@@ -0,0 +1,15 @@
package archive
import (
"context"
. "github.com/smartystreets/goconvey/convey"
"testing"
)
func Test_Mosaic(t *testing.T) {
Convey("test archive", t, WithDao(func(d *Dao) {
m, err := d.Mosaic(context.Background(), 10116994)
So(err, ShouldBeNil)
So(m, ShouldNotBeNil)
}))
}

View File

@@ -0,0 +1,311 @@
package archive
import (
"context"
"fmt"
"go-common/app/admin/main/videoup/model/archive"
"go-common/library/database/sql"
"go-common/library/log"
"go-common/library/xstr"
)
const (
_videoByCid = "SELECT vr.id,vr.aid,vr.title AS eptitle,vr.description,v.filename,v.src_type,vr.cid,v.duration,v.filesize,v.resolutions,vr.index_order,vr.ctime,vr.mtime,v.status,v.playurl,v.attribute,v.failcode AS failinfo,v.xcode_state,v.weblink FROM archive_video_relation AS vr LEFT JOIN video AS v ON vr.cid = v.id WHERE vr.cid = ?"
_inRelationSQL = "INSERT IGNORE INTO archive_video_relation (id,aid,cid,title,description,index_order,ctime,mtime) VALUES (?,?,?,?,?,?,?,?)"
_upRelationSQL = "UPDATE archive_video_relation SET title=?,description=? WHERE id=?"
_upRelationOrderSQL = "UPDATE archive_video_relation SET index_order=? WHERE id=?"
_upRelationStateSQL = "UPDATE archive_video_relation SET state=? WHERE id=?"
_upVideoLinkSQL = "UPDATE video SET weblink=? WHERE id=?"
_upVideoStatusSQL = "UPDATE video SET status=? WHERE id=?"
_upVideoAttrSQL = "UPDATE video SET attribute=attribute&(~(1<<?))|(?<<?) WHERE id=?"
_slPlayurl = "SELECT playurl FROM video WHERE id=? LIMIT 1"
_newVideoIDSQL = `SELECT avr.id,v.filename,avr.cid,avr.aid,avr.title,avr.description,v.src_type,v.duration,v.filesize,v.resolutions,v.playurl,v.failcode,
avr.index_order,v.attribute,v.xcode_state,avr.state,avr.ctime,avr.mtime FROM archive_video_relation avr JOIN video v on avr.cid = v.id
WHERE avr.id=? LIMIT 1`
_newVideoIDsSQL = `SELECT avr.id,v.filename,avr.cid,avr.aid,avr.title,avr.description,v.src_type,v.duration,v.filesize,v.resolutions,v.playurl,v.failcode,
avr.index_order,v.attribute,v.xcode_state,avr.state,avr.ctime,avr.mtime FROM archive_video_relation avr JOIN video v on avr.cid = v.id
WHERE avr.id in (%s)`
_newVideosAIDSQL = `SELECT avr.id,v.filename,avr.cid,avr.aid,avr.title,avr.description,v.src_type,v.duration,v.filesize,v.resolutions,v.playurl,v.failcode,
avr.index_order,v.attribute,v.xcode_state,avr.state,v.status,avr.ctime,avr.mtime FROM archive_video_relation avr JOIN video v on avr.cid = v.id
WHERE aid=? and state != -100 ORDER BY index_order ASC`
_newVideoCntSQL = `SELECT COUNT(*) FROM archive_video_relation WHERE aid=? AND state!=-100`
_slSrcTypeSQL = "SELECT `id`, `src_type` FROM `video` WHERE `id` IN (%s)"
_slVIDSQL = "SELECT ar.id FROM archive_video_relation AS ar, video AS v WHERE ar.cid = v.id AND ar.aid=? AND v.filename=?;"
_videoInfo = `SELECT vr.id, vr.aid, vr.title AS eptitle, vr.description, vr.cid, vr.ctime AS epctime, v.filename, v.xcode_state, v.playurl,
a.ctime, a.author, a.title, a.tag, a.content, a.cover, a.typeid, a.mid, a.copyright,
coalesce(addit.source, '') source, coalesce(addit.dynamic, '') dynamic, coalesce(addit.desc_format_id, 0) desc_format_id, coalesce(addit.description, '') description
FROM archive_video_relation AS vr JOIN archive AS a ON vr.aid = a.id
LEFT OUTER JOIN video AS v ON vr.cid = v.id
LEFT OUTER JOIN archive_addit AS addit ON vr.aid = addit.aid
WHERE vr.aid = ? AND vr.cid=? LIMIT 1`
_videoRelated = `SELECT v.filename,v.status,vr.aid,vr.index_order,a.title,a.ctime FROM archive_video_relation AS vr LEFT JOIN video AS v ON vr.cid = v.id JOIN archive AS a ON vr.aid = a.id WHERE vr.aid = ?`
)
// VideoByCID get video by cid
func (d *Dao) VideoByCID(c context.Context, cid int64) (v *archive.Video, err error) {
row := d.rddb.QueryRow(c, _videoByCid, cid)
v = &archive.Video{}
if err = row.Scan(&v.ID, &v.Aid, &v.Title, &v.Desc, &v.Filename, &v.SrcType, &v.Cid, &v.Duration, &v.Filesize, &v.Resolutions, &v.Index, &v.CTime, &v.MTime, &v.Status, &v.Playurl, &v.Attribute, &v.FailCode, &v.XcodeState, &v.WebLink); err != nil {
if err == sql.ErrNoRows {
v = nil
err = nil
} else {
log.Error("row.Scan error(%v)", err)
}
}
return
}
// TxAddRelation insert archive_video_relation.
func (d *Dao) TxAddRelation(tx *sql.Tx, v *archive.Video) (rows int64, err error) {
res, err := tx.Exec(_inRelationSQL, v.ID, v.Aid, v.Cid, v.Title, v.Desc, v.Index, v.CTime, v.MTime)
if err != nil {
log.Error("d.inRelation.Exec error(%v)", err)
return
}
rows, err = res.RowsAffected()
return
}
// TxUpRelation update title and desc on archive_video_relation by vid.
func (d *Dao) TxUpRelation(tx *sql.Tx, vid int64, title, desc string) (rows int64, err error) {
res, err := tx.Exec(_upRelationSQL, title, desc, vid)
if err != nil {
log.Error("d.upRelation.Exec error(%v)", err)
return
}
rows, err = res.RowsAffected()
return
}
// TxUpRelationOrder update index_order on archive_video_relation by vid.
func (d *Dao) TxUpRelationOrder(tx *sql.Tx, vid int64, index int) (rows int64, err error) {
res, err := tx.Exec(_upRelationOrderSQL, index, vid)
if err != nil {
log.Error("d.upRelationOrder.Exec error(%v)", err)
return
}
rows, err = res.RowsAffected()
return
}
// TxUpRelationState update state on archive_video_relation by vid.
func (d *Dao) TxUpRelationState(tx *sql.Tx, vid int64, state int16) (rows int64, err error) {
res, err := tx.Exec(_upRelationStateSQL, state, vid)
if err != nil {
log.Error("d.upRelationState.Exec error(%v)", err)
return
}
rows, err = res.RowsAffected()
return
}
// TxUpWebLink update weblink on video by cid.
func (d *Dao) TxUpWebLink(tx *sql.Tx, cid int64, weblink string) (rows int64, err error) {
res, err := tx.Exec(_upVideoLinkSQL, weblink, cid)
if err != nil {
log.Error("d.upVideoLink.Exec error(%v)", err)
return
}
rows, err = res.RowsAffected()
return
}
// TxUpStatus update status on video by cid.
func (d *Dao) TxUpStatus(tx *sql.Tx, cid int64, status int16) (rows int64, err error) {
res, err := tx.Exec(_upVideoStatusSQL, status, cid)
if err != nil {
log.Error("d.upVideoStatus.Exec error(%v)", err)
return
}
rows, err = res.RowsAffected()
return
}
// TxUpAttr update attribute on video by cid.
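// The SQL expression `attribute & ~(1<<bit) | (val<<bit)` first clears the target bit and
// then ORs val back in at that position, so val=0 clears the flag and val=1 sets it.
// Illustrative only: attribute=0b0101 with bit=1, val=1 yields 0b0111.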
func (d *Dao) TxUpAttr(tx *sql.Tx, cid int64, bit uint, val int32) (rows int64, err error) {
res, err := tx.Exec(_upVideoAttrSQL, bit, val, bit, cid)
if err != nil {
log.Error("d.upVideoAttr.Exec() error(%v)", err)
return
}
rows, err = res.RowsAffected()
return
}
// VideoPlayurl get video play url
func (d *Dao) VideoPlayurl(c context.Context, cid int64) (playurl string, err error) {
row := d.rddb.QueryRow(c, _slPlayurl, cid)
if err = row.Scan(&playurl); err != nil {
if err == sql.ErrNoRows {
err = nil
} else {
log.Error("row.Scan error(%v)", err)
}
}
return
}
// NewVideoByID gets video info by relation id.
func (d *Dao) NewVideoByID(c context.Context, id int64) (v *archive.Video, err error) {
row := d.rddb.QueryRow(c, _newVideoIDSQL, id)
v = &archive.Video{}
if err = row.Scan(&v.ID, &v.Filename, &v.Cid, &v.Aid, &v.Title, &v.Desc, &v.SrcType, &v.Duration, &v.Filesize, &v.Resolutions,
&v.Playurl, &v.FailCode, &v.Index, &v.Attribute, &v.XcodeState, &v.Status, &v.CTime, &v.MTime); err != nil {
if err == sql.ErrNoRows {
v = nil
err = nil
} else {
log.Error("row.Scan error(%v)", err)
}
}
return
}
// NewVideoByIDs gets video info by relation ids. NOTE: NOT USED
func (d *Dao) NewVideoByIDs(c context.Context, id []int64) (vs []*archive.Video, err error) {
rows, err := d.rddb.Query(c, fmt.Sprintf(_newVideoIDsSQL, xstr.JoinInts(id)))
if err != nil {
log.Error("db.Query() error(%v)", err)
return
}
defer rows.Close()
for rows.Next() {
v := &archive.Video{}
if err = rows.Scan(&v.ID, &v.Filename, &v.Cid, &v.Aid, &v.Title, &v.Desc, &v.SrcType, &v.Duration, &v.Filesize, &v.Resolutions,
&v.Playurl, &v.FailCode, &v.Index, &v.Attribute, &v.XcodeState, &v.Status, &v.CTime, &v.MTime); err != nil {
log.Error("rows.Scan error(%v)", err)
return
}
vs = append(vs, v)
}
return
}
// NewVideosByAid gets all video info of an archive by aid.
func (d *Dao) NewVideosByAid(c context.Context, aid int64) (vs []*archive.Video, err error) {
rows, err := d.rddb.Query(c, _newVideosAIDSQL, aid)
if err != nil {
log.Error("db.Query() error(%v)", err)
return
}
defer rows.Close()
for rows.Next() {
var avrState, vState int16
v := &archive.Video{}
if err = rows.Scan(&v.ID, &v.Filename, &v.Cid, &v.Aid, &v.Title, &v.Desc, &v.SrcType, &v.Duration, &v.Filesize, &v.Resolutions,
&v.Playurl, &v.FailCode, &v.Index, &v.Attribute, &v.XcodeState, &avrState, &vState, &v.CTime, &v.MTime); err != nil {
log.Error("rows.Scan error(%v)", err)
return
}
// 2 state map to 1
if avrState == archive.VideoStatusDelete {
v.Status = archive.VideoStatusDelete
} else {
v.Status = vState
}
vs = append(vs, v)
}
return
}
// NewVideoCount counts the non-deleted videos of an archive by aid. NOTE: NOT USED
func (d *Dao) NewVideoCount(c context.Context, aid int64) (count int, err error) {
row := d.rddb.QueryRow(c, _newVideoCntSQL, aid)
if err = row.Scan(&count); err != nil {
if err == sql.ErrNoRows {
err = nil
} else {
log.Error("row.Scan error(%v)", err)
}
}
return
}
//VideoSrcTypeByIDs video src_type and id map
func (d *Dao) VideoSrcTypeByIDs(c context.Context, ids []int64) (st map[int64]string, err error) {
st = map[int64]string{}
idStr := xstr.JoinInts(ids)
rows, err := d.db.Query(c, fmt.Sprintf(_slSrcTypeSQL, idStr))
if err != nil {
log.Error("VideoSrcTypeByIDs d.db.Query (ids(%v)) error(%v)", idStr, err)
return
}
defer rows.Close()
for rows.Next() {
var (
id int64
srcType string
)
if err = rows.Scan(&id, &srcType); err != nil {
log.Error("VideoSrcTypeByIDs rows.Scan (ids(%v)) error(%v)", idStr, err)
return
}
st[id] = srcType
}
return
}
// VIDByAIDFilename looks up the relation id (vid) of a video by aid and filename.
func (d *Dao) VIDByAIDFilename(c context.Context, aid int64, filename string) (vid int64, err error) {
row := d.db.QueryRow(c, _slVIDSQL, aid, filename)
if err = row.Scan(&vid); err != nil {
if err == sql.ErrNoRows {
err = nil
} else {
log.Error("VideoRelationIDByFilename row.Scan err(%v) aid(%d) filename(%s)", err, aid, filename)
}
}
return
}
//VideoInfo video info
func (d *Dao) VideoInfo(c context.Context, aid int64, cid int64) (v *archive.VideoInfo, err error) {
var (
descFormatID int64
formatDesc string
)
v = &archive.VideoInfo{}
row := d.rddb.QueryRow(c, _videoInfo, aid, cid)
if err = row.Scan(&v.ID, &v.AID, &v.Eptitle, &v.Description, &v.CID, &v.Epctime, &v.Filename, &v.XcodeState, &v.Playurl,
&v.Ctime, &v.Author, &v.Title, &v.Tag, &v.Content, &v.Cover, &v.Typeid, &v.MID, &v.Copyright,
&v.Source, &v.Dynamic, &descFormatID, &formatDesc); err != nil {
if err == sql.ErrNoRows {
err = nil
v = nil
} else {
log.Error("VideoInfo row.Scan error(%v) aid(%d) cid(%d)", err, aid, cid)
}
return
}
if descFormatID > 0 {
v.Content = formatDesc
}
return
}
//VideoRelated related videos
func (d *Dao) VideoRelated(c context.Context, aid int64) (vs []*archive.RelationVideo, err error) {
var rows *sql.Rows
vs = []*archive.RelationVideo{}
if rows, err = d.rddb.Query(c, _videoRelated, aid); err != nil {
log.Error("VideoRelated d.rddb.Query error(%v) aid(%d)", err, aid)
return
}
defer rows.Close()
for rows.Next() {
v := &archive.RelationVideo{}
if err = rows.Scan(&v.Filename, &v.Status, &v.AID, &v.IndexOrder, &v.Title, &v.Ctime); err != nil {
log.Error("VideoRelated rows.Scan error(%v) aid(%d)", err, aid)
return
}
vs = append(vs, v)
}
return
}

View File

@@ -0,0 +1,149 @@
package archive
import (
"context"
"github.com/smartystreets/goconvey/convey"
"testing"
)
func Test_videoSrcTypeByIDs(t *testing.T) {
convey.Convey("根据cid获取最新的上传类型src_type", t, WithDao(func(d *Dao) {
ids := []int64{385, 386, 387, 388}
m, err := d.VideoSrcTypeByIDs(context.TODO(), ids)
convey.So(err, convey.ShouldBeNil)
convey.So(len(m), convey.ShouldBeLessThanOrEqualTo, len(ids))
}))
}
func Test_vIDByAIDFilename(t *testing.T) {
convey.Convey("根据aid+filename获取分p的vid", t, WithDao(func(d *Dao) {
aid := int64(161)
filename := "d74b1c1cda32e5740658a2517fd82965"
_, err := d.VIDByAIDFilename(context.TODO(), aid, filename)
convey.So(err, convey.ShouldBeNil)
}))
}
func TestDao_VideoInfo(t *testing.T) {
convey.Convey("VideoInfo", t, WithDao(func(d *Dao) {
_, err := d.VideoInfo(context.Background(), 10098493, 10109201)
convey.So(err, convey.ShouldBeNil)
}))
}
func TestDao_VideoByCID(t *testing.T) {
convey.Convey("VideoByCID", t, WithDao(func(d *Dao) {
info, err := d.VideoByCID(context.Background(), 10109201)
convey.So(err, convey.ShouldBeNil)
convey.So(info, convey.ShouldNotBeNil)
}))
}
func TestDao_VideoRelated(t *testing.T) {
convey.Convey("VideoRelated", t, WithDao(func(d *Dao) {
v, err := d.VideoRelated(context.Background(), 10098493)
t.Logf("VideoRelated(%+v)\r\n", v)
convey.So(err, convey.ShouldBeNil)
convey.So(v, convey.ShouldNotBeNil)
}))
}
func Test_TxUpRelation(t *testing.T) {
var c = context.Background()
convey.Convey("TxUpRelation", t, WithDao(func(d *Dao) {
tx, _ := d.BeginTran(c)
_, err := d.TxUpRelation(tx, 0, "", "")
convey.So(err, convey.ShouldBeNil)
tx.Commit()
}))
}
func Test_TxUpRelationOrder(t *testing.T) {
var c = context.Background()
convey.Convey("TxUpRelationOrder", t, WithDao(func(d *Dao) {
tx, _ := d.BeginTran(c)
_, err := d.TxUpRelationOrder(tx, 0, 0)
convey.So(err, convey.ShouldBeNil)
tx.Commit()
}))
}
func Test_TxUpRelationState(t *testing.T) {
var c = context.Background()
convey.Convey("TxUpRelationState", t, WithDao(func(d *Dao) {
tx, _ := d.BeginTran(c)
_, err := d.TxUpRelationState(tx, 0, 0)
convey.So(err, convey.ShouldBeNil)
tx.Commit()
}))
}
func Test_TxUpWebLink(t *testing.T) {
var c = context.Background()
convey.Convey("TxUpWebLink", t, WithDao(func(d *Dao) {
tx, _ := d.BeginTran(c)
_, err := d.TxUpWebLink(tx, 0, "")
convey.So(err, convey.ShouldBeNil)
tx.Commit()
}))
}
func Test_TxUpStatus(t *testing.T) {
var c = context.Background()
convey.Convey("TxUpStatus", t, WithDao(func(d *Dao) {
tx, _ := d.BeginTran(c)
_, err := d.TxUpStatus(tx, 0, 0)
convey.So(err, convey.ShouldBeNil)
tx.Commit()
}))
}
func Test_TxUpAttr(t *testing.T) {
var c = context.Background()
convey.Convey("TxUpAttr", t, WithDao(func(d *Dao) {
tx, _ := d.BeginTran(c)
_, err := d.TxUpAttr(tx, 0, 0, 0)
convey.So(err, convey.ShouldBeNil)
tx.Commit()
}))
}
func Test_VideoPlayurl(t *testing.T) {
var c = context.Background()
convey.Convey("VideoPlayurl", t, WithDao(func(d *Dao) {
_, err := d.VideoPlayurl(c, 0)
convey.So(err, convey.ShouldBeNil)
}))
}
func Test_NewVideoByID(t *testing.T) {
var c = context.Background()
convey.Convey("NewVideoByID", t, WithDao(func(d *Dao) {
_, err := d.NewVideoByID(c, 0)
convey.So(err, convey.ShouldBeNil)
}))
}
func Test_NewVideoByIDs(t *testing.T) {
var c = context.Background()
convey.Convey("NewVideoByIDs", t, WithDao(func(d *Dao) {
_, err := d.NewVideoByIDs(c, []int64{1, 2, 3})
convey.So(err, convey.ShouldBeNil)
}))
}
func Test_NewVideosByAid(t *testing.T) {
var c = context.Background()
convey.Convey("NewVideosByAid", t, WithDao(func(d *Dao) {
_, err := d.NewVideosByAid(c, 0)
convey.So(err, convey.ShouldBeNil)
}))
}
func Test_NewVideoCount(t *testing.T) {
var c = context.Background()
convey.Convey("NewVideoCount", t, WithDao(func(d *Dao) {
_, err := d.NewVideoCount(c, 0)
convey.So(err, convey.ShouldBeNil)
}))
}

View File

@@ -0,0 +1,119 @@
package archive
import (
"context"
"database/sql"
"go-common/app/admin/main/videoup/model/archive"
"go-common/library/log"
"go-common/library/time"
)
const (
_inArcOperSQL = "INSERT INTO archive_oper (aid,uid,typeid,state,content,round,attribute,last_id,remark) VALUES (?,?,?,?,?,?,?,?,?)"
_inVideoOperSQL = "INSERT INTO archive_video_oper (aid,uid,vid,status,content,attribute,last_id,remark) VALUES (?,?,?,?,?,?,?,?)"
_upVideoOperSQL = "UPDATE archive_video_oper SET last_id=? WHERE id=?"
_arcOperSQL = "SELECT id,aid,uid,typeid,state,content,round,attribute,last_id,remark FROM archive_oper WHERE aid = ? ORDER BY ctime DESC"
_arcPassedOperSQL = "SELECT id FROM archive_oper WHERE aid=? AND state>=? LIMIT 1"
_videoOperSQL = "SELECT id,aid,uid,vid,status,content,attribute,last_id,remark,ctime FROM archive_video_oper WHERE vid = ? ORDER BY ctime DESC"
_operAttrSQL = "SELECT attribute, ctime FROM archive_video_oper WHERE vid=? ORDER BY ctime DESC;"
)
// AddArcOper insert archive_oper.
func (d *Dao) AddArcOper(c context.Context, aid, adminID int64, attribute int32, typeID, state int16, round int8, lastID int64, content, remark string) (rows int64, err error) {
res, err := d.db.Exec(c, _inArcOperSQL, aid, adminID, typeID, state, content, round, attribute, lastID, remark)
if err != nil {
log.Error("d.inArcOper.Exec error(%v)", err)
return
}
rows, err = res.RowsAffected()
return
}
// AddVideoOper insert archive_video_oper.
func (d *Dao) AddVideoOper(c context.Context, aid, adminID, vid int64, attribute int32, status int16, lastID int64, content, remark string) (id int64, err error) {
res, err := d.db.Exec(c, _inVideoOperSQL, aid, adminID, vid, status, content, attribute, lastID, remark)
if err != nil {
log.Error("d.inVideoOper.Exec error(%v)", err)
return
}
id, err = res.LastInsertId()
return
}
// UpVideoOper update archive_video_oper last_id by id.
func (d *Dao) UpVideoOper(c context.Context, lastID, id int64) (rows int64, err error) {
res, err := d.db.Exec(c, _upVideoOperSQL, lastID, id)
if err != nil {
log.Error("d.upVideoOper.Exec error(%v)", err)
return
}
rows, err = res.RowsAffected()
return
}
// ArchiveOper select archive_oper.
func (d *Dao) ArchiveOper(c context.Context, aid int64) (oper *archive.ArcOper, err error) {
row := d.rddb.QueryRow(c, _arcOperSQL, aid)
oper = &archive.ArcOper{}
if err = row.Scan(&oper.ID, &oper.Aid, &oper.UID, &oper.TypeID, &oper.State, &oper.Content, &oper.Round, &oper.Attribute, &oper.LastID, &oper.Remark); err != nil {
if err == sql.ErrNoRows {
err = nil
} else {
log.Error("row.Scan error(%v)", err)
}
}
return
}
// VideoOper select archive_video_oper.
func (d *Dao) VideoOper(c context.Context, vid int64) (oper *archive.VideoOper, err error) {
row := d.rddb.QueryRow(c, _videoOperSQL, vid)
oper = &archive.VideoOper{}
if err = row.Scan(&oper.ID, &oper.AID, &oper.UID, &oper.VID, &oper.Status, &oper.Content, &oper.Attribute, &oper.LastID, &oper.Remark, &oper.CTime); err != nil {
if err == sql.ErrNoRows {
err = nil
} else {
log.Error("row.Scan error(%v)", err)
}
}
return
}
// PassedOper check archive passed
func (d *Dao) PassedOper(c context.Context, aid int64) (id int64, err error) {
row := d.rddb.QueryRow(c, _arcPassedOperSQL, aid, archive.StateOpen)
if err = row.Scan(&id); err != nil {
if err == sql.ErrNoRows {
err = nil
} else {
log.Error("row.Scan error(%v)", err)
}
}
return
}
// VideoOperAttrsCtimes returns a vid's audit attribute history ordered by ctime.
func (d *Dao) VideoOperAttrsCtimes(c context.Context, vid int64) (attrs []int32, ctimes []int64, err error) {
rows, err := d.rddb.Query(c, _operAttrSQL, vid)
if err != nil {
log.Error("VideoOperAttrsCtimes d.rddb.Query error(%v)", err)
return
}
defer rows.Close()
for rows.Next() {
var (
ctime time.Time
attr int32
)
if err = rows.Scan(&attr, &ctime); err != nil {
log.Error("VideoOperAttrsCtimes rows.Scan error(%v)", err)
return
}
attrs = append(attrs, attr)
ctimes = append(ctimes, int64(ctime))
}
return
}

View File

@@ -0,0 +1,55 @@
package archive
import (
"context"
. "github.com/smartystreets/goconvey/convey"
"testing"
)
func Test_videoOperAttrsCtimes(t *testing.T) {
Convey("获取vid的审核属性记录按照ctime排序", t, WithDao(func(d *Dao) {
vid := int64(8943315)
attrs, ctimes, err := d.VideoOperAttrsCtimes(context.TODO(), vid)
So(err, ShouldBeNil)
So(len(attrs), ShouldEqual, len(ctimes))
}))
}
func Test_ArchiveOper(t *testing.T) {
Convey("test archive", t, WithDao(func(d *Dao) {
m, err := d.ArchiveOper(context.Background(), 10116994)
So(err, ShouldBeNil)
So(m, ShouldNotBeNil)
}))
}
func Test_VideoOper(t *testing.T) {
Convey("test archive", t, WithDao(func(d *Dao) {
m, err := d.VideoOper(context.Background(), 10116994)
So(err, ShouldBeNil)
So(m, ShouldNotBeNil)
}))
}
func Test_PassedOper(t *testing.T) {
Convey("test archive", t, WithDao(func(d *Dao) {
m, err := d.PassedOper(context.Background(), 10116994)
So(err, ShouldBeNil)
So(m, ShouldNotBeNil)
}))
}
func Test_VideoOperAttrsCtimes(t *testing.T) {
Convey("test archive", t, WithDao(func(d *Dao) {
_, _, err := d.VideoOperAttrsCtimes(context.Background(), 10116994)
So(err, ShouldBeNil)
}))
}
func Test_UpVideoOper(t *testing.T) {
var c = context.Background()
Convey("UpVideoOper", t, WithDao(func(d *Dao) {
_, err := d.UpVideoOper(c, 0, 0)
So(err, ShouldBeNil)
}))
}

View File

@@ -0,0 +1,61 @@
package archive
import (
"context"
"go-common/app/admin/main/videoup/model/archive"
"go-common/library/database/sql"
"go-common/library/log"
)
const (
_inPorderSQL = `INSERT INTO archive_porder (aid,industry_id,brand_id,brand_name,official,show_type,show_front,advertiser,agent,state) VALUES (?,?,?,?,?,?,?,?,?,1) ON DUPLICATE KEY UPDATE
industry_id=?,brand_id=?,brand_name=?,official=?,show_type=?,show_front=?,advertiser=?,agent=?`
_selPorderSQL = `select industry_id,brand_id,brand_name,official,show_type,advertiser,agent,state from archive_porder where aid=?`
_selPorderConfigSQL = `select id,name,rank,type,state from porder_config`
)
// TxUpPorder archive_porder
func (d *Dao) TxUpPorder(tx *sql.Tx, aid int64, ap *archive.ArcParam) (rows int64, err error) {
res, err := tx.Exec(_inPorderSQL, aid, ap.IndustryID, ap.BrandID, ap.BrandName, ap.Official, ap.ShowType, ap.ShowFront, ap.Advertiser, ap.Agent, ap.IndustryID, ap.BrandID, ap.BrandName, ap.Official, ap.ShowType, ap.ShowFront, ap.Advertiser, ap.Agent)
if err != nil {
log.Error("d.TxUpPorder.Exec error(%v)", err)
return
}
rows, err = res.RowsAffected()
return
}
// Porder gets an archive's porder record.
func (d *Dao) Porder(c context.Context, aid int64) (p *archive.Porder, err error) {
row := d.rddb.QueryRow(c, _selPorderSQL, aid)
p = &archive.Porder{}
if err = row.Scan(&p.IndustryID, &p.BrandID, &p.BrandName, &p.Official, &p.ShowType, &p.Advertiser, &p.Agent, &p.State); err != nil {
if err != sql.ErrNoRows {
log.Error("row.Scan error(%v)", err)
return
}
err = nil
}
return
}
// PorderConfig gets all porder configs keyed by id.
func (d *Dao) PorderConfig(c context.Context) (pc map[int64]*archive.PorderConfig, err error) {
rows, err := d.rddb.Query(c, _selPorderConfigSQL)
if err != nil {
log.Error("d.db.Query(%s) error(%v)", err)
return
}
defer rows.Close()
pc = make(map[int64]*archive.PorderConfig)
for rows.Next() {
ap := &archive.PorderConfig{}
if err = rows.Scan(&ap.ID, &ap.Name, &ap.Rank, &ap.Type, &ap.State); err != nil {
log.Error("rows.Scan error(%v)", err)
return
}
pc[ap.ID] = ap
}
return
}

View File

@@ -0,0 +1,36 @@
package archive
import (
"context"
"testing"
"go-common/app/admin/main/videoup/model/archive"
. "github.com/smartystreets/goconvey/convey"
)
func Test_PorderConfig(t *testing.T) {
Convey("PorderConfig", t, WithDao(func(d *Dao) {
p, err := d.PorderConfig(context.TODO())
So(err, ShouldBeNil)
So(p, ShouldNotBeNil)
}))
}
func Test_Porder(t *testing.T) {
Convey("test archive", t, WithDao(func(d *Dao) {
p, err := d.Porder(context.Background(), 10098814)
So(err, ShouldBeNil)
So(p, ShouldNotBeNil)
}))
}
func Test_TxUpPorder(t *testing.T) {
var c = context.Background()
Convey("TxUpPorder", t, WithDao(func(d *Dao) {
tx, _ := d.BeginTran(c)
_, err := d.TxUpPorder(tx, 0, &archive.ArcParam{})
So(err, ShouldBeNil)
tx.Commit()
}))
}

View File

@@ -0,0 +1,97 @@
package archive
import (
"context"
"fmt"
"go-common/app/admin/main/videoup/model/archive"
"go-common/library/database/sql"
"go-common/library/log"
"go-common/library/xstr"
)
const (
_recheckByAid = "SELECT id,type,aid,uid,state,ctime,mtime FROM archive_recheck WHERE aid =? and type = ?"
_recheckBatchIDByAid = "SELECT id,aid FROM archive_recheck WHERE aid IN (%s) AND type=?"
_recheckBatchStateByAid = "SELECT aid,state FROM archive_recheck WHERE aid IN (%s) AND type=?"
_upRecheckState = "UPDATE archive_recheck SET state=? WHERE aid =? and type = ?"
)
// TxUpRecheckState update recheck state
func (d *Dao) TxUpRecheckState(tx *sql.Tx, tp int, aid int64, state int8) (err error) {
if _, err = tx.Exec(_upRecheckState, state, aid, tp); err != nil {
log.Error("TxUpRecheckState Exec(%d,%d,%d) error(%v)", state, tp, aid, err)
return
}
return
}
// RecheckByAid find archive recheck
func (d *Dao) RecheckByAid(c context.Context, tp int, aid int64) (recheck *archive.Recheck, err error) {
row := d.db.QueryRow(c, _recheckByAid, aid, tp)
recheck = &archive.Recheck{}
if err = row.Scan(&recheck.ID, &recheck.Type, &recheck.AID, &recheck.UID, &recheck.State, &recheck.CTime, &recheck.MTime); err != nil {
if err == sql.ErrNoRows {
err = nil
recheck = nil
} else {
log.Error("RecheckByAid row.Scan(%d,%d) error(%v)", tp, aid, err)
}
return
}
return
}
// RecheckIDByAID returns the recheck ids and the aids that already have a record of the given type.
func (d *Dao) RecheckIDByAID(c context.Context, tp int, aids []int64) (ids []int64, existAID []int64, err error) {
var (
rows *sql.Rows
id, aid int64
)
aidstr := xstr.JoinInts(aids)
ids = []int64{}
existAID = []int64{}
if rows, err = d.db.Query(c, fmt.Sprintf(_recheckBatchIDByAid, aidstr), tp); err != nil {
log.Error("RecheckIDByAID d.db.Query error(%v) type(%d) aids(%s)", err, tp, aidstr)
return
}
defer rows.Close()
for rows.Next() {
if err = rows.Scan(&id, &aid); err != nil {
log.Error("RecheckIDByAID rows.Scan error(%v) type(%d) aids(%s)", err, tp, aidstr)
return
}
ids = append(ids, id)
existAID = append(existAID, aid)
}
return
}
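// RecheckStateMap returns a map from aid to recheck state for the given type.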
func (d *Dao) RecheckStateMap(c context.Context, tp int, aids []int64) (m map[int64]int8, err error) {
var (
rows *sql.Rows
aid int64
state int8
)
m = make(map[int64]int8)
if len(aids) == 0 {
return
}
str := xstr.JoinInts(aids)
if rows, err = d.db.Query(c, fmt.Sprintf(_recheckBatchStateByAid, str), tp); err != nil {
log.Error("RecheckStateMap d.db.Query error(%v) type(%d) aids(%s)", err, tp, str)
return
}
defer rows.Close()
for rows.Next() {
if err = rows.Scan(&aid, &state); err != nil {
log.Error("RecheckStateMap rows.Scan error(%v) type(%d) aids(%s)", err, tp, str)
return
}
m[aid] = state
}
return
}

View File

@@ -0,0 +1,38 @@
package archive
import (
"context"
. "github.com/smartystreets/goconvey/convey"
"testing"
)
func Test_RecheckByAid(t *testing.T) {
Convey("test archive", t, WithDao(func(d *Dao) {
_, err := d.RecheckByAid(context.Background(), 1, 10098814)
So(err, ShouldBeNil)
}))
}
func Test_RecheckIDByAID(t *testing.T) {
Convey("test archive", t, WithDao(func(d *Dao) {
_, _, err := d.RecheckIDByAID(context.Background(), 1, []int64{10098814})
So(err, ShouldBeNil)
}))
}
func Test_RecheckStateMap(t *testing.T) {
Convey("test archive", t, WithDao(func(d *Dao) {
_, err := d.RecheckStateMap(context.Background(), 1, []int64{10098814})
So(err, ShouldBeNil)
}))
}
func Test_TxUpRecheckState(t *testing.T) {
var c = context.Background()
Convey("TxUpRecheckState", t, WithDao(func(d *Dao) {
tx, _ := d.BeginTran(c)
err := d.TxUpRecheckState(tx, 0, 0, 0)
So(err, ShouldBeNil)
tx.Commit()
}))
}

View File

@@ -0,0 +1,39 @@
package archive
import (
"context"
"go-common/library/cache/redis"
"go-common/library/log"
)
const (
_lockingVideo = "videoup_admin_locking_video"
)
// IsLockingVideo reports whether automatic video locking is currently in progress.
func (d *Dao) IsLockingVideo(c context.Context) (locking bool, err error) {
conn := d.redis.Get(c)
defer conn.Close()
if locking, err = redis.Bool(conn.Do("EXISTS", _lockingVideo)); err != nil {
log.Error("conn.Do(EXISTS,%s) error(%v)", _lockingVideo, err)
}
return
}
// LockingVideo sets (v == 1) or clears (otherwise) the automatic video-locking flag.
func (d *Dao) LockingVideo(c context.Context, v int8) (err error) {
var conn = d.redis.Get(c)
defer conn.Close()
if v == 1 {
if _, err = conn.Do("SET", _lockingVideo, v); err != nil {
log.Error("conn.Do(SET, %s,%d) error(%v)", _lockingVideo, v, err)
}
} else {
if _, err = conn.Do("DEL", _lockingVideo); err != nil {
log.Error("conn.Do(SET, %s) error(%v)", _lockingVideo, err)
}
}
return
}
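// Note: the locking flag is a plain key with no TTL, so it only acts as a cooperative marker
// between admin instances; if the locker crashes, the flag has to be cleared explicitly via
// LockingVideo(c, 0) (editorial reading of the code above, not documented behaviour).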

View File

@@ -0,0 +1,31 @@
package archive
import (
"context"
"go-common/app/admin/main/videoup/model/archive"
"go-common/library/log"
"time"
)
const (
_statsPointSQL = "SELECT id,type,content,ctime,mtime FROM archive_report_sum WHERE mtime>=? AND mtime<? AND type=?"
)
// StatsPoints get archive_report_sum data by type and time
func (d *Dao) StatsPoints(c context.Context, stime, etime time.Time, typeInt int8) (points []*archive.StatsPoint, err error) {
rows, err := d.rddb.Query(c, _statsPointSQL, stime, etime, typeInt)
if err != nil {
log.Error("d.StatsPoints.Query(%v,%v,%v) error(%v)", stime, etime, typeInt, err)
return
}
defer rows.Close()
for rows.Next() {
point := &archive.StatsPoint{}
if err = rows.Scan(&point.ID, &point.Type, &point.Content, &point.Ctime, &point.Mtime); err != nil {
log.Error("rows.Scan error(%v)", err)
return
}
points = append(points, point)
}
return
}

View File

@@ -0,0 +1,16 @@
package archive
import (
"context"
. "github.com/smartystreets/goconvey/convey"
"testing"
"time"
)
func Test_StatsPoints(t *testing.T) {
Convey("test archive", t, WithDao(func(d *Dao) {
now := time.Now()
_, err := d.StatsPoints(context.Background(), now.Add(-time.Hour), now, 1)
So(err, ShouldBeNil)
}))
}

View File

@@ -0,0 +1,38 @@
package archive
import (
"context"
"fmt"
"go-common/library/log"
"go-common/library/xstr"
)
const (
_nameByIdsSQL = "SELECT id,description FROM archive_tag WHERE id IN (%s)"
)
// TagNameMap get audit tag id and name map
func (d *Dao) TagNameMap(c context.Context, ids []int64) (nameMap map[int64]string, err error) {
nameMap = make(map[int64]string)
if len(ids) == 0 {
return
}
rows, err := d.rddb.Query(c, fmt.Sprintf(_nameByIdsSQL, xstr.JoinInts(ids)))
if err != nil {
log.Error("db.Query() error(%v)", err)
return
}
defer rows.Close()
for rows.Next() {
tag := struct {
ID int64
Name string
}{}
if err = rows.Scan(&tag.ID, &tag.Name); err != nil {
log.Error("rows.Scan error(%v)", err)
return
}
nameMap[tag.ID] = tag.Name
}
return
}

View File

@@ -0,0 +1,14 @@
package archive
import (
"context"
. "github.com/smartystreets/goconvey/convey"
"testing"
)
func Test_TagNameMap(t *testing.T) {
Convey("test archive", t, WithDao(func(d *Dao) {
_, err := d.TagNameMap(context.Background(), []int64{1, 2})
So(err, ShouldBeNil)
}))
}

View File

@@ -0,0 +1,32 @@
package archive
import (
"context"
"time"
"go-common/app/admin/main/videoup/model/archive"
"go-common/library/log"
)
const (
_taskTooksByHalfHourSQL = "SELECT id,m50,m60,m80,m90,type,ctime,mtime FROM task_dispatch_took WHERE type=2 AND ctime>=? AND ctime<=? ORDER BY ctime ASC"
)
// TaskTooksByHalfHour get TaskTooks by half hour
func (d *Dao) TaskTooksByHalfHour(c context.Context, stime time.Time, etime time.Time) (tooks []*archive.TaskTook, err error) {
rows, err := d.rddb.Query(c, _taskTooksByHalfHourSQL, stime, etime)
if err != nil {
log.Error("d.TaskTooksByHalfHour.Query(%v,%v) error(%v)", stime, etime, err)
return
}
defer rows.Close()
for rows.Next() {
took := &archive.TaskTook{}
if err = rows.Scan(&took.ID, &took.M50, &took.M60, &took.M80, &took.M90, &took.TypeID, &took.Ctime, &took.Mtime); err != nil {
log.Error("rows.Scan error(%v)", err)
return
}
tooks = append(tooks, took)
}
return
}

View File

@@ -0,0 +1,67 @@
package archive
import (
"context"
"go-common/app/admin/main/videoup/model/archive"
"go-common/library/database/sql"
"go-common/library/log"
)
const (
_taskUserCheckInSQL = "INSERT INTO task_consumer (uid,state) VALUES (?,1) ON DUPLICATE KEY UPDATE state = 1"
_taskUserCheckOffSQL = "UPDATE task_consumer SET state = 0 WHERE uid=?"
_consumersSQL = "SELECT id,uid,state,ctime,mtime FROM task_consumer where state=1"
_isConsumerOnSQL = "SELECT state FROM task_consumer WHERE uid=?"
)
// TaskUserCheckIn insert or update task consumer check state
func (d *Dao) TaskUserCheckIn(c context.Context, uid int64) (rows int64, err error) {
res, err := d.db.Exec(c, _taskUserCheckInSQL, uid)
if err != nil {
log.Error("tx.Exec(%s, %d) error(%v)", _taskUserCheckInSQL, uid, err)
return
}
return res.RowsAffected()
}
// TaskUserCheckOff update task consumer check state
func (d *Dao) TaskUserCheckOff(c context.Context, uid int64) (rows int64, err error) {
res, err := d.db.Exec(c, _taskUserCheckOffSQL, uid)
if err != nil {
log.Error("tx.Exec(%s, %d) error(%v)", _taskUserCheckOffSQL, uid, err)
return
}
return res.RowsAffected()
}
// Consumers returns the list of checked-in task consumers.
func (d *Dao) Consumers(c context.Context) (cms []*archive.Consumers, err error) {
rows, err := d.rddb.Query(c, _consumersSQL)
if err != nil {
log.Error("d.rddb.Query(%s) error(%v)", _consumersSQL, err)
return
}
defer rows.Close()
for rows.Next() {
cm := new(archive.Consumers)
err = rows.Scan(&cm.ID, &cm.UID, &cm.State, &cm.Ctime, &cm.Mtime)
if err != nil {
log.Error("rows.Scan error(%v)", err)
return
}
cms = append(cms, cm)
}
return
}
// IsConsumerOn reports whether the consumer with the given uid is checked in.
func (d *Dao) IsConsumerOn(c context.Context, uid int64) (state int8) {
err := d.rddb.QueryRow(c, _isConsumerOnSQL, uid).Scan(&state)
if err != nil {
if err != sql.ErrNoRows {
log.Error("d.rddb.QueryRow error(%v)", err)
}
}
return
}

View File

@@ -0,0 +1,42 @@
package archive
import (
"context"
. "github.com/smartystreets/goconvey/convey"
"testing"
)
func Test_Consumers(t *testing.T) {
Convey("test archive", t, WithDao(func(d *Dao) {
_, err := d.Consumers(context.Background())
So(err, ShouldBeNil)
}))
}
func Test_IsConsumerOn(t *testing.T) {
Convey("test archive", t, WithDao(func(d *Dao) {
state := d.IsConsumerOn(context.Background(), 1)
So(state, ShouldNotBeNil)
}))
}
func Test_WeightConf(t *testing.T) {
Convey("test archive", t, WithDao(func(d *Dao) {
r, err := d.WeightConf(context.Background())
So(err, ShouldBeNil)
So(r, ShouldNotBeNil)
}))
}
func Test_TaskUserCheckIn(t *testing.T) {
Convey("TaskUserCheckIn", t, WithDao(func(d *Dao) {
_, err := d.TaskUserCheckIn(context.Background(), 0)
So(err, ShouldBeNil)
}))
}
func Test_TaskUserCheckOff(t *testing.T) {
Convey("TaskUserCheckOff", t, WithDao(func(d *Dao) {
r, err := d.TaskUserCheckOff(context.Background(), 0)
So(err, ShouldBeNil)
So(r, ShouldNotBeNil)
}))
}

View File

@@ -0,0 +1,243 @@
package archive
import (
"context"
xsql "database/sql"
"encoding/json"
"fmt"
"strings"
"time"
"go-common/app/admin/main/videoup/model/archive"
"go-common/app/admin/main/videoup/model/utils"
"go-common/library/database/sql"
"go-common/library/log"
"go-common/library/xstr"
)
const (
_userUndoneSpecifiedSQL = "SELECT id,pool,subject,adminid,aid,cid,uid,state,ctime,mtime FROM task_dispatch WHERE uid = ? AND state !=2 AND subject = 1"
_dispatchTaskSQL = "SELECT id,cid,mtime FROM task_dispatch WHERE uid in (0,?) AND state = 0 ORDER BY `weight` DESC,`subject` DESC,`id` ASC limit 8"
_upDispatchTaskSQL = "UPDATE task_dispatch SET state=1,uid=?,gtime='0000-00-00 00:00:00' WHERE id IN (%s) AND state=0"
_getNextTaskSQL = "SELECT id,pool,subject,adminid,aid,cid,uid,state,utime,ctime,mtime,dtime,gtime,weight FROM task_dispatch WHERE uid=? AND state = 1 ORDER BY `weight` DESC,`subject` DESC,`id` ASC limit 1"
_upTaskGtimeSQL = "UPDATE task_dispatch SET gtime=? WHERE id=?"
_listByConditionSQL = "SELECT id,pool,subject,adminid,aid,cid,uid,state,utime,ctime,mtime,dtime,gtime,weight FROM task_dispatch where %s order by %s %s"
_taskByIDSQL = "SELECT id,pool,subject,adminid,aid,cid,uid,state,utime,ctime,mtime,dtime,gtime,ptime,weight FROM task_dispatch WHERE id =? union " +
"SELECT task_id as id,pool,subject,adminid,aid,cid,uid,state,utime,ctime,mtime,dtime,gtime,ptime,weight FROM task_dispatch_done WHERE task_id=?"
_getWeightDBSQL = "SELECT t.id,t.state,a.mid,t.ctime,t.upspecial,t.ptime,e.description FROM `task_dispatch` AS t " +
"LEFT JOIN `task_dispatch_extend` AS e ON t.id=e.task_id INNER JOIN archive as a ON a.id=t.aid WHERE t.id IN (%s)"
_taskDispatchByIDSQL = `SELECT id,subject,aid,cid,uid,state,ctime,utime,mtime,dtime,gtime FROM task_dispatch WHERE id=?`
)
// UserUndoneSpecTask get undone dispatch which belongs to someone.
func (d *Dao) UserUndoneSpecTask(c context.Context, uid int64) (tasks []*archive.Task, err error) {
rows, err := d.db.Query(c, _userUndoneSpecifiedSQL, uid)
if err != nil {
log.Error("d.db.Query() error(%v)", err)
return
}
defer rows.Close()
for rows.Next() {
t := &archive.Task{}
if err = rows.Scan(&t.ID, &t.Pool, &t.Subject, &t.AdminID, &t.Aid, &t.Cid, &t.UID, &t.State, &t.CTime, &t.MTime); err != nil {
if err == sql.ErrNoRows {
err = nil
return
}
log.Error("row.Scan(%d) error(%v)", err)
return
}
tasks = append(tasks, t)
}
return
}
// GetDispatchTask returns the tasks about to be seized, used for operation logging.
func (d *Dao) GetDispatchTask(c context.Context, uid int64) (tls []*archive.TaskForLog, err error) {
rows, err := d.rddb.Query(c, _dispatchTaskSQL, uid)
if err != nil {
log.Error("d.rddb.Query(%s, %d) error(%v)", _dispatchTaskSQL, uid, err)
return
}
defer rows.Close()
for rows.Next() {
taskLog := &archive.TaskForLog{}
if err = rows.Scan(&taskLog.ID, &taskLog.Cid, &taskLog.Mtime); err != nil {
log.Error("rows.Scan(%s, %d) error(%v)", _dispatchTaskSQL, uid, err)
return
}
tls = append(tls, taskLog)
}
return
}
// UpDispatchTask seizes the given pending tasks for the uid.
func (d *Dao) UpDispatchTask(c context.Context, uid int64, ids []int64) (rows int64, err error) {
var (
res xsql.Result
sqlstring = fmt.Sprintf(_upDispatchTaskSQL, xstr.JoinInts(ids))
)
res, err = d.db.Exec(c, sqlstring, uid)
if err != nil {
log.Error("d.db.Exec(%s %d %v) error(%v)", sqlstring, uid, err)
return
}
return res.RowsAffected()
}
// GetNextTask returns the next claimed task for the uid, stamping gtime on first read.
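// gtime appears to mark when a claimed task was first fetched, so timeout handling elsewhere
// can measure how long it has been held (editorial interpretation of the zero-value check below).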
func (d *Dao) GetNextTask(c context.Context, uid int64) (task *archive.Task, err error) {
task = new(archive.Task)
err = d.rddb.QueryRow(c, _getNextTaskSQL, uid).Scan(&task.ID, &task.Pool, &task.Subject, &task.AdminID,
&task.Aid, &task.Cid, &task.UID, &task.State, &task.UTime, &task.CTime, &task.MTime, &task.DTime, &task.GTime, &task.Weight)
if err != nil {
if err == sql.ErrNoRows {
return nil, nil
}
log.Error("db.QueryRow(%d) error(%v)", err)
return nil, err
}
if task.GTime.TimeValue().IsZero() {
timeNow := time.Now()
_, err = d.db.Exec(c, _upTaskGtimeSQL, timeNow, task.ID)
if err != nil {
log.Error("d.db.Exec(%v,%d) error(%v)", timeNow, task.ID, err)
return nil, err
}
task.GTime = utils.NewFormatTime(timeNow)
}
return
}
// TaskByID get task
func (d *Dao) TaskByID(c context.Context, id int64) (task *archive.Task, err error) {
task = new(archive.Task)
err = d.rddb.QueryRow(c, _taskByIDSQL, id, id).Scan(&task.ID, &task.Pool, &task.Subject, &task.AdminID,
&task.Aid, &task.Cid, &task.UID, &task.State, &task.UTime, &task.CTime, &task.MTime, &task.DTime, &task.GTime, &task.PTime, &task.Weight)
if err != nil {
if err == sql.ErrNoRows {
return nil, nil
}
log.Error("db.QueryRow(%d) error(%v)", err)
return nil, err
}
return
}
// ListByCondition reads a page of the task list from the database by list type.
func (d *Dao) ListByCondition(c context.Context, uid int64, pn, ps int, ltype, leader int8) (tasks []*archive.Task, err error) {
var task *archive.Task
tasks = []*archive.Task{}
if !archive.IsDispatch(ltype) {
log.Error("ListByCondition listtype(%d) error", ltype)
return
}
listSQL := d.sqlHelper(uid, pn, ps, ltype, leader)
rows, err := d.rddb.Query(c, listSQL)
if err != nil {
log.Error("rddb.Query(%s) error(%v)", listSQL, err)
return
}
defer rows.Close()
for rows.Next() {
task = &archive.Task{}
err = rows.Scan(&task.ID, &task.Pool, &task.Subject, &task.AdminID,
&task.Aid, &task.Cid, &task.UID, &task.State, &task.UTime, &task.CTime, &task.MTime, &task.DTime, &task.GTime, &task.Weight)
if err != nil {
log.Error("rows.Scan(%s) error(%v)", listSQL, err)
return nil, err
}
tasks = append(tasks, task)
}
return
}
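// sqlHelper builds the paginated task-list query. For illustration (values assumed),
// sqlHelper(421, 1, 20, archive.TypeDelay, 0) renders roughly:
//   SELECT ... FROM task_dispatch where uid=421 AND state=3
//   order by weight desc,ctime asc,dtime asc LIMIT 0,20
// with the column list abbreviated here.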
func (d *Dao) sqlHelper(uid int64, pn, ps int, ltype int8, leader int8) string {
var (
wherecase []string
ordercase []string
limitStr string
whereStr string
orderStr string
)
limitStr = fmt.Sprintf("LIMIT %d,%d", (pn-1)*ps, ps)
if uid != 0 && (ltype != archive.TypeRealTime && leader != 1) { //实时任务或者组长不区分uid
wherecase = append(wherecase, fmt.Sprintf("uid=%d", uid))
}
ordercase = append(ordercase, "weight desc,ctime asc")
switch ltype {
case archive.TypeRealTime:
wherecase = append(wherecase, "state=0")
case archive.TypeDispatched:
wherecase = append(wherecase, "state=1 AND subject=0")
ordercase = append(ordercase, "utime desc")
case archive.TypeDelay:
wherecase = append(wherecase, "state=3")
ordercase = append(ordercase, "dtime asc")
case archive.TypeSpecial:
wherecase = append(wherecase, "state=5 AND subject=1")
ordercase = append(ordercase, "mtime asc")
case archive.TypeSpecialWait:
wherecase = append(wherecase, "state=1 AND subject=1")
ordercase = append(ordercase, "utime desc")
default:
wherecase = append(wherecase, "state=0")
}
whereStr = strings.Join(wherecase, " AND ")
orderStr = strings.Join(ordercase, ",")
return fmt.Sprintf(_listByConditionSQL, whereStr, orderStr, limitStr)
}
// GetWeightDB reads task weight configuration from the database for the given task ids.
func (d *Dao) GetWeightDB(c context.Context, ids []int64) (mcases map[int64]*archive.TaskPriority, err error) {
var (
rows *sql.Rows
desc xsql.NullString
)
sqlstring := fmt.Sprintf(_getWeightDBSQL, xstr.JoinInts(ids))
if rows, err = d.db.Query(c, sqlstring); err != nil {
log.Error("d.db.Query(%s) error(%v)", sqlstring, err)
return
}
defer rows.Close()
mcases = make(map[int64]*archive.TaskPriority)
for rows.Next() {
tp := new(archive.TaskPriority)
if err = rows.Scan(&tp.TaskID, &tp.State, &tp.Mid, &tp.Ctime, &tp.Special, &tp.Ptime, &desc); err != nil {
log.Error("rows.Scan error(%v)", err)
return
}
if desc.Valid && len(desc.String) > 0 {
if err = json.Unmarshal([]byte(desc.String), &(tp.CfItems)); err != nil {
log.Error("json.Unmarshal error(%v)", err)
return
}
}
mcases[tp.TaskID] = tp
}
return
}
//TaskDispatchByID task by id
func (d *Dao) TaskDispatchByID(c context.Context, id int64) (tk *archive.Task, err error) {
tk = &archive.Task{}
if err = d.rddb.QueryRow(c, _taskDispatchByIDSQL, id).Scan(&tk.ID, &tk.Subject, &tk.Aid, &tk.Cid, &tk.UID, &tk.State, &tk.CTime, &tk.UTime, &tk.MTime, &tk.DTime, &tk.GTime); err != nil {
if err == sql.ErrNoRows {
err = nil
} else {
log.Error("TaskDispatchByID rows.Scan error(%v) id(%d)", err, id)
}
}
return
}

View File

@@ -0,0 +1,58 @@
package archive
import (
"context"
"testing"
. "github.com/smartystreets/goconvey/convey"
)
func Test_UserUndoneSpecTask(t *testing.T) {
Convey("UserUndoneSpecTask", t, WithDao(func(d *Dao) {
_, err := d.UserUndoneSpecTask(context.Background(), 0)
So(err, ShouldBeNil)
}))
}
func Test_GetDispatchTask(t *testing.T) {
Convey("GetDispatchTask", t, WithDao(func(d *Dao) {
_, err := d.GetDispatchTask(context.Background(), 0)
So(err, ShouldBeNil)
}))
}
func Test_UpDispatchTask(t *testing.T) {
Convey("UpDispatchTask", t, WithDao(func(d *Dao) {
_, err := d.UpDispatchTask(context.Background(), 0, []int64{1, 2})
So(err, ShouldBeNil)
}))
}
func Test_TaskByID(t *testing.T) {
Convey("TaskByID", t, WithDao(func(d *Dao) {
_, err := d.TaskByID(context.Background(), 0)
So(err, ShouldBeNil)
}))
}
func Test_ListByCondition(t *testing.T) {
var c = context.Background()
Convey("ListByCondition", t, WithDao(func(d *Dao) {
_, err := d.ListByCondition(c, 0, 0, 0, 0, 0)
So(err, ShouldBeNil)
}))
}
func Test_GetWeightDB(t *testing.T) {
Convey("GetWeightDB", t, WithDao(func(d *Dao) {
_, err := d.GetWeightDB(context.Background(), []int64{1, 2})
So(err, ShouldBeNil)
}))
}
func Test_TaskDispatchByID(t *testing.T) {
Convey("TaskDispatchByID", t, WithDao(func(d *Dao) {
_, err := d.TaskDispatchByID(context.Background(), 0)
So(err, ShouldBeNil)
}))
}

View File

@@ -0,0 +1,61 @@
package archive
import (
"context"
"fmt"
"strings"
"go-common/app/admin/main/videoup/model/archive"
"go-common/library/database/sql"
"go-common/library/log"
)
const (
_inTaskHisSQL = "INSERT INTO task_oper_history(pool,action,task_id,cid,uid,result,reason,utime) VALUE (?,?,?,?,?,?,?,?)"
_mulinTaskHisSQL = "INSERT INTO task_oper_history(action,task_id,cid,uid) VALUES "
)
// TxAddTaskHis add task oper history
func (d *Dao) TxAddTaskHis(tx *sql.Tx, pool, action int8, taskID, cid, uid, utime int64, result int16, reason string) (rows int64, err error) {
res, err := tx.Exec(_inTaskHisSQL, pool, action, taskID, cid, uid, result, reason, utime)
if err != nil {
log.Error("tx.Exec(%s) error(%v)", _inTaskHisSQL, err)
return
}
return res.RowsAffected()
}
// AddTaskHis adds a task oper history record without a transaction.
func (d *Dao) AddTaskHis(c context.Context, pool, action int8, taskID, cid, uid, utime int64, result int16, reason string) (rows int64, err error) {
res, err := d.db.Exec(c, _inTaskHisSQL, pool, action, taskID, cid, uid, result, reason, utime)
if err != nil {
log.Error("d.db.Exec(%s) error(%v)", _inTaskHisSQL, err)
return
}
return res.RowsAffected()
}
// MulAddTaskHis batch-inserts task operation logs.
func (d *Dao) MulAddTaskHis(c context.Context, tls []*archive.TaskForLog, action int8, uid int64) (rows int64, err error) {
params := []string{}
for _, item := range tls {
params = append(params, fmt.Sprintf("(%d,%d,%d,%d)", action, item.ID, item.Cid, uid))
}
if len(params) == 0 {
log.Warn("MulAddTaskHis empty params")
return
}
sqlstring := strings.Join(params, ",")
res, err := d.db.Exec(c, _mulinTaskHisSQL+sqlstring)
if err != nil {
log.Error("d.db.Exec(%s, %s) error(%v)", _mulinTaskHisSQL, sqlstring, err)
return
}
return res.RowsAffected()
}
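MulAddTaskHis builds the VALUES list purely by integer formatting, so no user-controlled strings reach the statement. For reference, a minimal standalone sketch of the same batch insert assembled with placeholders instead; taskForLog and the sample values are hypothetical stand-ins for archive.TaskForLog, and the resulting query/args pair would be handed to db.Exec:
package main
import (
"fmt"
"strings"
)
// taskForLog is a trimmed, hypothetical stand-in for archive.TaskForLog.
type taskForLog struct {
ID  int64
Cid int64
}
func main() {
tls := []taskForLog{{ID: 1, Cid: 2}, {ID: 2, Cid: 4}}
action, uid := int8(1), int64(10086)
placeholders := make([]string, 0, len(tls))
args := make([]interface{}, 0, len(tls)*4)
for _, t := range tls {
placeholders = append(placeholders, "(?,?,?,?)")
args = append(args, action, t.ID, t.Cid, uid)
}
// same statement shape as _mulinTaskHisSQL, but every value is bound as a parameter
query := "INSERT INTO task_oper_history(action,task_id,cid,uid) VALUES " + strings.Join(placeholders, ",")
fmt.Println(query, args)
}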

View File

@@ -0,0 +1,66 @@
package archive
import (
"bytes"
"context"
"crypto/md5"
"encoding/hex"
"fmt"
"net/http"
"net/url"
"strconv"
"strings"
"time"
"go-common/library/log"
)
// SendQAVideoAdd sends an HTTP request to add a QA video task.
func (d *Dao) SendQAVideoAdd(c context.Context, task []byte) (err error) {
ctx, cancel := context.WithTimeout(c, time.Millisecond*500)
defer cancel()
res := new(struct {
Code int `json:"code"`
Data int64 `json:"data"`
})
val := url.Values{}
val.Set("appkey", d.c.HTTPClient.Write.App.Key)
val.Set("ts", strconv.FormatInt(time.Now().Unix(), 10))
val.Set("sign", sign(val, d.c.HTTPClient.Write.App.Key, d.c.HTTPClient.Write.App.Secret, true))
host := fmt.Sprintf("%s?%s", d.addQAVideoURL, val.Encode())
req, err := http.NewRequest(http.MethodPost, host, bytes.NewBuffer(task))
if err != nil {
log.Error("SendQAVideoAdd http.NewRequest error(%v), params(%s)", err, string(task))
return
}
req = req.WithContext(ctx)
req.Header.Set("Content-Type", "application/json;charset=UTF-8")
if err = d.clientW.Do(c, req, res); err != nil {
log.Error("SendQAVideoAdd d.clientW.Do error(%v)", err)
return
}
if res == nil || res.Code != 0 {
log.Error("SendQAVideoAdd request failed, response(%+v)", res)
return
}
return
}
// sign signs the encoded form params: '+' is rewritten as %20, the query is optionally lower-cased, and the hex md5 of query+secret is returned.
func sign(params url.Values, appkey string, secret string, lower bool) (hexdigest string) {
data := params.Encode()
if strings.IndexByte(data, '+') > -1 {
data = strings.Replace(data, "+", "%20", -1)
}
if lower {
data = strings.ToLower(data)
}
digest := md5.Sum([]byte(data + secret))
hexdigest = hex.EncodeToString(digest[:])
return
}
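For reference, a self-contained sketch of what sign computes for SendQAVideoAdd: url.Values.Encode sorts the keys, '+' is rewritten as %20, the query is lower-cased (lower=true above), and the hex md5 of query+secret becomes the sign parameter. The appkey, secret and ts values here are made up for illustration:
package main
import (
"crypto/md5"
"encoding/hex"
"fmt"
"net/url"
"strings"
)
func main() {
appkey, secret := "demo-key", "demo-secret" // hypothetical credentials
val := url.Values{}
val.Set("appkey", appkey)
val.Set("ts", "1555900000")
data := val.Encode()                          // keys are sorted by Encode()
data = strings.Replace(data, "+", "%20", -1)  // mirror the '+' handling above
data = strings.ToLower(data)                  // lower=true in the dao call
digest := md5.Sum([]byte(data + secret))      // md5(query + secret)
fmt.Println(hex.EncodeToString(digest[:]))
}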

View File

@@ -0,0 +1,131 @@
package archive
import (
"context"
xsql "database/sql"
"fmt"
"strings"
"time"
"go-common/app/admin/main/videoup/model/archive"
"go-common/library/database/sql"
"go-common/library/log"
"go-common/library/xstr"
)
const (
_upTaskByIDSQL = "UPDATE task_dispatch SET %s WHERE id=?"
_upGtimeByIDSQL = "UPDATE task_dispatch SET gtime=? WHERE id=?"
_releaseByIDSQL = "UPDATE task_dispatch SET subject=0,state=0,uid=0,gtime='0000-00-00 00:00:00' WHERE id=?"
_releaseMtimeSQL = "UPDATE task_dispatch SET subject=0,state=0,uid=0,gtime='0000-00-00 00:00:00' WHERE id IN (%s) AND mtime<=?"
_timeOutTaskSQL = "SELECT id,cid,subject,mtime FROM task_dispatch WHERE (state=1 AND mtime<?) OR (state=0 AND uid<>0 AND ctime<?)"
_getRelTaskSQL = "SELECT id,cid,subject,mtime,gtime FROM task_dispatch WHERE state IN (0,1) AND uid=?"
_releaseSpecialSQL = "UPDATE task_dispatch SET subject=0,state=0,uid=0 WHERE id=? AND gtime='0000-00-00 00:00:00' AND mtime<=? AND state=? AND uid=?"
)
// UpGtimeByID update gtime
func (d *Dao) UpGtimeByID(c context.Context, id int64, gtime string) (rows int64, err error) {
var res xsql.Result
if res, err = d.db.Exec(c, _upGtimeByIDSQL, gtime, id); err != nil {
log.Error("d.db.Exec(%s, %v, %d) error(%v)", _upGtimeByIDSQL, gtime, id, err)
return
}
return res.RowsAffected()
}
// TxUpTaskByID updates task fields by id inside a transaction.
func (d *Dao) TxUpTaskByID(tx *sql.Tx, id int64, paras map[string]interface{}) (rows int64, err error) {
arrSet := []string{}
arrParas := []interface{}{}
for k, v := range paras {
arrSet = append(arrSet, k+"=?")
arrParas = append(arrParas, v)
}
arrParas = append(arrParas, id)
sqlstring := fmt.Sprintf(_upTaskByIDSQL, strings.Join(arrSet, ","))
res, err := tx.Exec(sqlstring, arrParas...)
if err != nil {
log.Error("tx.Exec(%v %v) error(%v)", sqlstring, arrParas, err)
return
}
return res.RowsAffected()
}
// TxReleaseByID releases the specified task.
func (d *Dao) TxReleaseByID(tx *sql.Tx, id int64) (rows int64, err error) {
res, err := tx.Exec(_releaseByIDSQL, id)
if err != nil {
log.Error("tx.Exec(%s, %d) error(%v)", _releaseByIDSQL, id, err)
return
}
return res.RowsAffected()
}
// MulReleaseMtime releases tasks in batch; the mtime condition guards against releasing tasks that were modified afterwards.
func (d *Dao) MulReleaseMtime(c context.Context, ids []int64, mtime time.Time) (rows int64, err error) {
sqlstring := fmt.Sprintf(_releaseMtimeSQL, xstr.JoinInts(ids))
res, err := d.db.Exec(c, sqlstring, mtime)
if err != nil {
log.Error("tx.Exec(%s, %v) error(%v)", sqlstring, mtime, err)
return
}
return res.RowsAffected()
}
// GetTimeOutTask lists tasks that are being processed and have timed out, or were assigned but left unreviewed for too long, so they can be released.
func (d *Dao) GetTimeOutTask(c context.Context) (rts []*archive.TaskForLog, err error) {
var (
rows *sql.Rows
)
if rows, err = d.rddb.Query(c, _timeOutTaskSQL, time.Now().Add(-10*time.Minute), time.Now().Add(-80*time.Minute)); err != nil {
log.Error("d.rddb.Query(%s) error(%v)", _timeOutTaskSQL, err)
return
}
defer rows.Close()
for rows.Next() {
rt := &archive.TaskForLog{}
if err = rows.Scan(&rt.ID, &rt.Cid, &rt.Subject, &rt.Mtime); err != nil {
log.Error("rows.Scan error(%v)", err)
return
}
rts = append(rts, rt)
}
return
}
// GetRelTask lists the tasks to release when a user logs out or releases them manually (everything assigned to that user).
func (d *Dao) GetRelTask(c context.Context, uid int64) (rts []*archive.TaskForLog, lastid int64, err error) {
var (
gtime time.Time
rows *sql.Rows
)
if rows, err = d.rddb.Query(c, _getRelTaskSQL, uid); err != nil {
log.Error("d.rddb.Query(%s, %d) error(%v)", _getRelTaskSQL, uid, err)
return
}
defer rows.Close()
for rows.Next() {
rt := &archive.TaskForLog{}
if err = rows.Scan(&rt.ID, &rt.Cid, &rt.Subject, &rt.Mtime, &gtime); err != nil {
log.Error("rows.Scan error(%v)", err)
return
}
if gtime.IsZero() {
rts = append(rts, rt)
} else {
lastid = rt.ID
}
}
return
}
// TxReleaseSpecial releases a task that is held for a fixed delay; the state, mtime and owner are verified at release time.
func (d *Dao) TxReleaseSpecial(tx *sql.Tx, mtime time.Time, state int8, taskid, uid int64) (rows int64, err error) {
res, err := tx.Exec(_releaseSpecialSQL, taskid, mtime, state, uid)
if err != nil {
log.Error("tx.Exec(%s, %d, %v, %d, %d) error(%v)", _releaseSpecialSQL, taskid, mtime, state, uid, err)
return
}
return res.RowsAffected()
}
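As a quick illustration of how TxUpTaskByID turns its paras map into SQL, the sketch below assembles the same SET clause and argument slice; the column names, values and task id are hypothetical, and the map iteration order (hence column order) is intentionally irrelevant:
package main
import (
"fmt"
"strings"
)
func main() {
// mirrors how TxUpTaskByID builds "UPDATE task_dispatch SET ... WHERE id=?"
paras := map[string]interface{}{"state": 1, "uid": 421}
arrSet := []string{}
arrParas := []interface{}{}
for k, v := range paras {
arrSet = append(arrSet, k+"=?")
arrParas = append(arrParas, v)
}
arrParas = append(arrParas, int64(7)) // the task id is bound to the trailing ?
query := fmt.Sprintf("UPDATE task_dispatch SET %s WHERE id=?", strings.Join(arrSet, ","))
fmt.Println(query, arrParas)
}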

View File

@@ -0,0 +1,70 @@
package archive
import (
"context"
"testing"
"time"
. "github.com/smartystreets/goconvey/convey"
)
func Test_UpGtimeByID(t *testing.T) {
Convey("UpGtimeByID", t, WithDao(func(d *Dao) {
_, err := d.UpGtimeByID(context.Background(), 0, "")
So(err, ShouldBeNil)
}))
}
func Test_TxUpTaskByID(t *testing.T) {
var c = context.Background()
Convey("TxUpTaskByID", t, WithDao(func(d *Dao) {
tx, _ := d.BeginTran(c)
_, err := d.TxUpTaskByID(tx, 0, map[string]interface{}{"id": 0})
So(err, ShouldBeNil)
tx.Commit()
}))
}
func Test_TxReleaseByID(t *testing.T) {
var c = context.Background()
Convey("TxReleaseByID", t, WithDao(func(d *Dao) {
tx, _ := d.BeginTran(c)
_, err := d.TxReleaseByID(tx, 0)
So(err, ShouldBeNil)
tx.Commit()
}))
}
func Test_MulReleaseMtime(t *testing.T) {
var c = context.Background()
Convey("MulReleaseMtime", t, WithDao(func(d *Dao) {
_, err := d.MulReleaseMtime(c, []int64{1, 2}, time.Now())
So(err, ShouldBeNil)
}))
}
func Test_GetTimeOutTask(t *testing.T) {
var c = context.Background()
Convey("GetTimeOutTask", t, WithDao(func(d *Dao) {
_, err := d.GetTimeOutTask(c)
So(err, ShouldBeNil)
}))
}
func Test_GetRelTask(t *testing.T) {
var c = context.Background()
Convey("GetRelTask", t, WithDao(func(d *Dao) {
_, _, err := d.GetRelTask(c, 0)
So(err, ShouldBeNil)
}))
}
func Test_TxReleaseSpecial(t *testing.T) {
var c = context.Background()
Convey("TxReleaseSpecial", t, WithDao(func(d *Dao) {
tx, _ := d.BeginTran(c)
_, err := d.TxReleaseSpecial(tx, time.Now(), 0, 0, 0)
So(err, ShouldBeNil)
tx.Commit()
}))
}

View File

@@ -0,0 +1,52 @@
package archive
import (
"context"
. "github.com/smartystreets/goconvey/convey"
"go-common/app/admin/main/videoup/model/archive"
"testing"
"time"
)
func Test_Weight(t *testing.T) {
cfg, boolean, err := archive.ParseWeightConf(&archive.WeightConf{
Radio: archive.WConfTaskID,
Ids: "1,2,3,4,5",
Rule: 0,
Weight: 15,
Desc: "测试taskid权重配置",
}, 10086, "cxf")
if err != nil || cfg == nil || !boolean {
t.Fatalf("err %+v cfg:%+v bool:%v\n", err, cfg, boolean)
}
if err = d.InWeightConf(context.TODO(), cfg); err != nil {
t.Fatal(err)
}
}
func Test_MulAddTaskHis(t *testing.T) {
row, err := d.MulAddTaskHis(context.TODO(), []*archive.TaskForLog{
&archive.TaskForLog{
ID: 1,
Cid: 2,
Subject: 0,
Mtime: time.Now(),
}, &archive.TaskForLog{
ID: 2,
Cid: 4,
Subject: 1,
Mtime: time.Now(),
},
}, archive.ActionDispatch, 10086)
if row != 2 || err != nil {
t.Fail()
}
}
func Test_TaskTooksByHalfHour(t *testing.T) {
Convey("test archive", t, WithDao(func(d *Dao) {
_, err := d.TaskTooksByHalfHour(context.Background(), time.Now().Add(-time.Hour), time.Now())
So(err, ShouldBeNil)
}))
}

View File

@@ -0,0 +1,205 @@
package archive
import (
"context"
"encoding/json"
"fmt"
"go-common/library/xstr"
"strings"
"time"
"go-common/app/admin/main/videoup/model/archive"
"go-common/library/database/sql"
"go-common/library/log"
)
const (
_getMaxWeightSQL = "SELECT MAX(weight) FROM task_dispatch WHERE state in (0,1)"
_upCwAfterAddSQL = "INSERT INTO `task_dispatch_extend` (`task_id`,`description`) VALUES(?,?) ON DUPLICATE KEY UPDATE description=?"
_inWeightConfSQL = "INSERT INTO task_weight_config(mid,rule,weight,uid,uname,radio,description) VALUES (?,?,?,?,?,?,?)" // insert
_delWeightConfSQL = "UPDATE task_weight_config SET state=1 WHERE id=?" // soft delete
_listWeightConfSQL = "SELECT id,uname,state,rule,weight,mtime,description FROM task_weight_config" // list
_WeightConfSQL = "SELECT id,description FROM task_weight_config WHERE state=0" // list valid configs
_lwconfigHelpSQL = "SELECT t.id,t.cid,a.title,v.filename FROM task_dispatch t INNER JOIN archive a ON t.aid=a.id INNER JOIN archive_video v ON t.cid=v.cid WHERE t.id IN (%s)"
)
// GetMaxWeight gets the current maximum weight among dispatchable tasks (state 0 or 1).
func (d *Dao) GetMaxWeight(c context.Context) (max int64, err error) {
if err = d.rddb.QueryRow(c, _getMaxWeightSQL).Scan(&max); err != nil {
log.Error("d.rddb.QueryRow error(%v)", err)
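// swallow the error and fall back to max=0 (e.g. MAX(weight) scans as NULL when no task is in state 0/1)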
err = nil
}
return
}
// UpCwAfterAdd writes the config description into task_dispatch_extend after a weight config is added (insert, or update on duplicate task_id).
func (d *Dao) UpCwAfterAdd(c context.Context, id int64, desc string) (rows int64, err error) {
row, err := d.db.Exec(c, _upCwAfterAddSQL, id, desc, desc)
if err != nil {
log.Error("db.Exec(%s,%d,%s,%s) error(%v)", _upCwAfterAddSQL, id, desc, desc, err)
return
}
return row.RowsAffected()
}
// InWeightConf writes the weight configurations into task_weight_config in one transaction.
func (d *Dao) InWeightConf(c context.Context, mcases map[int64]*archive.WCItem) (err error) {
tx, err := d.db.Begin(c)
if err != nil {
log.Error("db.Begin() error(%v)", err)
return
}
for _, item := range mcases {
var descb []byte
if descb, err = json.Marshal(item); err != nil {
log.Error("json.Marshal(%+v) error(%v)", item, err)
tx.Rollback()
return
}
if _, err = tx.Exec(_inWeightConfSQL, item.CID, item.Rule, item.Weight, item.UID, item.Uname, item.Radio, string(descb)); err != nil {
log.Error("db.Exec(%s) error(%v)", _inWeightConfSQL, err)
tx.Rollback()
return
}
}
if err = tx.Commit(); err != nil {
log.Error("tx.Commit() error(%v)", err)
return
}
return
}
// DelWeightConf soft-deletes a weight configuration (sets state=1).
func (d *Dao) DelWeightConf(c context.Context, id int64) (rows int64, err error) {
res, err := d.db.Exec(c, _delWeightConfSQL, id)
if err != nil {
log.Error("tx.Exec(%s %d) error(%v)", _delWeightConfSQL, id, err)
return
}
return res.RowsAffected()
}
// ListWeightConf lists weight configurations matching the filter.
func (d *Dao) ListWeightConf(c context.Context, cf *archive.Confs) (citems []*archive.WCItem, err error) {
var (
count int64
rows *sql.Rows
where string
wherecase []string
descb []byte
bt = cf.Bt.TimeValue()
et = cf.Et.TimeValue()
)
if cid := cf.Cid; cid != -1 {
wherecase = append(wherecase, fmt.Sprintf("mid=%d", cid))
}
if operator := cf.Operator; len(operator) > 0 {
wherecase = append(wherecase, fmt.Sprintf("uname='%s'", operator))
}
if rule := cf.Rule; rule != -1 {
wherecase = append(wherecase, fmt.Sprintf("rule=%d", rule))
}
wherecase = append(wherecase, fmt.Sprintf("radio=%d AND state=%d", cf.Radio, cf.State))
where = "WHERE " + strings.Join(wherecase, " AND ")
sqlstring := fmt.Sprintf("%s %s LIMIT %d,%d", _listWeightConfSQL, where, (cf.Pn-1)*cf.Ps, cf.Ps)
rows, err = d.rddb.Query(c, sqlstring)
if err != nil {
log.Error("d.rddb.Query(%s) error(%v)", sqlstring, err)
return
}
defer rows.Close()
for rows.Next() {
wci := &archive.WCItem{}
if err = rows.Scan(&wci.ID, &wci.Uname, &wci.State, &wci.Rule, &wci.Weight, &wci.Mtime, &descb); err != nil {
log.Error("rows.Scan(%s) error(%v)", sqlstring, err)
return
}
if len(descb) > 0 {
if err = json.Unmarshal(descb, wci); err != nil {
log.Error("json.Unmarshal(%s) error(%v)", string(descb), err)
err = nil
continue
}
eti := wci.Et.TimeValue()
// filter time
if !et.IsZero() && !bt.IsZero() && (bt.After(wci.Mtime.TimeValue()) || et.Before(wci.Mtime.TimeValue())) {
log.Info("config expired (%+v) parse et(%v)", wci, et)
continue
}
// filter state
if cf.State == 0 && !eti.IsZero() && eti.Before(time.Now()) {
log.Info("config expired (%+v) parse et(%v)", wci, eti)
continue
}
}
if count > 50 {
break
}
count++
citems = append(citems, wci)
}
return
}
// WeightConf returns all valid configurations, used to check whether a new config conflicts with existing ones.
func (d *Dao) WeightConf(c context.Context) (items []*archive.WCItem, err error) {
var (
id int64
descb []byte
rows *sql.Rows
wci *archive.WCItem
)
if rows, err = d.rddb.Query(c, _WeightConfSQL); err != nil {
log.Error("d.rddb.Query(%s) error(%v)", _WeightConfSQL, err)
return
}
defer rows.Close()
for rows.Next() {
wci = new(archive.WCItem)
if err = rows.Scan(&id, &descb); err != nil {
log.Error("rows.Scan error(%v)", err)
return
}
if err = json.Unmarshal(descb, wci); err != nil {
log.Error("json.Unmarshal(%s) error(%v)", string(descb), err)
err = nil
continue
}
wci.ID = id
items = append(items, wci)
}
return
}
// LWConfigHelp supplements tasks with the corresponding archive title and video filename.
func (d *Dao) LWConfigHelp(c context.Context, ids []int64) (res map[int64][]interface{}, err error) {
var (
taskid, vid int64
filename, title string
rows *sql.Rows
)
rows, err = d.rddb.Query(c, fmt.Sprintf(_lwconfigHelpSQL, xstr.JoinInts(ids)))
if err != nil {
log.Error("d.db.Query(%v) error(%v)", ids, err)
return
}
defer rows.Close()
res = make(map[int64][]interface{})
for rows.Next() {
err = rows.Scan(&taskid, &vid, &title, &filename)
if err != nil {
log.Error("rows.Scan error(%v)", err)
continue
}
res[taskid] = []interface{}{filename, title, vid}
}
return
}
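The per-rule configuration is stored as a JSON blob in the description column by InWeightConf and decoded again by ListWeightConf/WeightConf. A minimal round-trip sketch; wcItem and its JSON tags are a simplified assumption, not the real archive.WCItem definition:
package main
import (
"encoding/json"
"fmt"
)
// wcItem is a trimmed, hypothetical stand-in for archive.WCItem.
type wcItem struct {
CID    int64  `json:"cid"`
Rule   int8   `json:"rule"`
Weight int64  `json:"weight"`
Uname  string `json:"uname"`
}
func main() {
in := wcItem{CID: 10086, Rule: 0, Weight: 15, Uname: "cxf"}
descb, _ := json.Marshal(in) // value written to the description column
fmt.Println(string(descb))
var out wcItem // value reconstructed when listing configs
_ = json.Unmarshal(descb, &out)
fmt.Printf("%+v\n", out)
}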

View File

@@ -0,0 +1,38 @@
package archive
import (
"context"
"testing"
"go-common/app/admin/main/videoup/model/archive"
. "github.com/smartystreets/goconvey/convey"
)
func Test_GetMaxWeight(t *testing.T) {
Convey("GetMaxWeight", t, WithDao(func(d *Dao) {
_, err := d.GetMaxWeight(context.Background())
So(err, ShouldBeNil)
}))
}
func Test_DelWeightConf(t *testing.T) {
Convey("DelWeightConf", t, WithDao(func(d *Dao) {
_, err := d.DelWeightConf(context.Background(), 0)
So(err, ShouldBeNil)
}))
}
func Test_ListWeightConf(t *testing.T) {
Convey("ListWeightConf", t, WithDao(func(d *Dao) {
_, err := d.ListWeightConf(context.Background(), &archive.Confs{})
So(err, ShouldBeNil)
}))
}
func Test_LWConfigHelp(t *testing.T) {
Convey("LWConfigHelp", t, WithDao(func(d *Dao) {
_, err := d.LWConfigHelp(context.Background(), []int64{1, 2})
So(err, ShouldBeNil)
}))
}

View File

@@ -0,0 +1,32 @@
package archive
import (
"context"
"go-common/app/admin/main/videoup/model/archive"
"go-common/library/log"
)
const (
_tpsSQL = "SELECT id,pid,name,description FROM archive_type"
)
// TypeMapping is second types opposite first types.
func (d *Dao) TypeMapping(c context.Context) (tmap map[int16]*archive.Type, err error) {
rows, err := d.rddb.Query(c, _tpsSQL)
if err != nil {
log.Error("d.tpsStmt.Query error(%v)", err)
return
}
defer rows.Close()
tmap = make(map[int16]*archive.Type)
for rows.Next() {
t := &archive.Type{}
if err = rows.Scan(&t.ID, &t.PID, &t.Name, &t.Desc); err != nil {
log.Error("rows.Scan error(%v)", err)
return
}
tmap[t.ID] = t
}
return
}

View File

@@ -0,0 +1,14 @@
package archive
import (
"context"
. "github.com/smartystreets/goconvey/convey"
"testing"
)
func Test_TypeMapping(t *testing.T) {
Convey("test archive", t, WithDao(func(d *Dao) {
_, err := d.TypeMapping(context.Background())
So(err, ShouldBeNil)
}))
}

View File

@@ -0,0 +1,32 @@
package archive
import (
"context"
"go-common/library/log"
"net/url"
"strconv"
)
//GetUserCard get user card
func (d *Dao) GetUserCard(c context.Context, mid int64) (card map[string]interface{}, err error) {
params := url.Values{}
params.Set("mid", strconv.FormatInt(mid, 10))
res := new(struct {
Code int `json:"code"`
Card map[string]interface{} `json:"card"`
})
card = map[string]interface{}{}
if err = d.clientR.Get(c, d.userCardURL, "", params, res); err != nil {
log.Error("GetUserCard d.clientR.Get error(%v) mid(%d)", err, mid)
return
}
if res == nil || res.Code != 0 {
log.Warn("GetUserCard request failed res(%+v)", res)
return
}
card = res.Card
return
}

View File

@@ -0,0 +1,16 @@
package archive
import (
"context"
. "github.com/smartystreets/goconvey/convey"
"testing"
)
func TestDao_GetUserCard(t *testing.T) {
Convey("GetUserCard", t, WithDao(func(d *Dao) {
httpMock("GET", d.userCardURL).Reply(200).JSON(`{"code":0,"data":{"has":100}}`)
card, err := d.GetUserCard(context.Background(), 27515615)
t.Logf("GetUserCard error(%v)\r\n card(%+v)", err, card)
So(err, ShouldBeNil)
}))
}

View File

@@ -0,0 +1,208 @@
package archive
import (
"context"
"fmt"
"go-common/app/admin/main/videoup/model/archive"
"go-common/library/database/sql"
"go-common/library/log"
"go-common/library/xstr"
)
const (
_inVdoSQL = `INSERT INTO archive_video(filename,cid,aid,eptitle,description,src_type,duration,filesize,resolutions,playurl,failinfo,index_order,
attribute,xcode_state,status,ctime,mtime) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)`
_upVdoSQL = "UPDATE archive_video SET eptitle=?,description=? WHERE id=?"
_upVdoIndexSQL = "UPDATE archive_video SET index_order=? WHERE id=?"
_upVdoLinkSQL = "UPDATE archive_video SET weblink=? WHERE id=?"
_upVdoStatusSQL = "UPDATE archive_video SET status=? WHERE id=?"
_upVdoAttrSQL = "UPDATE archive_video SET attribute=attribute&(~(1<<?))|(?<<?) WHERE id=?"
_videoIDSQL = `SELECT id,filename,cid,aid,eptitle,description,src_type,duration,filesize,resolutions,playurl,failinfo,
index_order,attribute,xcode_state,status,ctime,mtime FROM archive_video WHERE id=? LIMIT 1`
_videoIDsSQL = `SELECT id,filename,cid,aid,eptitle,description,src_type,duration,filesize,resolutions,playurl,failinfo,
index_order,attribute,xcode_state,status,ctime,mtime FROM archive_video WHERE id in (%s)`
_videoAidSQL = `SELECT id,filename,cid,aid,eptitle,description,src_type,duration,filesize,resolutions,playurl,failinfo,
index_order,attribute,xcode_state,status,ctime,mtime FROM archive_video WHERE aid=? and status != -100 ORDER BY index_order ASC`
_videoStatesSQL = "SELECT vr.id,vr.state AS vr_state,v.status AS v_status FROM archive_video_relation AS vr LEFT JOIN video AS v on vr.cid = v.id WHERE vr.id IN (%s)"
_aidByVidsSQL = "SELECT id,aid FROM archive_video_relation WHERE id IN (%s)"
)
// TxAddVideo insert video.
func (d *Dao) TxAddVideo(tx *sql.Tx, v *archive.Video) (vid int64, err error) {
res, err := tx.Exec(_inVdoSQL, v.Filename, v.Cid, v.Aid, v.Title, v.Desc, v.SrcType, v.Duration, v.Filesize, v.Resolutions,
v.Playurl, v.FailCode, v.Index, v.Attribute, v.XcodeState, v.Status, v.CTime, v.MTime)
if err != nil {
log.Error("d.inVideo.Exec error(%v)", err)
return
}
vid, err = res.LastInsertId()
return
}
// TxUpVideo update video by id.
func (d *Dao) TxUpVideo(tx *sql.Tx, vid int64, title, desc string) (rows int64, err error) {
res, err := tx.Exec(_upVdoSQL, title, desc, vid)
if err != nil {
log.Error("d.upVideo.Exec error(%v)", err)
return
}
rows, err = res.RowsAffected()
return
}
// TxUpVideoIndex update video index by id.
func (d *Dao) TxUpVideoIndex(tx *sql.Tx, vid int64, index int) (rows int64, err error) {
res, err := tx.Exec(_upVdoIndexSQL, index, vid)
if err != nil {
log.Error("d.upVideoIndex.Exec error(%v)", err)
return
}
rows, err = res.RowsAffected()
return
}
// TxUpVideoLink update weblink.
func (d *Dao) TxUpVideoLink(tx *sql.Tx, id int64, weblink string) (rows int64, err error) {
res, err := tx.Exec(_upVdoLinkSQL, weblink, id)
if err != nil {
log.Error("d.upVideoLink.Exec error(%v)", err)
return
}
rows, err = res.RowsAffected()
return
}
// TxUpVideoStatus update video status by id.
func (d *Dao) TxUpVideoStatus(tx *sql.Tx, id int64, status int16) (rows int64, err error) {
res, err := tx.Exec(_upVdoStatusSQL, status, id)
if err != nil {
log.Error("d.upVideoStatus.Exec error(%v)", err)
return
}
rows, err = res.RowsAffected()
return
}
// TxUpVideoAttr update video attribute by id.
func (d *Dao) TxUpVideoAttr(tx *sql.Tx, id int64, bit uint, val int32) (rows int64, err error) {
res, err := tx.Exec(_upVdoAttrSQL, bit, val, bit, id)
if err != nil {
log.Error("d.upVideoAttr.Exec() error(%v)", err)
return
}
rows, err = res.RowsAffected()
return
}
// VideoByID gets video info by id. TODO: deprecated.
func (d *Dao) VideoByID(c context.Context, id int64) (v *archive.Video, err error) {
row := d.rddb.QueryRow(c, _videoIDSQL, id)
v = &archive.Video{}
if err = row.Scan(&v.ID, &v.Filename, &v.Cid, &v.Aid, &v.Title, &v.Desc, &v.SrcType, &v.Duration, &v.Filesize, &v.Resolutions,
&v.Playurl, &v.FailCode, &v.Index, &v.Attribute, &v.XcodeState, &v.Status, &v.CTime, &v.MTime); err != nil {
if err == sql.ErrNoRows {
v = nil
err = nil
} else {
log.Error("row.Scan error(%v)", err)
}
}
return
}
// VideoByIDs gets video info by ids. TODO: deprecated.
func (d *Dao) VideoByIDs(c context.Context, id []int64) (vs []*archive.Video, err error) {
rows, err := d.rddb.Query(c, fmt.Sprintf(_videoIDsSQL, xstr.JoinInts(id)))
if err != nil {
log.Error("db.Query() error(%v)", err)
return
}
defer rows.Close()
for rows.Next() {
v := &archive.Video{}
if err = rows.Scan(&v.ID, &v.Filename, &v.Cid, &v.Aid, &v.Title, &v.Desc, &v.SrcType, &v.Duration, &v.Filesize, &v.Resolutions,
&v.Playurl, &v.FailCode, &v.Index, &v.Attribute, &v.XcodeState, &v.Status, &v.CTime, &v.MTime); err != nil {
log.Error("rows.Scan error(%v)", err)
return
}
vs = append(vs, v)
}
return
}
// VideosByAid gets video info by aid. TODO: deprecated.
func (d *Dao) VideosByAid(c context.Context, aid int64) (vs []*archive.Video, err error) {
rows, err := d.rddb.Query(c, _videoAidSQL, aid)
if err != nil {
log.Error("db.Query() error(%v)", err)
return
}
defer rows.Close()
for rows.Next() {
v := &archive.Video{}
if err = rows.Scan(&v.ID, &v.Filename, &v.Cid, &v.Aid, &v.Title, &v.Desc, &v.SrcType, &v.Duration, &v.Filesize, &v.Resolutions,
&v.Playurl, &v.FailCode, &v.Index, &v.Attribute, &v.XcodeState, &v.Status, &v.CTime, &v.MTime); err != nil {
log.Error("rows.Scan error(%v)", err)
return
}
vs = append(vs, v)
}
return
}
// VideoStateMap maps video ids to their effective state (relation state -100, otherwise the video status).
func (d *Dao) VideoStateMap(c context.Context, vids []int64) (sMap map[int64]int, err error) {
sMap = make(map[int64]int)
if len(vids) == 0 {
return
}
rows, err := d.rddb.Query(c, fmt.Sprintf(_videoStatesSQL, xstr.JoinInts(vids)))
if err != nil {
log.Error("db.Query() error(%v)", err)
return
}
defer rows.Close()
for rows.Next() {
a := struct {
ID int64
State int
Status int
}{}
if err = rows.Scan(&a.ID, &a.State, &a.Status); err != nil {
log.Error("rows.Scan error(%v)", err)
return
}
if a.State == -100 {
sMap[a.ID] = -100
} else {
sMap[a.ID] = a.Status
}
}
return
}
// VideoAidMap maps video ids to archive ids in batch.
func (d *Dao) VideoAidMap(c context.Context, vids []int64) (vMap map[int64]int64, err error) {
var (
aid, vid int64
)
vMap = make(map[int64]int64)
if len(vids) == 0 {
return
}
rows, err := d.rddb.Query(c, fmt.Sprintf(_aidByVidsSQL, xstr.JoinInts(vids)))
if err != nil {
log.Error("db.Query() error(%v)", err)
return
}
defer rows.Close()
for rows.Next() {
if err = rows.Scan(&vid, &aid); err != nil {
log.Error("rows.Scan error(%v)", err)
return
}
vMap[vid] = aid
}
return
}
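The attribute update in _upVdoAttrSQL first clears the target bit and then ORs the new value back in. The same arithmetic written out in Go, with made-up inputs:
package main
import "fmt"
// setAttrBit mirrors the SQL expression attribute=attribute&(~(1<<bit))|(val<<bit).
func setAttrBit(attribute int32, bit uint, val int32) int32 {
return attribute&^(1<<bit) | (val << bit)
}
func main() {
fmt.Printf("%b\n", setAttrBit(0b1010, 0, 1)) // 1011: bit 0 set
fmt.Printf("%b\n", setAttrBit(0b1011, 1, 0)) // 1001: bit 1 cleared
}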

View File

@@ -0,0 +1,21 @@
package archive
import (
"go-common/library/database/sql"
"go-common/library/log"
)
const (
_inAuditSQL = "INSERT INTO archive_video_audit (vid,aid,tid,oname,note,reason,passtime) VALUES (?,?,?,?,?,?,now()) ON DUPLICATE KEY UPDATE tid=?,oname=?,note=?,reason=?,passtime=now()"
)
// TxAddAudit insert video audit
func (d *Dao) TxAddAudit(tx *sql.Tx, aid, vid, tagID int64, oname, note, reason string) (rows int64, err error) {
res, err := tx.Exec(_inAuditSQL, vid, aid, tagID, oname, note, reason, tagID, oname, note, reason)
if err != nil {
log.Error("d.TxAddAudit.tx.Exec error(%v)", err)
return
}
rows, err = res.RowsAffected()
return
}

View File

@@ -0,0 +1,33 @@
package archive
import (
"context"
"go-common/app/admin/main/videoup/model/archive"
"go-common/library/log"
)
const (
_vhistoryByHIDSQL = "SELECT `id`, `cid`, `eptitle`, `description`, `filename`, `ctime` FROM `archive_video_edit_history` WHERE `hid`=? ORDER BY `id` ASC;"
)
// VideoHistoryByHID gets the user's video edit history for the given archive edit-history id.
func (d *Dao) VideoHistoryByHID(c context.Context, hid int64) (hs []*archive.VideoHistory, err error) {
hs = []*archive.VideoHistory{}
rows, err := d.db.Query(c, _vhistoryByHIDSQL, hid)
if err != nil {
log.Error("VideoHistoryByHID d.db.Query(hid(%d)) error(%v)", hid, err)
return
}
defer rows.Close()
for rows.Next() {
h := &archive.VideoHistory{}
if err = rows.Scan(&h.ID, &h.CID, &h.EpTitle, &h.Description, &h.Filename, &h.CTime); err != nil {
log.Error("VideoHistoryByHID rows.Scan(hid(%d)) error(%v)", hid, err)
return
}
hs = append(hs, h)
}
return
}

View File

@@ -0,0 +1,41 @@
package archive
import (
"context"
. "github.com/smartystreets/goconvey/convey"
"testing"
)
func TestDao_VideoByID(t *testing.T) {
Convey("VideoByID", t, WithDao(func(d *Dao) {
_, err := d.VideoByID(context.Background(), 1)
So(err, ShouldBeNil)
}))
}
func TestDao_VideosByAid(t *testing.T) {
Convey("VideosByAid", t, WithDao(func(d *Dao) {
_, err := d.VideosByAid(context.Background(), 1)
So(err, ShouldBeNil)
}))
}
func TestDao_VideoByIDs(t *testing.T) {
Convey("VideoByIDs", t, WithDao(func(d *Dao) {
_, err := d.VideoByIDs(context.Background(), []int64{1, 2})
So(err, ShouldBeNil)
}))
}
func Test_VideoStateMap(t *testing.T) {
Convey("test archive", t, WithDao(func(d *Dao) {
_, err := d.VideoStateMap(context.Background(), []int64{1, 2})
So(err, ShouldBeNil)
}))
}
func Test_VideoAidMap(t *testing.T) {
Convey("test archive", t, WithDao(func(d *Dao) {
_, err := d.VideoAidMap(context.Background(), []int64{1, 2})
So(err, ShouldBeNil)
}))
}

View File

@@ -0,0 +1,35 @@
package archive
import (
"context"
"go-common/app/admin/main/videoup/model/archive"
"go-common/library/database/sql"
"go-common/library/log"
)
const _watermark = "SELECT id, info, md5, mid, position, type, uname, url, state, mtime FROM watermark WHERE mid=? AND state != 0"
// Watermark gets the user's watermark settings.
func (d *Dao) Watermark(c context.Context, mid int64) (m []*archive.Watermark, err error) {
var rows *sql.Rows
m = []*archive.Watermark{}
if rows, err = d.creativeDB.Query(c, _watermark, mid); err != nil {
log.Error("Watermark d.creativeDB.Query error(%v) mid(%d)", err, mid)
return
}
defer rows.Close()
for rows.Next() {
wm := new(archive.Watermark)
if err = rows.Scan(&wm.ID, &wm.Info, &wm.MD5, &wm.MID, &wm.Position, &wm.Type, &wm.Uname, &wm.URL, &wm.State, &wm.MTime); err != nil {
log.Error("Watermark rows.Scan error(%v) mid(%d)", err, mid)
return
}
if wm.State == "0" {
continue
}
m = append(m, wm)
}
return
}

View File

@@ -0,0 +1,15 @@
package archive
import (
"context"
"github.com/smartystreets/goconvey/convey"
"testing"
)
func TestDao_Watermark(t *testing.T) {
convey.Convey("水印", t, WithDao(func(d *Dao) {
m, err := d.Watermark(context.TODO(), 1)
convey.So(err, convey.ShouldBeNil)
t.Logf("watermark(%+v)", m)
}))
}

View File

@@ -0,0 +1,48 @@
package(default_visibility = ["//visibility:public"])
load(
"@io_bazel_rules_go//go:def.bzl",
"go_test",
"go_library",
)
go_test(
name = "go_default_test",
srcs = ["dao_test.go"],
embed = [":go_default_library"],
rundir = ".",
tags = ["automanaged"],
deps = [
"//app/admin/main/videoup/conf:go_default_library",
"//vendor/github.com/smartystreets/goconvey/convey:go_default_library",
],
)
go_library(
name = "go_default_library",
srcs = ["dao.go"],
importpath = "go-common/app/admin/main/videoup/dao/data",
tags = ["automanaged"],
visibility = ["//visibility:public"],
deps = [
"//app/admin/main/videoup/conf:go_default_library",
"//library/log:go_default_library",
"//library/net/http/blademaster:go_default_library",
"//library/xstr:go_default_library",
"//vendor/gopkg.in/h2non/gock.v1:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,114 @@
package data
import (
"context"
"fmt"
"net/url"
"go-common/app/admin/main/videoup/conf"
"go-common/library/log"
bm "go-common/library/net/http/blademaster"
"go-common/library/xstr"
"gopkg.in/h2non/gock.v1"
"strings"
"strconv"
)
// data.bilibili.co/recsys/related?key=XXAVID
const (
_relatedURL = "/recsys/related"
_moniOidsURL = "/x/internal/aegis/monitor/result/oids"
)
// Dao is the data dao.
type Dao struct {
c *bm.ClientConfig
httpClient *bm.Client
relatedURI string
moniOidsURI string
}
var (
d *Dao
)
// New creates a data dao.
func New(c *conf.Config) *Dao {
return &Dao{
c: c.HTTPClient.Read,
httpClient: bm.NewClient(c.HTTPClient.Read),
relatedURI: c.Host.Data + _relatedURL,
moniOidsURI: c.Host.API + _moniOidsURL,
}
}
// ArchiveRelated get related archive from ai
func (d *Dao) ArchiveRelated(c context.Context, aidarr []int64) (aids string, err error) {
params := url.Values{}
params.Set("key", xstr.JoinInts(aidarr))
res := new(struct {
Code int `json:"code"`
Data []struct {
Key string `json:"key"`
Value string `json:"value"`
} `json:"data"`
})
if err = d.httpClient.Get(c, d.relatedURI, "", params, res); err != nil || res == nil {
log.Error(" d.httpClient.Get error(%v)", err)
return
}
log.Info("ArchiveRelated aids(%v) res(%+v)", aidarr, res)
if res.Code != 0 {
err = fmt.Errorf("data.bilibili.co错误(%d)", res.Code)
log.Error(" d.httpClient.Get res(%+v)", res)
return
}
if len(res.Data) > 0 {
for _, item := range res.Data {
if len(item.Value) > 0 {
if len(aids) == 0 {
aids = item.Value
} else {
aids += "," + item.Value
}
}
}
}
return
}
// MonitorOids gets the monitored oids and their stay durations.
func (d *Dao) MonitorOids(c context.Context, id int64) (oidMap map[int64]int, err error) {
oidMap = make(map[int64]int)
params := url.Values{}
params.Set("id", strconv.Itoa(int(id)))
res := new(struct {
Code int `json:"code"`
Data []struct {
OID int64 `json:"oid"`
Time int `json:"time"`
} `json:"data"`
})
if err = d.httpClient.Get(c, d.moniOidsURI, "", params, res); err != nil || res == nil {
log.Error("d.MonitorOids() d.httpClient.Get(%s,%v) error(%v)", d.moniOidsURI, params, err)
return
}
if res.Code != 0 {
err = fmt.Errorf("monitor return code(%d)", res.Code)
log.Error("d.MonitorOids() d.httpClient.Get(%s,%v) res(%v)", d.moniOidsURI, params, res)
return
}
for _, v := range res.Data {
oidMap[v.OID] = v.Time
}
return
}
func httpMock(method, url string) *gock.Request {
r := gock.New(url)
r.Method = strings.ToUpper(method)
d.httpClient.SetTransport(gock.DefaultTransport)
return r
}

View File

@@ -0,0 +1,56 @@
package data
import (
"context"
"flag"
"go-common/app/admin/main/videoup/conf"
"testing"
. "github.com/smartystreets/goconvey/convey"
"os"
)
func WithDao(f func(d *Dao)) func() {
return func() {
Reset(func() {})
f(d)
}
}
func TestArchiveRelated(t *testing.T) {
Convey("ArchiveRelated", t, WithDao(func(d *Dao) {
httpMock("GET", d.relatedURI).Reply(200).JSON(`{"code":0,"data":[{"key":"123","value":"123"}]}`)
_, err := d.ArchiveRelated(context.TODO(), []int64{10010, 10086})
So(err, ShouldBeNil)
}))
}
func TestMain(m *testing.M) {
if os.Getenv("DEPLOY_ENV") != "" {
flag.Set("app_id", "main.archive.videoup-admin")
flag.Set("conf_token", "gRSfeavV7kJdY9875Gf29pbd2wrdKZ1a")
flag.Set("tree_id", "2307")
flag.Set("conf_version", "docker-1")
flag.Set("deploy_env", "uat")
flag.Set("conf_host", "config.bilibili.co")
flag.Set("conf_path", "/tmp")
flag.Set("region", "sh")
flag.Set("zone", "sh001")
} else {
flag.Set("conf", "../../cmd/videoup-admin.toml")
}
flag.Parse()
if err := conf.Init(); err != nil {
panic(err)
}
d = New(conf.Conf)
os.Exit(m.Run())
}
func TestMonitorOids(t *testing.T) {
Convey("MonitorOids", t, WithDao(func(d *Dao) {
httpMock("GET", d.moniOidsURI).Reply(200).JSON(`{"code":0,"data":[{"oid":123,"time":123}]}`)
_, err := d.MonitorOids(context.TODO(), 1)
So(err, ShouldBeNil)
}))
}

View File

@@ -0,0 +1,57 @@
package(default_visibility = ["//visibility:public"])
load(
"@io_bazel_rules_go//go:def.bzl",
"go_test",
"go_library",
)
go_test(
name = "go_default_test",
srcs = [
"archive_test.go",
"dao_test.go",
],
embed = [":go_default_library"],
rundir = ".",
tags = ["automanaged"],
deps = [
"//app/admin/main/videoup/conf:go_default_library",
"//app/admin/main/videoup/model/archive:go_default_library",
"//app/admin/main/videoup/model/message:go_default_library",
"//vendor/github.com/smartystreets/goconvey/convey:go_default_library",
],
)
go_library(
name = "go_default_library",
srcs = [
"dao.go",
"redis.go",
],
importpath = "go-common/app/admin/main/videoup/dao/databus",
tags = ["automanaged"],
visibility = ["//visibility:public"],
deps = [
"//app/admin/main/videoup/conf:go_default_library",
"//app/admin/main/videoup/model/archive:go_default_library",
"//app/admin/main/videoup/model/message:go_default_library",
"//library/cache/redis:go_default_library",
"//library/conf/env:go_default_library",
"//library/log:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,50 @@
package databus
import (
"context"
. "github.com/smartystreets/goconvey/convey"
"go-common/app/admin/main/videoup/model/archive"
"go-common/app/admin/main/videoup/model/message"
"testing"
)
func TestPopMsgCache(t *testing.T) {
var (
err error
)
Convey("PopMsgCache", t, WithDao(func(d *Dao) {
_, err = d.PopMsgCache(context.Background())
So(err, ShouldBeNil)
}))
}
func TestDao_PushMultSync(t *testing.T) {
Convey("PushMultSync", t, WithDao(func(d *Dao) {
c := context.TODO()
sync := &archive.MultSyncParam{}
_, err := d.PushMultSync(c, sync)
So(err, ShouldBeNil)
}))
}
func TestDao_PopMultSync(t *testing.T) {
Convey("PopMultSync", t, WithDao(func(d *Dao) {
c := context.TODO()
_, err := d.PopMultSync(c)
So(err, ShouldBeNil)
}))
}
func TestDao_PopMsgCache(t *testing.T) {
Convey("FlowGroupPools", t, WithDao(func(d *Dao) {
c := context.TODO()
_, err := d.PopMsgCache(c)
So(err, ShouldBeNil)
}))
}
func TestDao_PushMsgCache(t *testing.T) {
Convey("FlowGroupPools", t, WithDao(func(d *Dao) {
c := context.TODO()
msg := &message.Videoup{}
err := d.PushMsgCache(c, msg)
So(err, ShouldBeNil)
}))
}

View File

@@ -0,0 +1,44 @@
package databus
import (
"context"
"go-common/app/admin/main/videoup/conf"
"go-common/library/cache/redis"
"go-common/library/log"
)
// Dao is redis dao.
type Dao struct {
c *conf.Config
// redis
redis *redis.Pool
}
var (
d *Dao
)
// New creates a databus dao.
func New(c *conf.Config) (d *Dao) {
d = &Dao{
c: c,
redis: redis.NewPool(c.Redis.Track.Config),
}
return d
}
// Ping ping redis.
func (d *Dao) Ping(c context.Context) (err error) {
conn := d.redis.Get(c)
if _, err = conn.Do("SET", "ping", "pong"); err != nil {
log.Error("conn.Do(SET) error(%v)", err)
}
conn.Close()
return
}
//Close close redis conn
func (d *Dao) Close() {
d.redis.Close()
}

View File

@@ -0,0 +1,38 @@
package databus
import (
"flag"
. "github.com/smartystreets/goconvey/convey"
"go-common/app/admin/main/videoup/conf"
"os"
"testing"
)
func WithDao(f func(d *Dao)) func() {
return func() {
Reset(func() {})
f(d)
}
}
func TestMain(m *testing.M) {
if os.Getenv("DEPLOY_ENV") != "" {
flag.Set("app_id", "main.archive.videoup-admin")
flag.Set("conf_token", "gRSfeavV7kJdY9875Gf29pbd2wrdKZ1a")
flag.Set("tree_id", "2307")
flag.Set("conf_version", "docker-1")
flag.Set("deploy_env", "uat")
flag.Set("conf_host", "config.bilibili.co")
flag.Set("conf_path", "/tmp")
flag.Set("region", "sh")
flag.Set("zone", "sh001")
} else {
flag.Set("conf", "../../cmd/videoup-admin.toml")
}
flag.Parse()
if err := conf.Init(); err != nil {
panic(err)
}
d = New(conf.Conf)
os.Exit(m.Run())
}

View File

@@ -0,0 +1,113 @@
package databus
import (
"context"
"encoding/json"
"go-common/app/admin/main/videoup/model/archive"
"go-common/app/admin/main/videoup/model/message"
"go-common/library/cache/redis"
"go-common/library/conf/env"
"go-common/library/log"
)
const (
_multSyncList = "m_sync_list"
_prefixMsgInfo = "videoup_admin_msg"
)
// PopMsgCache get databus message from redis
func (d *Dao) PopMsgCache(c context.Context) (msg *message.Videoup, err error) {
var (
conn = d.redis.Get(c)
bs []byte
)
defer conn.Close()
if bs, err = redis.Bytes(conn.Do("LPOP", fixRedisList(_prefixMsgInfo))); err != nil {
if err == redis.ErrNil {
err = nil
} else {
log.Error("conn.Do(LPOP, %s) error(%v)", fixRedisList(_prefixMsgInfo), err)
}
return
}
msg = &message.Videoup{}
if err = json.Unmarshal(bs, msg); err != nil {
log.Error("json.Unmarshal error(%v)", err)
}
return
}
// PushMsgCache add message into redis.
func (d *Dao) PushMsgCache(c context.Context, msg *message.Videoup) (err error) {
var (
bs []byte
conn = d.redis.Get(c)
)
defer conn.Close()
if bs, err = json.Marshal(msg); err != nil {
log.Error("json.Marshal(%s) error(%v)", bs, err)
return
}
if _, err = conn.Do("RPUSH", fixRedisList(_prefixMsgInfo), bs); err != nil {
log.Error("conn.Do(RPUSH, %s) error(%v)", bs, err)
}
return
}
func fixRedisList(list string) (target string) {
if env.DeployEnv == env.DeployEnvPre {
target = "pre_" + list
} else {
target = list
}
return
}
// PushMultSync adds a sync item to the redis set (SADD).
func (d *Dao) PushMultSync(c context.Context, sync *archive.MultSyncParam) (ok bool, err error) {
var (
conn = d.redis.Get(c)
bs []byte
)
defer conn.Close()
if bs, err = json.Marshal(sync); err != nil {
log.Error("json.Marshal(%v) error(%v)", sync, err)
return
}
if err = conn.Send("SADD", fixRedisList(_multSyncList), bs); err != nil {
log.Error("conn.Send(SADD, %s, %s) error(%v)", fixRedisList(_multSyncList), bs, err)
return
}
if err = conn.Flush(); err != nil {
log.Error("conn.Flush error(%v)", err)
return
}
if ok, err = redis.Bool(conn.Receive()); err != nil {
log.Error("conn.Receive error(%v)", err)
}
return
}
// PopMultSync pops a sync item from the redis set (SPOP).
func (d *Dao) PopMultSync(c context.Context) (res *archive.MultSyncParam, err error) {
var (
conn = d.redis.Get(c)
bs []byte
sync = &archive.MultSyncParam{}
)
defer conn.Close()
if bs, err = redis.Bytes(conn.Do("SPOP", fixRedisList(_multSyncList))); err != nil && err != redis.ErrNil {
log.Error("redis.Bytes(conn.Do(SPOP, %s)) error(%v)", fixRedisList(_multSyncList), err)
return
}
if len(bs) == 0 {
return
}
if err = json.Unmarshal(bs, sync); err != nil {
log.Error("json.Unmarshal(%s) error(%v)", string(bs), err)
return
}
res = sync
return
}
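fixRedisList namespaces the message and sync keys per deployment environment, so pre-release traffic never shares a list or set with production. A standalone sketch of that idea; the literal "pre" here is an assumption standing in for env.DeployEnvPre:
package main
import "fmt"
// prefixKey mirrors fixRedisList: keys get a "pre_" prefix only in the pre-release environment.
func prefixKey(list, deployEnv string) string {
if deployEnv == "pre" { // assumed value of env.DeployEnvPre
return "pre_" + list
}
return list
}
func main() {
fmt.Println(prefixKey("m_sync_list", "pre"))  // pre_m_sync_list
fmt.Println(prefixKey("m_sync_list", "prod")) // m_sync_list
}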

View File

@@ -0,0 +1,63 @@
package(default_visibility = ["//visibility:public"])
load(
"@io_bazel_rules_go//go:def.bzl",
"go_test",
"go_library",
)
go_test(
name = "go_default_test",
srcs = [
"auth_role_test.go",
"dao_test.go",
"httpclient_test.go",
"manager_test.go",
"reason_log_test.go",
],
embed = [":go_default_library"],
rundir = ".",
tags = ["automanaged"],
deps = [
"//app/admin/main/videoup/conf:go_default_library",
"//vendor/github.com/smartystreets/goconvey/convey:go_default_library",
],
)
go_library(
name = "go_default_library",
srcs = [
"auth_role.go",
"dao.go",
"httpclient.go",
"manager.go",
"reason_log.go",
"up.go",
],
importpath = "go-common/app/admin/main/videoup/dao/manager",
tags = ["automanaged"],
visibility = ["//visibility:public"],
deps = [
"//app/admin/main/videoup/conf:go_default_library",
"//app/admin/main/videoup/model/manager:go_default_library",
"//library/database/sql:go_default_library",
"//library/log:go_default_library",
"//library/net/http/blademaster:go_default_library",
"//library/xstr:go_default_library",
"//vendor/github.com/pkg/errors:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,22 @@
package manager
import (
"context"
"database/sql"
"go-common/library/log"
)
const (
_getUserRoleSQL = "SELECT `role` FROM `auth_role` WHERE uid = ?"
)
// GetUserRole gets the manager role of the given uid.
func (d *Dao) GetUserRole(c context.Context, uid int64) (role int8, err error) {
err = d.managerDB.QueryRow(c, _getUserRoleSQL, uid).Scan(&role)
if err != nil && err != sql.ErrNoRows {
log.Error("d.managerDB.Query error(%v)", err)
return
}
return role, nil
}

View File

@@ -0,0 +1,17 @@
package manager
import (
"context"
. "github.com/smartystreets/goconvey/convey"
"testing"
)
func Test_GetUserRole(t *testing.T) {
var (
err error
)
Convey("GetUserRole", t, WithDao(func(d *Dao) {
_, err = d.GetUserRole(context.Background(), 1)
So(err, ShouldBeNil)
}))
}

View File

@@ -0,0 +1,39 @@
package manager
import (
"context"
"go-common/app/admin/main/videoup/conf"
"go-common/library/database/sql"
bm "go-common/library/net/http/blademaster"
)
// Dao is the manager dao.
type Dao struct {
c *conf.Config
// db
managerDB *sql.DB
httpClient *bm.Client
}
// New creates a manager dao.
func New(c *conf.Config) (d *Dao) {
d = &Dao{
c: c,
managerDB: sql.NewMySQL(c.DB.Manager),
httpClient: bm.NewClient(c.HTTPClient.Read),
}
return d
}
// Close close.
func (d *Dao) Close() {
if d.managerDB != nil {
d.managerDB.Close()
}
}
// Ping pings the manager DB.
func (d *Dao) Ping(c context.Context) (err error) {
return d.managerDB.Ping(c)
}

View File

@@ -0,0 +1,50 @@
package manager
import (
"context"
"flag"
. "github.com/smartystreets/goconvey/convey"
"go-common/app/admin/main/videoup/conf"
"os"
"testing"
)
var (
d *Dao
)
func WithDao(f func(d *Dao)) func() {
return func() {
Reset(func() {})
f(d)
}
}
func TestPing(t *testing.T) {
Convey("Ping", t, WithDao(func(d *Dao) {
err := d.Ping(context.TODO())
So(err, ShouldBeNil)
}))
}
func TestMain(m *testing.M) {
if os.Getenv("DEPLOY_ENV") != "" {
flag.Set("app_id", "main.archive.videoup-admin")
flag.Set("conf_token", "gRSfeavV7kJdY9875Gf29pbd2wrdKZ1a")
flag.Set("tree_id", "2307")
flag.Set("conf_version", "docker-1")
flag.Set("deploy_env", "uat")
flag.Set("conf_host", "config.bilibili.co")
flag.Set("conf_path", "/tmp")
flag.Set("region", "sh")
flag.Set("zone", "sh001")
} else {
flag.Set("conf", "../../cmd/videoup-admin.toml")
}
flag.Parse()
if err := conf.Init(); err != nil {
panic(err)
}
d = New(conf.Conf)
os.Exit(m.Run())
}

View File

@@ -0,0 +1,95 @@
package manager
import (
"context"
"net/url"
"strings"
"go-common/library/log"
"go-common/library/xstr"
)
const (
_uidsURL = "/x/admin/manager/users/uids"
_unamesURL = "/x/admin/manager/users/unames"
)
// Unames get unames by uid
func (d *Dao) Unames(c context.Context, uids []int64) (res map[int64]string, err error) {
var (
param = url.Values{}
uidStr = xstr.JoinInts(uids)
unameURI = d.c.Host.Manager + _unamesURL
)
param.Set("uids", uidStr)
var httpRes struct {
Code int `json:"code"`
Data map[int64]string `json:"data"`
Message string `json:"message"`
}
err = d.httpClient.Get(c, unameURI, "", param, &httpRes)
if err != nil {
log.Error("d.client.Get(%s) error(%v)", unameURI+"?"+param.Encode(), err)
return
}
if httpRes.Code != 0 {
log.Error("url(%s) error(%v), code(%d), message(%s)", unameURI+"?"+param.Encode(), err, httpRes.Code, httpRes.Message)
}
res = httpRes.Data
return
}
// Uids get uids by unames
func (d *Dao) Uids(c context.Context, names []string) (res map[string]int64, err error) {
var (
param = url.Values{}
namesStr = strings.Join(names, ",")
uidURI = d.c.Host.Manager + _uidsURL
)
param.Set("unames", namesStr)
var httpRes struct {
Code int `json:"code"`
Data map[string]int64 `json:"data"`
Message string `json:"message"`
}
err = d.httpClient.Get(c, uidURI, "", param, &httpRes)
if err != nil {
log.Error("d.client.Get(%s) error(%v)", uidURI+"?"+param.Encode(), err)
return
}
if httpRes.Code != 0 {
log.Error("url(%s) error(%v), code(%d), message(%s)", uidURI+"?"+param.Encode(), err, httpRes.Code, httpRes.Message)
}
res = httpRes.Data
return
}
// GetUIDByName gets the uid for a username.
func (d *Dao) GetUIDByName(c context.Context, name string) (uid int64, err error) {
var res map[string]int64
if res, err = d.Uids(c, []string{name}); err != nil {
return
}
if uid, ok := res[name]; ok {
return uid, nil
}
return
}
// GetNameByUID gets usernames for the given uids, keyed by uid.
func (d *Dao) GetNameByUID(c context.Context, uids []int64) (mcases map[int64][]interface{}, err error) {
var res map[int64]string
if res, err = d.Unames(c, uids); err != nil {
return
}
mcases = make(map[int64][]interface{})
for uid, uname := range res {
mcases[uid] = []interface{}{uname}
}
return
}

View File

@@ -0,0 +1,27 @@
package manager
import (
"context"
. "github.com/smartystreets/goconvey/convey"
"testing"
)
func Test_GetUIDByName(t *testing.T) {
var (
err error
)
Convey("GetUIDByName", t, WithDao(func(d *Dao) {
_, err = d.GetUIDByName(context.Background(), "1111")
So(err, ShouldBeNil)
}))
}
func Test_GetNameByUID(t *testing.T) {
var (
err error
)
Convey("GetUIDByName", t, WithDao(func(d *Dao) {
_, err = d.GetNameByUID(context.Background(), []int64{1, 2})
So(err, ShouldBeNil)
}))
}

View File

@@ -0,0 +1,25 @@
package manager
import (
"context"
"go-common/app/admin/main/videoup/model/manager"
"go-common/library/log"
)
const (
_userInfoSQL = "SELECT id,username,nickname,state FROM user WHERE id = ?"
)
// User get manager user by id
func (d *Dao) User(c context.Context, id int64) (u *manager.User, err error) {
var (
row = d.managerDB.QueryRow(c, _userInfoSQL, id)
)
u = &manager.User{}
if err = row.Scan(&u.ID, &u.UserName, &u.NickName, &u.State); err != nil {
log.Error("row.Scan error(%v)", err)
return
}
return
}

View File

@@ -0,0 +1,14 @@
package manager
import (
"context"
"github.com/smartystreets/goconvey/convey"
"testing"
)
func Test_User(t *testing.T) {
convey.Convey("User", t, WithDao(func(d *Dao) {
_, err := d.User(context.TODO(), 1)
convey.So(err, convey.ShouldBeNil)
}))
}

View File

@@ -0,0 +1,22 @@
package manager
import (
"context"
"go-common/library/log"
"time"
)
const (
_inLogSQL = "INSERT INTO reason_log (oid, type, category_id, reason_id, uid, typeid, ctime, mtime) VALUES (?,?,?,?,?,?,?,?)"
)
// AddReasonLog add a reason log
func (d *Dao) AddReasonLog(c context.Context, oid int64, tp int8, cateID int64, rid int64, uid int64, tid int16, ctime, mtime time.Time) (rows int64, err error) {
res, err := d.managerDB.Exec(c, _inLogSQL, oid, tp, cateID, rid, uid, tid, ctime, mtime)
if err != nil {
log.Error("d.AddReasonLog.Exec error(%v)", err)
return
}
rows, err = res.RowsAffected()
return
}

View File

@@ -0,0 +1,15 @@
package manager
import (
"context"
"github.com/smartystreets/goconvey/convey"
"testing"
"time"
)
func Test_AddReasonLog(t *testing.T) {
convey.Convey("AddReasonLog", t, WithDao(func(d *Dao) {
_, err := d.AddReasonLog(context.TODO(), 0, 0, 0, 0, 0, 0, time.Now(), time.Now())
convey.So(err, convey.ShouldBeNil)
}))
}

View File

@@ -0,0 +1,33 @@
package manager
import (
"context"
"github.com/pkg/errors"
"go-common/app/admin/main/videoup/model/manager"
"go-common/library/log"
"net/url"
)
const (
_upGroupURI = "/x/internal/uper/group/get"
)
// UpGroups get all up groups
func (d *Dao) UpGroups(c context.Context) (groups map[int64]*manager.UpGroup, err error) {
groups = make(map[int64]*manager.UpGroup)
params := url.Values{}
params.Set("state", "1")
var res *manager.UpGroupData
if err = d.httpClient.Get(c, d.c.Host.API+_upGroupURI, "", params, &res); err != nil {
log.Error("d.UpGroups() error(%v)", err)
return
}
if res == nil {
err = errors.New("error when get up groups")
return
}
for _, v := range res.Data {
groups[v.ID] = v
}
return
}

View File

@@ -0,0 +1,54 @@
package(default_visibility = ["//visibility:public"])
load(
"@io_bazel_rules_go//go:def.bzl",
"go_test",
"go_library",
)
go_test(
name = "go_default_test",
srcs = [
"dao_test.go",
"redis_test.go",
],
embed = [":go_default_library"],
rundir = ".",
tags = ["automanaged"],
deps = [
"//app/admin/main/videoup/conf:go_default_library",
"//app/admin/main/videoup/model/monitor:go_default_library",
"//vendor/github.com/smartystreets/goconvey/convey:go_default_library",
],
)
go_library(
name = "go_default_library",
srcs = [
"dao.go",
"redis.go",
],
importpath = "go-common/app/admin/main/videoup/dao/monitor",
tags = ["automanaged"],
visibility = ["//visibility:public"],
deps = [
"//app/admin/main/videoup/conf:go_default_library",
"//app/admin/main/videoup/model/monitor:go_default_library",
"//library/cache/redis:go_default_library",
"//library/log:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,30 @@
package monitor
import (
"go-common/app/admin/main/videoup/conf"
"go-common/library/cache/redis"
)
// Dao is redis dao.
type Dao struct {
c *conf.Config
redis *redis.Pool
}
var (
d *Dao
)
// New creates a monitor dao.
func New(c *conf.Config) (d *Dao) {
d = &Dao{
c: c,
redis: redis.NewPool(c.Redis.Secondary.Config),
}
return d
}
// Close close dao.
func (d *Dao) Close() {
d.redis.Close()
}

View File

@@ -0,0 +1,30 @@
package monitor
import (
"flag"
"go-common/app/admin/main/videoup/conf"
"os"
"testing"
)
func TestMain(m *testing.M) {
if os.Getenv("DEPLOY_ENV") != "" {
flag.Set("app_id", "main.archive.videoup-admin")
flag.Set("conf_token", "gRSfeavV7kJdY9875Gf29pbd2wrdKZ1a")
flag.Set("tree_id", "2307")
flag.Set("conf_version", "docker-1")
flag.Set("deploy_env", "uat")
flag.Set("conf_host", "config.bilibili.co")
flag.Set("conf_path", "/tmp")
flag.Set("region", "sh")
flag.Set("zone", "sh001")
} else {
flag.Set("conf", "../../cmd/videoup-admin.toml")
}
flag.Parse()
if err := conf.Init(); err != nil {
panic(err)
}
d = New(conf.Conf)
os.Exit(m.Run())
}

View File

@@ -0,0 +1,282 @@
package monitor
import (
"context"
"encoding/json"
"errors"
"fmt"
"go-common/app/admin/main/videoup/model/monitor"
"go-common/library/cache/redis"
"go-common/library/log"
"sort"
"strconv"
"time"
)
const (
FieldKeyFormat = "%d_%d_%d" // field format (type_business_id) of a monitor rule inside the redis hash
)
// StatsResult gets stay statistics for archives under the given key and rule.
func (d *Dao) StatsResult(c context.Context, key string, conf *monitor.RuleConf) (res *monitor.Stats, err error) {
var (
conn = d.redis.Get(c)
totalC, moniC, maxT int
now = time.Now().Unix()
tFrom, tTo int64
timeCdt int64
compCdt string
ok bool
)
defer conn.Close()
if _, ok = conf.NotifyCdt["time"]; !ok {
err = errors.New("配置的 NotifyCdt 中不存在 time")
return
}
timeCdt = conf.NotifyCdt["time"].Value
compCdt = conf.NotifyCdt["time"].Comp
switch compCdt {
case monitor.CompGT:
tFrom = 0
tTo = now - timeCdt
case monitor.CompLT:
tFrom = now - timeCdt
tTo = now
default:
err = errors.New("配置的 NotifyCdt 中 comparison 不合法: " + compCdt)
return
}
if totalC, err = redis.Int(conn.Do("ZCOUNT", key, 0, now)); err != nil {
log.Error("conn.Do(ZCOUNT,%s,0,%d) error(%v)", key, now, err)
return
}
if moniC, err = redis.Int(conn.Do("ZCOUNT", key, tFrom, tTo)); err != nil {
log.Error("conn.Do(ZCOUNT,%s,%d,%d) error(%v)", key, tFrom, tTo, err)
return
}
var oldest map[string]string // the entry that has stayed in the list the longest
if oldest, err = redis.StringMap(conn.Do("ZRANGE", key, 0, 0, "WITHSCORES")); err != nil {
log.Error("conn.Do(ZRANGE, %s, 0, 0, WITHSCORES) error(%v)", key, err)
return
}
for _, t := range oldest {
var i int
if i, err = strconv.Atoi(t); err != nil {
return
}
maxT = int(now) - i
}
res = &monitor.Stats{
TotalCount: totalC,
MoniCount: moniC,
MaxTime: maxT,
}
return
}
// GetAllRules gets all monitor rules; when all is false only enabled rules (state=1) are returned.
func (d *Dao) GetAllRules(c context.Context, all bool) (rules []*monitor.Rule, err error) {
var (
conn = d.redis.Get(c)
res = make(map[string]string)
)
defer conn.Close()
if res, err = redis.StringMap(conn.Do("HGETALL", monitor.RulesKey)); err != nil {
if err != redis.ErrNil {
log.Error("conn.Do(HGETALL, %s) error(%v)", monitor.RulesKey, err)
return
}
}
for _, v := range res {
rule := &monitor.Rule{}
if err = json.Unmarshal([]byte(v), rule); err != nil {
log.Error("json.Unmarshal(%v) error(%v)", v, err)
break
}
if !all && rule.State != 1 {
continue
}
rules = append(rules, rule)
}
return
}
// GetRules gets the monitor rules of a business; when all is false only enabled rules are returned.
func (d *Dao) GetRules(c context.Context, tp, bid int8, all bool) (rules []*monitor.Rule, err error) {
if rules, err = d.GetAllRules(c, all); err != nil {
return
}
for k := 0; k < len(rules); k++ {
v := rules[k]
if v.Type != tp || v.Business != bid { // drop rules that belong to other types/businesses
rules = append(rules[:k], rules[k+1:]...)
k--
continue
}
}
return
}
// SetRule adds or updates a monitor rule; a new id is allocated when rule.ID is 0.
func (d *Dao) SetRule(c context.Context, rule *monitor.Rule) (err error) {
if rule.ID == 0 {
if rule.ID, err = d.RuleIDIncKey(c); err != nil {
return
}
}
var (
conn = d.redis.Get(c)
field = fmt.Sprintf(FieldKeyFormat, rule.Type, rule.Business, rule.ID)
bs []byte
)
defer conn.Close()
if bs, err = json.Marshal(rule); err != nil {
log.Error("json.Marshal(%v) error(%v)", rule, err)
return
}
if _, err = conn.Do("HSET", monitor.RulesKey, field, bs); err != nil {
log.Error("conn.Do(HSET,%s,%s,%s) error(%v)", monitor.RulesKey, field, bs, err)
return
}
return
}
// GetRule gets a single monitor rule.
func (d *Dao) GetRule(c context.Context, tp, bid int8, id int64) (rule *monitor.Rule, err error) {
var (
conn = d.redis.Get(c)
field = fmt.Sprintf(FieldKeyFormat, tp, bid, id)
bs []byte
)
defer conn.Close()
if bs, err = redis.Bytes(conn.Do("HGET", monitor.RulesKey, field)); err != nil {
log.Error("conn.Do(HGET,%s,%s) error(%v)", monitor.RulesKey, field, err)
return
}
rule = &monitor.Rule{}
if err = json.Unmarshal(bs, rule); err != nil {
log.Error("json.Unmarshal(%v) error(%v)", bs, err)
return
}
return
}
// SetRuleState updates the state of a monitor rule.
func (d *Dao) SetRuleState(c context.Context, tp, bid int8, id int64, state int8) (err error) {
var (
rule *monitor.Rule
)
if rule, err = d.GetRule(c, tp, bid, id); err != nil {
return
}
rule.State = state
if err = d.SetRule(c, rule); err != nil {
return
}
return
}
// RuleIDIncKey allocates the next rule id via INCR.
func (d *Dao) RuleIDIncKey(c context.Context) (id int64, err error) {
var (
conn = d.redis.Get(c)
)
defer conn.Close()
if id, err = redis.Int64(conn.Do("INCR", monitor.RuleIDIncKey)); err != nil {
log.Error("conn.Do(INCR,%s) error(%v)", monitor.RuleIDIncKey, err)
}
return
}
// BusStatsKeys gets all statistics keys of a business.
func (d *Dao) BusStatsKeys(c context.Context, bid int8) (prefix string, keys []string, err error) {
var (
conf *monitor.KeyConf
ok bool
)
if conf, ok = monitor.RedisKeyConf[bid]; !ok {
err = errors.New("业务redis key配置不存在")
log.Error("d.BusStatsKeys(%d) error(%v)", bid, err)
return
}
prefix = fmt.Sprintf(monitor.BusPrefix, bid)
// TODO: implement recursively
if bid == monitor.BusVideo {
for _, v := range conf.KFields["state"] {
key := prefix + fmt.Sprintf(monitor.SuffixVideo, v)
keys = append(keys, key)
}
} else if bid == monitor.BusArc {
for _, round := range conf.KFields["round"] {
for _, state := range conf.KFields["state"] {
key := prefix + fmt.Sprintf(monitor.SuffixArc, round, state)
keys = append(keys, key)
}
}
}
return
}
// StayOids gets the staying oids from multiple keys, together with the total count.
func (d *Dao) StayOids(c context.Context, rule *monitor.Rule, keys []string) (oidMap map[int64]int, total int, err error) {
var (
conn = d.redis.Get(c)
intMap map[string]int
min, max int64
now = time.Now().Unix()
)
defer conn.Close()
oidMap = make(map[int64]int)
intMap = make(map[string]int)
if _, ok := rule.RuleConf.NotifyCdt["time"]; !ok {
log.Error("StayOids(%+v) Rule配置中NotifyCdt 没有time", *rule)
err = fmt.Errorf("Rule(%d) NotifyCdt Error: no time", rule.ID)
return
}
timeConf := rule.RuleConf.NotifyCdt["time"]
switch timeConf.Comp {
case monitor.CompGT:
min = 0
max = now - timeConf.Value
case monitor.CompLT:
min = now - timeConf.Value
max = now
default:
log.Error("StayOids(%+v) Rule配置NotifyCdt中time的表达式错误", *rule)
err = fmt.Errorf("Rule(%d) NotifyCdt Error: unknown time comp", rule.ID)
return
}
// sort keys
sort.Strings(keys)
// accumulate counts per key for paging
for _, key := range keys {
count := 0
if count, err = redis.Int(conn.Do("ZCOUNT", key, min, max)); err != nil {
log.Error("redis.Int(conn.Do(\"ZCOUNT\", %s, %d, %d)) error(%v)", key, min, max, err)
return
}
total += count
if intMap, err = redis.IntMap(conn.Do("ZRANGEBYSCORE", key, min, max, "WITHSCORES")); err != nil {
log.Error("redis.IntMap(conn.Do(\"ZRANGEBYSCORE\", %s, %d, %d, \"WITHSCORES\")) error(%v)", key, min, max, err)
return
}
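		// Merge this key's members into the result; scores are enqueue timestamps.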
for k, v := range intMap {
			oid := 0
			if oid, err = strconv.Atoi(k); err != nil {
				log.Error("strconv.Atoi(%s) error(%v)", k, err)
				// skip malformed members instead of failing the whole call
				err = nil
				continue
			}
			oidMap[int64(oid)] = v
}
}
return
}
// RemMonitorStats removes an oid from a stay-stats sorted set.
func (d *Dao) RemMonitorStats(c context.Context, key string, oid int64) (err error) {
var (
conn = d.redis.Get(c)
)
defer conn.Close()
if _, err = conn.Do("ZREM", key, oid); err != nil {
log.Error("conn.Do(ZADD, %s, %d) error(%v)", key, oid, err)
}
return
}

View File

@@ -0,0 +1,127 @@
package monitor
import (
"context"
"go-common/app/admin/main/videoup/model/monitor"
"testing"
. "github.com/smartystreets/goconvey/convey"
)
func TestDao_GetRules(t *testing.T) {
Convey("GetRules", t, func() {
rules, err := d.GetRules(context.TODO(), 1, 1, true)
So(err, ShouldBeNil)
So(rules, ShouldNotBeNil)
})
}
func TestDao_SetRule(t *testing.T) {
Convey("SetRule", t, func() {
rule := &monitor.Rule{
ID: 1,
Type: 1,
Business: 1,
			Name:     "first-round review stage",
State: 1,
RuleConf: &monitor.RuleConf{
				Name: "first-round review: long duration",
MoniCdt: map[string]struct {
Comp string `json:"comparison"`
Value int64 `json:"value"`
}{
"state": {
Comp: "=",
Value: -1,
},
},
NotifyCdt: map[string]struct {
Comp string `json:"comparison"`
Value int64 `json:"value"`
}{
"count": {
Comp: ">",
Value: 10,
},
"time": {
Comp: ">",
Value: 10,
},
},
Notify: struct {
Way int8 `json:"way"`
Member []string `json:"member"`
}{
Way: monitor.NotifyTypeEmail,
Member: []string{"liusiming@bilibili.com"},
},
},
}
/*rule := &monitor.Rule{
ID: 6,
Type: 1,
Business: 2,
			Name:     "second-round review stage",
State: 1,
RuleConf: &monitor.RuleConf{
				Name: "second-round review: long duration",
MoniCdt: map[string]struct {
Comp string `json:"comparison"`
Value int64 `json:"value"`
}{
"state": {
Comp: "=",
Value: -1,
},
"round": {
Comp: "=",
Value: 10,
},
},
NotifyCdt: map[string]struct {
Comp string `json:"comparison"`
Value int64 `json:"value"`
}{
"count": {
Comp: ">",
Value: 10,
},
"time": {
Comp: ">",
Value: 10,
},
},
Notify: struct {
Way int8 `json:"way"`
Member []string `json:"member"`
}{
Way: monitor.NotifyTypeEmail,
Member: []string{"liusiming@bilibili.com"},
},
},
}*/
err := d.SetRule(context.TODO(), rule)
So(err, ShouldBeNil)
})
}
func TestDao_SetRuleState(t *testing.T) {
Convey("SetRuleState", t, func() {
err := d.SetRuleState(context.TODO(), 1, 1, 1, monitor.RuleStateOK)
So(err, ShouldBeNil)
})
}
func TestDao_BusKeys(t *testing.T) {
Convey("BusKeys", t, func() {
_, keys, err := d.BusStatsKeys(context.TODO(), 1)
So(err, ShouldBeNil)
So(keys, ShouldNotBeNil)
})
}
func TestDao_GetAllRules(t *testing.T) {
Convey("BusKeys", t, func() {
_, err := d.GetAllRules(context.Background(), true)
So(err, ShouldBeNil)
})
}

View File

@@ -0,0 +1,46 @@
package(default_visibility = ["//visibility:public"])
load(
"@io_bazel_rules_go//go:def.bzl",
"go_test",
"go_library",
)
go_test(
name = "go_default_test",
srcs = ["dao_test.go"],
embed = [":go_default_library"],
rundir = ".",
tags = ["automanaged"],
deps = [
"//app/admin/main/videoup/conf:go_default_library",
"//vendor/github.com/smartystreets/goconvey/convey:go_default_library",
],
)
go_library(
name = "go_default_library",
srcs = ["dao.go"],
importpath = "go-common/app/admin/main/videoup/dao/music",
tags = ["automanaged"],
visibility = ["//visibility:public"],
deps = [
"//app/admin/main/videoup/conf:go_default_library",
"//library/database/orm:go_default_library",
"//vendor/github.com/jinzhu/gorm:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,52 @@
package music
import (
"context"
"go-common/app/admin/main/videoup/conf"
"go-common/library/database/orm"
"github.com/jinzhu/gorm"
)
// Dao is the music dao.
type Dao struct {
c *conf.Config
// db
DB *gorm.DB
}
var (
d *Dao
)
// New creates an instance of Dao and returns it.
func New(c *conf.Config) (d *Dao) {
d = &Dao{
// conf
c: c,
// db
DB: orm.NewMySQL(c.ORMArchive),
}
d.initORM()
return
}
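// initORM enables gorm SQL logging on the DB connection.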
func (d *Dao) initORM() {
d.DB.LogMode(true)
}
// Ping checks the connection of the db.
func (d *Dao) Ping(c context.Context) (err error) {
if d.DB != nil {
err = d.DB.DB().PingContext(c)
}
return
}
// Close closes the connection of the db.
func (d *Dao) Close() {
if d.DB != nil {
d.DB.Close()
}
}

View File

@@ -0,0 +1,47 @@
package music
import (
"context"
"flag"
"go-common/app/admin/main/videoup/conf"
"testing"
. "github.com/smartystreets/goconvey/convey"
"os"
)
func WithDao(f func(d *Dao)) func() {
return func() {
Reset(func() {})
f(d)
}
}
func TestPing(t *testing.T) {
Convey("Ping", t, WithDao(func(d *Dao) {
err := d.Ping(context.TODO())
So(err, ShouldBeNil)
}))
}
func TestMain(m *testing.M) {
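	// In a deployment environment the config is pulled from the config center;
	// otherwise the local toml file is used.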
if os.Getenv("DEPLOY_ENV") != "" {
flag.Set("app_id", "main.archive.videoup-admin")
flag.Set("conf_token", "gRSfeavV7kJdY9875Gf29pbd2wrdKZ1a")
flag.Set("tree_id", "2307")
flag.Set("conf_version", "docker-1")
flag.Set("deploy_env", "uat")
flag.Set("conf_host", "config.bilibili.co")
flag.Set("conf_path", "/tmp")
flag.Set("region", "sh")
flag.Set("zone", "sh001")
} else {
flag.Set("conf", "../../cmd/videoup-admin.toml")
}
flag.Parse()
if err := conf.Init(); err != nil {
panic(err)
}
d = New(conf.Conf)
os.Exit(m.Run())
}

View File

@@ -0,0 +1,54 @@
package(default_visibility = ["//visibility:public"])
load(
"@io_bazel_rules_go//go:def.bzl",
"go_test",
"go_library",
)
go_test(
name = "go_default_test",
srcs = [
"dao_test.go",
"oversea_test.go",
],
embed = [":go_default_library"],
rundir = ".",
tags = ["automanaged"],
deps = [
"//app/admin/main/videoup/conf:go_default_library",
"//vendor/github.com/smartystreets/goconvey/convey:go_default_library",
],
)
go_library(
name = "go_default_library",
srcs = [
"dao.go",
"oversea.go",
],
importpath = "go-common/app/admin/main/videoup/dao/oversea",
tags = ["automanaged"],
visibility = ["//visibility:public"],
deps = [
"//app/admin/main/videoup/conf:go_default_library",
"//app/admin/main/videoup/model/oversea:go_default_library",
"//library/database/orm:go_default_library",
"//library/log:go_default_library",
"//vendor/github.com/jinzhu/gorm:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,42 @@
package manager
import (
"context"
"github.com/jinzhu/gorm"
"go-common/app/admin/main/videoup/conf"
"go-common/library/database/orm"
)
// Dao is the dao wrapping the oversea db.
type Dao struct {
c *conf.Config
// db
OverseaDB *gorm.DB
}
var (
d *Dao
)
// New creates a new dao.
func New(c *conf.Config) (d *Dao) {
d = &Dao{
c: c,
OverseaDB: orm.NewMySQL(c.DB.Oversea),
}
return d
}
// Close closes the db connection.
func (d *Dao) Close() {
if d.OverseaDB != nil {
d.OverseaDB.Close()
}
}
// Ping pings the oversea db.
func (d *Dao) Ping(c context.Context) (err error) {
err = d.OverseaDB.DB().PingContext(c)
return
}

View File

@@ -0,0 +1,38 @@
package manager
import (
"flag"
. "github.com/smartystreets/goconvey/convey"
"go-common/app/admin/main/videoup/conf"
"os"
"testing"
)
func WithDao(f func(d *Dao)) func() {
return func() {
Reset(func() {})
f(d)
}
}
func TestMain(m *testing.M) {
if os.Getenv("DEPLOY_ENV") != "" {
flag.Set("app_id", "main.archive.videoup-admin")
flag.Set("conf_token", "gRSfeavV7kJdY9875Gf29pbd2wrdKZ1a")
flag.Set("tree_id", "2307")
flag.Set("conf_version", "docker-1")
flag.Set("deploy_env", "uat")
flag.Set("conf_host", "config.bilibili.co")
flag.Set("conf_path", "/tmp")
flag.Set("region", "sh")
flag.Set("zone", "sh001")
} else {
flag.Set("conf", "../../cmd/videoup-admin.toml")
}
flag.Parse()
if err := conf.Init(); err != nil {
panic(err)
}
d = New(conf.Conf)
os.Exit(m.Run())
}

View File

@@ -0,0 +1,258 @@
package manager
import (
"context"
"go-common/app/admin/main/videoup/model/oversea"
"go-common/library/log"
)
// UpPolicyRelation updates or inserts an archive_relation record.
func (d *Dao) UpPolicyRelation(c context.Context, aid, gid int64) (relation *oversea.ArchiveRelation, err error) {
var assign = map[string]interface{}{
"policy_id": gid,
"aid": aid,
}
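	// Where + Assign + FirstOrCreate acts as an upsert keyed on aid.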
relation = &oversea.ArchiveRelation{}
if err = d.OverseaDB.Where("aid=?", aid).Assign(assign).FirstOrCreate(&relation).Error; err != nil {
log.Error("d.UpPolicyRelation.FirstOrCreate error(%v)", err)
return
}
return
}
// PolicyRelation get archive policy group relation.
func (d *Dao) PolicyRelation(c context.Context, aid int64) (relation *oversea.ArchiveRelation, err error) {
relation = &oversea.ArchiveRelation{}
res := d.OverseaDB.Where("aid=?", aid).Find(&relation)
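	// A missing relation is returned as nil with no error.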
if res.RecordNotFound() {
relation = nil
return
}
err = res.Error
return
}
// PolicyGroups get policy group
func (d *Dao) PolicyGroups(c context.Context, uid, id int64, gType, state int8, count, page int64, order, sort string) (groups []*oversea.PolicyGroup, total int64, err error) {
var (
db = d.OverseaDB.Model(&groups)
orders = map[string]int{
"mtime": 1,
}
)
db = db.Where("is_global=?", 1)
if uid > 0 {
db = db.Where("uid=?", uid)
}
if id > 0 {
db = db.Where("id=?", id)
}
if gType > 0 {
db = db.Where("type=?", gType)
}
if state >= 0 {
db = db.Where("state=?", state)
}
if order != "" && sort != "" {
if _, ok := orders[order]; ok {
db = db.Order(order + " " + sort)
}
}
if count <= 0 {
count = 20
}
if page <= 0 {
page = 1
}
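	// Count the full result set first, then apply offset/limit for the requested page.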
db.Count(&total)
db = db.Offset((page - 1) * count)
db = db.Limit(count)
if err = db.Find(&groups).Error; err != nil {
log.Error("d.PolicyGroups.Find error(%v)", err)
}
err = d.ItemsByGroup(groups)
return
}
// PolicyGroupsByIds get policy groups by ids
func (d *Dao) PolicyGroupsByIds(c context.Context, ids []int64) (groups []*oversea.PolicyGroup, err error) {
if err = d.OverseaDB.Where(ids).Find(&groups).Error; err != nil {
log.Error("d.PolicyGroupsByIds.Find error(%v)", err)
}
return
}
// PolicyGroup get policy group by id
func (d *Dao) PolicyGroup(c context.Context, id int64) (group *oversea.PolicyGroup, err error) {
var (
groups []*oversea.PolicyGroup
)
group = &oversea.PolicyGroup{}
res := d.OverseaDB.Where("id=?", id).Find(&group)
if res.RecordNotFound() {
group = nil
return
}
err = res.Error
if err != nil {
log.Error("d.PolicyGroup.Find error(%v)", err)
return
}
groups = append(groups, group)
if err = d.ItemsByGroup(groups); err != nil {
log.Error("d.ItemsByGroup.Find error(%v)", err)
return
}
if len(groups) != 0 {
group = groups[0]
}
return
}
// ArchiveGroups get archive's policy groups
func (d *Dao) ArchiveGroups(c context.Context, aid int64) (groups []*oversea.PolicyGroup, err error) {
var (
db = d.OverseaDB
relations []*oversea.ArchiveRelation
gids []int64
)
err = db.Where("aid=?", aid).Find(&relations).Error
if err != nil {
log.Error("d.ArchiveGroups.Find error(%v)", err)
return
}
gids = make([]int64, len(relations))
for i, v := range relations {
gids[i] = v.GroupID
}
db = d.OverseaDB
err = db.Where(gids).Find(&groups).Error
if err != nil {
log.Error("d.ArchiveGroups.Find error(%v)", err)
return
}
err = d.ItemsByGroup(groups)
return
}
// ItemsByGroup get policy items into group
func (d *Dao) ItemsByGroup(groups []*oversea.PolicyGroup) (err error) {
var (
items []*oversea.PolicyItem
itemMap = make(map[int64][]*oversea.PolicyItem)
)
gids := make([]int64, len(groups))
for i, v := range groups {
gids[i] = v.ID
}
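	// Fetch all enabled items for these groups in one query, then bucket them by group_id.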
db := d.OverseaDB
err = db.Where("group_id in (?) and state=?", gids, oversea.StateOK).Find(&items).Error
if err != nil {
log.Error("d.ArchiveGroups.Find error(%v)", err)
return
}
for _, v := range items {
itemMap[v.GroupID] = append(itemMap[v.GroupID], v)
}
for i, g := range groups {
if _, ok := itemMap[g.ID]; !ok {
groups[i].Items = make([]*oversea.PolicyItem, 0)
continue
}
groups[i].Items = itemMap[g.ID]
}
return
}
// AddPolicyGroup add policy group
func (d *Dao) AddPolicyGroup(c context.Context, group *oversea.PolicyGroup) (err error) {
var (
db = d.OverseaDB
)
group.IsGlobal = 1
group.Aid = 0
group.State = 1
err = db.Create(&group).Error
if err != nil {
group = &oversea.PolicyGroup{}
log.Error("d.AddPolicyGroup.Create error(%v)", err)
}
return
}
// UpdatePolicyGroup update policy group
func (d *Dao) UpdatePolicyGroup(c context.Context, id int64, attrs map[string]interface{}) (err error) {
var (
db = d.OverseaDB
)
err = db.Model(&oversea.PolicyGroup{}).Where("id=?", id).Update(attrs).Error
if err != nil {
log.Error("d.UpdatePolicyGroup.Update error(%v)", err)
}
return
}
// UpdatePolicyGroups multi update policy groups
func (d *Dao) UpdatePolicyGroups(c context.Context, ids []int64, attrs map[string]interface{}) (err error) {
var (
db = d.OverseaDB
)
err = db.Model(&oversea.PolicyGroup{}).Where(ids).Update(attrs).Error
if err != nil {
log.Error("d.UpdatePolicyGroup.Update error(%v)", err)
}
return
}
// PolicyItems get policy items
func (d *Dao) PolicyItems(c context.Context, gid int64) (items []*oversea.PolicyItem, err error) {
err = d.OverseaDB.Where("group_id=? AND state=?", gid, oversea.StateOK).Find(&items).Error
return
}
// ZoneIDs get zone ids by area ids
func (d *Dao) ZoneIDs(c context.Context, aids []int64) (ids []int64, err error) {
var items []*oversea.Zone
if err = d.OverseaDB.Where(aids).Find(&items).Pluck("zone_id", &ids).Error; err != nil {
log.Error("d.ZoneIDs.Find error(%v)", err)
}
return
}
// AddPolicies add policy items
func (d *Dao) AddPolicies(c context.Context, policies []oversea.PolicyItem) (err error) {
var assign = map[string]interface{}{
"group_id": 0,
"play_auth": 0,
"down_auth": 0,
"area_id": "",
"zone_id": "",
}
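	// Items that already have an ID are updated in place; new items are created.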
for _, v := range policies {
if v.ID > 0 {
assign["group_id"] = v.GroupID
assign["play_auth"] = v.PlayAuth
assign["down_auth"] = v.DownAuth
assign["area_id"] = v.AreaID
assign["zone_id"] = v.ZoneID
err = d.OverseaDB.Model(&v).Where("id=?", v.ID).Update(assign).Error
} else {
err = d.OverseaDB.Create(&v).Error
}
if err != nil {
log.Error("d.AddPolicies.FirstOrCreate error(%v)", err)
return
}
}
return
}
// DelPolices soft delete policy items
func (d *Dao) DelPolices(c context.Context, gid int64, ids []int64) (err error) {
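	// Soft delete: set state to StateDeleted instead of removing rows.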
err = d.OverseaDB.Debug().Model(&oversea.PolicyItem{}).Where(ids).Where("group_id=?", gid).Update("state", oversea.StateDeleted).Error
if err != nil {
log.Error("d.DelPolices.Update error(%v)", err)
}
return
}

Some files were not shown because too many files have changed in this diff