Create & Init Project...

This commit is contained in:
2019-04-22 18:49:16 +08:00
commit fc4fa37393
25440 changed files with 4054998 additions and 0 deletions

View File

@@ -0,0 +1,96 @@
# Bazel build definitions for the videoup archive dao package.
# All rules are tagged "automanaged": they are maintained by repo tooling,
# so edit source lists via the tool rather than by hand.
package(default_visibility = ["//visibility:public"])

load(
    "@io_bazel_rules_go//go:def.bzl",
    "go_test",
    "go_library",
)

# Unit tests for the dao; embeds the library under test so unexported
# identifiers are visible.
go_test(
    name = "go_default_test",
    srcs = [
        "archive_test.go",
        "dao_test.go",
        "delay_test.go",
        "desc_format_test.go",
        "flow_test.go",
        "history_test.go",
        "netsafe_test.go",
        "new_video_test.go",
        "porder_test.go",
        "redis_test.go",
        "report_test.go",
        "staff_test.go",
        "type_test.go",
        "video_test.go",
    ],
    embed = [":go_default_library"],
    tags = ["automanaged"],
    deps = [
        "//app/service/main/videoup/conf:go_default_library",
        "//app/service/main/videoup/model/archive:go_default_library",
        "//library/database/sql:go_default_library",
        "//library/time:go_default_library",
        "//vendor/github.com/bouk/monkey:go_default_library",
        "//vendor/github.com/davecgh/go-spew/spew:go_default_library",
        "//vendor/github.com/smartystreets/goconvey/convey:go_default_library",
    ],
)

# The dao library itself: MySQL/memcache/redis access for archives.
go_library(
    name = "go_default_library",
    srcs = [
        "archive.go",
        "dao.cache.go",
        "dao.go",
        "delay.go",
        "desc_format.go",
        "flow.go",
        "forbid.go",
        "history.go",
        "mc.cache.go",
        "netsafe.go",
        "new_video.go",
        "porder.go",
        "redis.go",
        "relation.go",
        "report.go",
        "staff.go",
        "staff_apply.go",
        "type.go",
        "video.go",
    ],
    importpath = "go-common/app/service/main/videoup/dao/archive",
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
    deps = [
        "//app/service/main/videoup/conf:go_default_library",
        "//app/service/main/videoup/model/archive:go_default_library",
        "//library/cache/memcache:go_default_library",
        "//library/cache/redis:go_default_library",
        "//library/database/sql:go_default_library",
        "//library/log:go_default_library",
        "//library/net/metadata:go_default_library",
        "//library/stat/prom:go_default_library",
        "//library/sync/pipeline/fanout:go_default_library",
        "//library/time:go_default_library",
        "//library/xstr:go_default_library",
        "//vendor/github.com/dgryski/go-farm:go_default_library",
        "//vendor/github.com/pkg/errors:go_default_library",
        "//vendor/golang.org/x/sync/singleflight:go_default_library",
    ],
)

# Source filegroups used by the repo-wide //... aggregation targets.
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)

filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,462 @@
package archive
import (
"context"
"database/sql"
"fmt"
"strconv"
"strings"
"time"
"go-common/app/service/main/videoup/model/archive"
xsql "go-common/library/database/sql"
"go-common/library/log"
"go-common/library/xstr"
)
const (
	// insert statements.
	// New archives start at state -30 (pending review), round 0, with an
	// empty reject_reason.
	_inArcSQL = "INSERT INTO archive (mid,typeid,title,author,cover,content,tag,attribute,copyright,state,round,pubtime,ctime,mtime,reject_reason) VALUES (?,?,?,?,?,?,?,?,?,-30,0,?,?,?,'')"
	// Upsert of the full addit row; the trailing placeholders repeat the
	// insert values for the ON DUPLICATE KEY UPDATE branch.
	_inAddSQL        = "INSERT INTO archive_addit (aid,mission_id,up_from,ipv6,source,order_id,flow_id,advertiser,flow_remark,description,desc_format_id,dynamic) VALUES (?,?,?,?,?,?,?,?,?,?,?,?) ON DUPLICATE KEY UPDATE mission_id=?,source=?,order_id=?,flow_id=?,advertiser=?,flow_remark=?,description=?,desc_format_id=?,dynamic=?"
	_inAddRdrSQL     = "INSERT INTO archive_addit (aid,redirect_url) VALUES (?,?) ON DUPLICATE KEY UPDATE redirect_url=?"
	_inAddReaSQL     = "INSERT INTO archive_addit (aid,recheck_reason) VALUES (?,?) ON DUPLICATE KEY UPDATE recheck_reason=?"
	_inAddMissionSQL = "INSERT INTO archive_addit (aid,mission_id) VALUES (?,?) ON DUPLICATE KEY UPDATE mission_id=?"
	// update statements.
	_upArcSQL      = "UPDATE archive SET typeid=?,title=?,cover=?,content=?,tag=?,copyright=? WHERE id=?"
	_upArcMidSQL   = "UPDATE archive SET mid=?,state=? WHERE id=?"
	_upArcStateSQL = "UPDATE archive SET state=? WHERE id=?"
	// Clears bit `?` of attribute then ORs in the new value shifted to
	// that bit position.
	_upArcAttrSQL = "UPDATE archive SET attribute=attribute&(~(1<<?))|(?<<?) WHERE id=?"
	_upTagSQL     = "UPDATE archive SET tag=? WHERE id=?"
	// select statements. State conventions used below: -100 = deleted,
	// -6 treated as publicly visible alongside state >= 0.
	_arcSQL              = "SELECT id,mid,typeid,copyright,author,title,cover,reject_reason,content,tag,duration,round,attribute,access,state,pubtime,ctime,mtime FROM archive WHERE id=?"
	_arcAddSQL           = "SELECT aid,mission_id,from_ip,ipv6,up_from,recheck_reason,redirect_url,source,order_id,flow_id,advertiser,flow_remark,description,desc_format_id,dynamic FROM archive_addit WHERE aid=?"
	_arcMidsSQL          = "SELECT id,mid FROM archive WHERE id IN (%s)"
	_arcUpAllSQL         = `SELECT id FROM archive WHERE mid = ? AND state != -100 ORDER BY id DESC LIMIT ?,?`
	_arcUpOpenSQL        = `SELECT id FROM archive WHERE mid = ? AND (state >= 0 OR state = -6) ORDER BY id DESC LIMIT ?,?`
	_arcUpUnOpenSQL      = `SELECT id FROM archive WHERE mid = ? AND state < 0 AND state != -100 AND state != -6 ORDER BY id DESC LIMIT ?,?`
	_arcUpAllCountSQL    = "SELECT count(*) FROM archive WHERE mid = ? AND state != -100"
	_arcUpOpenCountSQL   = "SELECT count(*) FROM archive WHERE mid = ? AND (state >= 0 OR state = -6)"
	_arcUpUnOpenCountSQL = "SELECT count(*) FROM archive WHERE mid = ? AND state < 0 AND state != -100 AND state != -6"
	_simpleArcSQL        = "SELECT id,title,mid FROM archive WHERE id = ?"
	_getRecoSQL          = "SELECT reco_aid FROM archive_recommend WHERE state= 0 and aid=? ORDER BY ctime asc"
	_rejectArcsSQL       = "SELECT id,mid,title,reject_reason,mtime FROM archive WHERE mid = ? AND state = ? AND mtime > ? ORDER BY mtime DESC LIMIT ?,?"
	_rejectArcsCountSQL  = "SELECT count(*) FROM archive WHERE mid = ? AND state = ? AND mtime > ?"
	// archive_recommend rows are soft-deleted (state=1) and revived
	// (state=0) rather than physically removed.
	_delRecoSQL      = "UPDATE archive_recommend SET state=1 WHERE aid=?"
	_batchAddRecoSQL = "INSERT IGNORE INTO archive_recommend (aid,reco_aid) VALUES %s on duplicate key update state=0"
	// POI metadata (archive_biz rows keyed by aid + type).
	_arcPOISQL = "SELECT data from archive_biz WHERE aid=? AND type= ?"
	// type 2 is the vote payload.
	_arcVoteSQL  = "SELECT data from archive_biz WHERE aid=? AND type= 2"
	_inADDBizSQL = "INSERT INTO archive_biz (aid,type,data) VALUES (?,?,?) ON DUPLICATE KEY UPDATE data=?"
)
// TxAddArchive inserts a new archive row inside tx and returns its id.
//
// If the freshly generated id collides with one of the reserved ids in
// d.c.KeepArc.Aids, the reserved row is parked on the internal owner mid
// (d.c.KeepArc.Mid) in the forbidden/deleted state and a replacement row
// is inserted for the caller; the replacement's id is returned.
func (d *Dao) TxAddArchive(tx *xsql.Tx, a *archive.Archive) (aid int64, err error) {
	now := time.Now()
	res, err := tx.Exec(_inArcSQL, a.Mid, a.TypeID, a.Title, a.Author, a.Cover, a.Desc, a.Tag, a.Attribute, a.Copyright, now, now, now)
	if err != nil {
		log.Error("d.inArc.Exec() error(%v)", err)
		return
	}
	if aid, err = res.LastInsertId(); err != nil {
		log.Error("res.LastInsertId() error(%v)", err)
		return
	}
	// Delimit BOTH sides with commas: the original only delimited the
	// right side, so aid 123 wrongly matched a reserved id like 1123.
	if strings.Contains(","+xstr.JoinInts(d.c.KeepArc.Aids)+",", ","+strconv.FormatInt(aid, 10)+",") {
		keepAid := aid
		// Insert again to obtain a fresh, non-reserved id for the caller.
		// The original ignored failures of both calls below.
		if aid, err = d.TxAddArchive(tx, a); err != nil {
			return
		}
		a.State = archive.StateForbidUpDelete
		a.Mid = d.c.KeepArc.Mid // internal owner mid
		a.Aid = keepAid         // reserved aid kept internally
		if _, err = d.TxUpArchiveMid(tx, a); err != nil {
			return
		}
	}
	return
}
// TxUpArchive rewrites the editable fields (type, title, cover, desc,
// tag, copyright) of the archive identified by a.Aid within tx and
// reports the number of affected rows.
func (d *Dao) TxUpArchive(tx *xsql.Tx, a *archive.Archive) (rows int64, err error) {
	result, execErr := tx.Exec(_upArcSQL, a.TypeID, a.Title, a.Cover, a.Desc, a.Tag, a.Copyright, a.Aid)
	if execErr != nil {
		log.Error("d.upArc.Exec() error(%v)", execErr)
		return 0, execErr
	}
	return result.RowsAffected()
}
// TxUpArchiveMid reassigns the owner mid and state of archive a.Aid
// within tx and reports the number of affected rows.
func (d *Dao) TxUpArchiveMid(tx *xsql.Tx, a *archive.Archive) (rows int64, err error) {
	result, execErr := tx.Exec(_upArcMidSQL, a.Mid, a.State, a.Aid)
	if execErr != nil {
		log.Error("d.upArcMid.Exec() error(%v)", execErr)
		return 0, execErr
	}
	return result.RowsAffected()
}
// TxUpArchiveState sets the state of archive aid within tx and reports
// the number of affected rows.
func (d *Dao) TxUpArchiveState(tx *xsql.Tx, aid int64, state int8) (rows int64, err error) {
	res, err := tx.Exec(_upArcStateSQL, state, aid)
	if err != nil {
		// The original logged "d.upVideoState" — a copy-paste from the
		// video dao that misattributed failures in production logs.
		log.Error("d.upArcState.Exec error(%v)", err)
		return
	}
	rows, err = res.RowsAffected()
	return
}
// TxUpAddit inserts or refreshes the archive_addit row of aid within tx.
// A nil ipv6 is normalized to an empty slice so the driver stores a
// zero-length value instead of failing on NULL.
func (d *Dao) TxUpAddit(tx *xsql.Tx, aid, missionID, orderID, flowID, descFormatID int64, ipv6 []byte, source, advertiser, flowRemark, desc, dynamic string, upFrom int8) (rows int64, err error) {
	if ipv6 == nil {
		ipv6 = []byte{}
	}
	result, execErr := tx.Exec(_inAddSQL,
		aid, missionID, upFrom, ipv6, source, orderID, flowID, advertiser, flowRemark, desc, descFormatID, dynamic,
		missionID, source, orderID, flowID, advertiser, flowRemark, desc, descFormatID, dynamic)
	if execErr != nil {
		log.Error("d.inArcAddit.Exec() error(%v)", execErr)
		return 0, execErr
	}
	return result.RowsAffected()
}
// TxUpArchiveBiz upserts the archive_biz payload of (aid, bizType)
// within tx and reports the number of affected rows.
func (d *Dao) TxUpArchiveBiz(tx *xsql.Tx, aid, bizType int64, data string) (rows int64, err error) {
	result, execErr := tx.Exec(_inADDBizSQL, aid, bizType, data, data)
	if execErr != nil {
		log.Error("d.TxUpArchiveBiz.Exec() error(%v)", execErr)
		return 0, execErr
	}
	return result.RowsAffected()
}
// TxUpAdditReason upserts the recheck_reason of archive aid within tx.
func (d *Dao) TxUpAdditReason(tx *xsql.Tx, aid int64, reason string) (rows int64, err error) {
	result, execErr := tx.Exec(_inAddReaSQL, aid, reason, reason)
	if execErr != nil {
		log.Error("d.inAdditReason.Exec() error(%v)", execErr)
		return 0, execErr
	}
	return result.RowsAffected()
}
// TxUpAdditRedirect upserts the redirect_url of archive aid within tx.
func (d *Dao) TxUpAdditRedirect(tx *xsql.Tx, aid int64, redirectURL string) (rows int64, err error) {
	result, execErr := tx.Exec(_inAddRdrSQL, aid, redirectURL, redirectURL)
	if execErr != nil {
		log.Error("d._inAdditRedirect.Exec() error(%v)", execErr)
		return 0, execErr
	}
	return result.RowsAffected()
}
// TxUpArcAttr sets attribute bit `bit` of archive aid to val within tx.
// The effective statement is mirrored into the info log for auditing.
func (d *Dao) TxUpArcAttr(tx *xsql.Tx, aid int64, bit uint, val int32) (rows int64, err error) {
	result, execErr := tx.Exec(_upArcAttrSQL, bit, val, bit, aid)
	debugSQL := fmt.Sprintf("UPDATE archive SET attribute=attribute&(~(1<<%d))|(%d<<%d) WHERE id=%d", bit, val, bit, aid)
	log.Info("aid(%d) attribute update log sql (%s)", aid, debugSQL)
	if execErr != nil {
		log.Error("d.upArcAttr.Exec() error(%v)", execErr)
		return 0, execErr
	}
	return result.RowsAffected()
}
// TxUpTag replaces the tag string of archive aid within tx.
func (d *Dao) TxUpTag(tx *xsql.Tx, aid int64, tag string) (rows int64, err error) {
	result, execErr := tx.Exec(_upTagSQL, tag, aid)
	if execErr != nil {
		log.Error("d.upTag.Exec() error(%v)", execErr)
		return 0, execErr
	}
	return result.RowsAffected()
}
// Archive loads one archive row by aid from the read DB.
// A missing row yields (nil, nil) rather than an error.
func (d *Dao) Archive(c context.Context, aid int64) (a *archive.Archive, err error) {
	var reason, tag sql.NullString
	arc := &archive.Archive{}
	err = d.rddb.QueryRow(c, _arcSQL, aid).Scan(
		&arc.Aid, &arc.Mid, &arc.TypeID, &arc.Copyright, &arc.Author, &arc.Title, &arc.Cover,
		&reason, &arc.Desc, &tag, &arc.Duration, &arc.Round, &arc.Attribute, &arc.Access,
		&arc.State, &arc.PTime, &arc.CTime, &arc.MTime)
	switch err {
	case nil:
		// reject_reason and tag are nullable columns.
		arc.RejectReason = reason.String
		arc.Tag = tag.String
		return arc, nil
	case xsql.ErrNoRows:
		return nil, nil
	default:
		log.Error("row.Scan error(%v)", err)
		return arc, err
	}
}
// POI reads the raw POI metadata payload of archive aid from
// archive_biz. A missing row yields (nil, nil).
func (d *Dao) POI(c context.Context, aid int64) (data []byte, err error) {
	if err = d.rddb.QueryRow(c, _arcPOISQL, aid, archive.BIZPOI).Scan(&data); err != nil {
		if err == xsql.ErrNoRows {
			err = nil
		} else {
			log.Error("row.Scan error(%v)", err)
		}
	}
	return
}
// Vote reads the raw vote payload (archive_biz type=2) of archive aid.
// A missing row yields (nil, nil).
func (d *Dao) Vote(c context.Context, aid int64) (data []byte, err error) {
	if err = d.rddb.QueryRow(c, _arcVoteSQL, aid).Scan(&data); err != nil {
		if err == xsql.ErrNoRows {
			err = nil
		} else {
			log.Error("row.Scan error(%v)", err)
		}
	}
	return
}
// Addit loads the archive_addit row of archive aid from the read DB.
// A missing row yields (nil, nil).
func (d *Dao) Addit(c context.Context, aid int64) (ad *archive.Addit, err error) {
	addit := &archive.Addit{}
	err = d.rddb.QueryRow(c, _arcAddSQL, aid).Scan(
		&addit.Aid, &addit.MissionID, &addit.FromIP, &addit.IPv6, &addit.UpFrom,
		&addit.RecheckReason, &addit.RedirectURL, &addit.Source, &addit.OrderID,
		&addit.FlowID, &addit.Advertiser, &addit.FlowRemark, &addit.Desc,
		&addit.DescFormatID, &addit.Dynamic)
	switch err {
	case nil:
		return addit, nil
	case xsql.ErrNoRows:
		return nil, nil
	default:
		log.Error("row.Scan error(%v)", err)
		return addit, err
	}
}
// Mids maps each id in aids to its owner mid. Unknown ids are simply
// absent from the result. An empty aids returns an empty map without
// touching the database — the original built "IN ()", which is invalid
// SQL and always errored.
func (d *Dao) Mids(c context.Context, aids []int64) (mm map[int64]int64, err error) {
	mm = make(map[int64]int64, len(aids))
	if len(aids) == 0 {
		return
	}
	rows, err := d.rddb.Query(c, fmt.Sprintf(_arcMidsSQL, xstr.JoinInts(aids)))
	if err != nil {
		log.Error("db.Query() error(%v)", err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		var aid, mid int64
		if err = rows.Scan(&aid, &mid); err != nil {
			log.Error("rows.Scan error(%v)", err)
			return
		}
		mm[aid] = mid
	}
	// Surface iteration errors that rows.Next swallows (the original
	// never checked them).
	err = rows.Err()
	return
}
// ArchivesUpAll lists the ids of every non-deleted archive owned by
// mid, newest first, paged by offset/ps.
func (d *Dao) ArchivesUpAll(c context.Context, mid int64, offset int, ps int) (aids []int64, err error) {
	rows, err := d.rddb.Query(c, _arcUpAllSQL, mid, offset, ps)
	if err != nil {
		log.Error("db.Query() error(%v)", err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		var aid int64
		if err = rows.Scan(&aid); err != nil {
			log.Error("rows.Scan error(%v)", err)
			return
		}
		aids = append(aids, aid)
	}
	// Report iteration errors; the original returned a silently
	// truncated list when the cursor failed mid-stream.
	err = rows.Err()
	return
}
// ArchivesUpOpen lists the ids of mid's publicly visible archives
// (state >= 0 or state == -6), newest first, paged by offset/ps.
func (d *Dao) ArchivesUpOpen(c context.Context, mid int64, offset int, ps int) (aids []int64, err error) {
	rows, err := d.rddb.Query(c, _arcUpOpenSQL, mid, offset, ps)
	if err != nil {
		log.Error("db.Query() error(%v)", err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		var aid int64
		if err = rows.Scan(&aid); err != nil {
			log.Error("rows.Scan error(%v)", err)
			return
		}
		aids = append(aids, aid)
	}
	// Report iteration errors; the original returned a silently
	// truncated list when the cursor failed mid-stream.
	err = rows.Err()
	return
}
// ArchivesUpUnOpen lists the ids of mid's hidden archives (negative
// state other than deleted -100 and visible -6), newest first, paged by
// offset/ps.
func (d *Dao) ArchivesUpUnOpen(c context.Context, mid int64, offset int, ps int) (aids []int64, err error) {
	rows, err := d.rddb.Query(c, _arcUpUnOpenSQL, mid, offset, ps)
	if err != nil {
		log.Error("db.Query() error(%v)", err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		var aid int64
		if err = rows.Scan(&aid); err != nil {
			log.Error("rows.Scan error(%v)", err)
			return
		}
		aids = append(aids, aid)
	}
	// Report iteration errors; the original returned a silently
	// truncated list when the cursor failed mid-stream.
	err = rows.Err()
	return
}
// ArchiveAllUpCount counts every non-deleted archive owned by mid.
func (d *Dao) ArchiveAllUpCount(c context.Context, mid int64) (count int64, err error) {
	err = d.rddb.QueryRow(c, _arcUpAllCountSQL, mid).Scan(&count)
	if err == sql.ErrNoRows {
		err = nil
	} else if err != nil {
		log.Error("row.Scan error(%v)", err)
	}
	return
}
// ArchiveOpenUpCount counts mid's publicly visible archives
// (state >= 0 or state == -6).
func (d *Dao) ArchiveOpenUpCount(c context.Context, mid int64) (count int64, err error) {
	err = d.rddb.QueryRow(c, _arcUpOpenCountSQL, mid).Scan(&count)
	if err == sql.ErrNoRows {
		err = nil
	} else if err != nil {
		log.Error("row.Scan error(%v)", err)
	}
	return
}
// ArchiveUnOpenUpCount counts mid's hidden archives (negative state
// other than -100 and -6).
func (d *Dao) ArchiveUnOpenUpCount(c context.Context, mid int64) (count int64, err error) {
	err = d.rddb.QueryRow(c, _arcUpUnOpenCountSQL, mid).Scan(&count)
	if err == sql.ErrNoRows {
		err = nil
	} else if err != nil {
		log.Error("row.Scan error(%v)", err)
	}
	return
}
// SimpleArchive loads just (id, title, mid) of archive aid.
// A missing row yields (nil, nil).
func (d *Dao) SimpleArchive(c context.Context, aid int64) (a *archive.SimpleArchive, err error) {
	sa := &archive.SimpleArchive{}
	switch err = d.rddb.QueryRow(c, _simpleArcSQL, aid).Scan(&sa.Aid, &sa.Title, &sa.Mid); err {
	case nil:
		return sa, nil
	case xsql.ErrNoRows:
		return nil, nil
	default:
		log.Error("row.Scan error(%v)", err)
		return sa, err
	}
}
// Recos lists the active (state=0) recommended aids attached to archive
// aid, oldest first.
func (d *Dao) Recos(c context.Context, aid int64) (aids []int64, err error) {
	rows, err := d.db.Query(c, _getRecoSQL, aid)
	if err != nil {
		log.Error("d.db.Query(%d) error(%v)", aid, err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		var recoAid int64
		if err = rows.Scan(&recoAid); err != nil {
			log.Error("rows.Scan error(%v)", err)
			return
		}
		aids = append(aids, recoAid)
	}
	// Surface iteration errors the original never checked.
	err = rows.Err()
	return
}
// RecoUpdate replaces the recommendation set of archive aid: all
// existing rows are soft-deleted, then recoIDs (if any) are upserted
// back to the active state. Returns the affected-row count of the
// batch upsert (0 when recoIDs is empty).
func (d *Dao) RecoUpdate(c context.Context, aid int64, recoIDs []int64) (effCnt int64, err error) {
	// Soft-delete existing recommendations first. The original issued
	// this UPDATE via Query and never closed the returned rows, leaking
	// a pooled connection on every call — Exec is the correct verb.
	if _, err = d.db.Exec(c, _delRecoSQL, aid); err != nil {
		log.Error("d.db.Exec(%d) error(%v)", aid, err)
		return
	}
	if len(recoIDs) == 0 {
		return
	}
	batchVals := make([]string, 0, len(recoIDs))
	for _, recoID := range recoIDs {
		batchVals = append(batchVals, fmt.Sprintf("(%d,%d)", aid, recoID))
	}
	res, err := d.db.Exec(c, fmt.Sprintf(_batchAddRecoSQL, strings.Join(batchVals, ",")))
	if err != nil {
		log.Error("d.db.Exe _batchAddRecoSQL batchVals(%+v) error(%+v)", batchVals, err)
		return
	}
	return res.RowsAffected()
}
// UpMissionID upserts the mission_id of archive aa.AID in archive_addit.
func (d *Dao) UpMissionID(c context.Context, aa *archive.ArcMissionParam) (rows int64, err error) {
	result, execErr := d.db.Exec(c, _inAddMissionSQL, aa.AID, aa.MissionID, aa.MissionID)
	if execErr != nil {
		log.Error("UpMissionID.Exec error(%v)", execErr)
		return 0, execErr
	}
	return result.RowsAffected()
}
// RejectedArchives lists archives of mid in the given rejected state
// that were modified after start, newest first, paged by offset/limit,
// together with the total matching count. Returns early with count 0
// when nothing matches.
func (d *Dao) RejectedArchives(c context.Context, mid int64, state, offset, limit int32, start *time.Time) (arcs []*archive.Archive, count int32, err error) {
	row := d.slaveDB.QueryRow(c, _rejectArcsCountSQL, mid, state, start)
	if err = row.Scan(&count); err != nil {
		log.Error("rows.Scan error(%v)", err)
		return
	}
	if count == 0 {
		return
	}
	rows, err := d.slaveDB.Query(c, _rejectArcsSQL, mid, state, start, offset, limit)
	if err != nil {
		log.Error("db.Query error(%v)", err)
		return
	}
	// The original never closed rows, leaking a pooled connection per call.
	defer rows.Close()
	for rows.Next() {
		a := archive.Archive{}
		if err = rows.Scan(&a.Aid, &a.Mid, &a.Title, &a.RejectReason, &a.MTime); err != nil {
			log.Error("rows.Scan error(%v)", err)
			return
		}
		arcs = append(arcs, &a)
	}
	// Surface iteration errors the original never checked.
	err = rows.Err()
	return
}

View File

@@ -0,0 +1,471 @@
package archive
import (
"context"
"testing"
"time"
"database/sql"
"fmt"
"go-common/app/service/main/videoup/model/archive"
xsql "go-common/library/database/sql"
"reflect"
"github.com/bouk/monkey"
. "github.com/smartystreets/goconvey/convey"
)
// TestDao_TxAddArchive inserts a throwaway archive inside a transaction
// against the test database wired up by TestMain; commits on success,
// rolls back on failure. NOTE(review): the Convey label says
// "TxUpArchiveState" but the body exercises TxAddArchive.
func TestDao_TxAddArchive(t *testing.T) {
	var (
		c     = context.Background()
		tx, _ = d.BeginTran(c)
		a     = &archive.Archive{
			Mid:    123,
			TypeID: 22,
			Title:  "UT测试",
			Author: "ut",
			Desc:   "UT测试UT测试",
		}
	)
	Convey("TxUpArchiveState", t, func(ctx C) {
		_, err := d.TxAddArchive(tx, a)
		if err != nil {
			tx.Rollback()
		} else {
			tx.Commit()
		}
		So(err, ShouldBeNil)
	})
}

// TestDao_TxAUpArchive updates the editable fields of a fixed aid in a
// transaction; commit/rollback mirrors the outcome.
func TestDao_TxAUpArchive(t *testing.T) {
	var (
		c     = context.Background()
		tx, _ = d.BeginTran(c)
		a     = &archive.Archive{
			Aid:    10111210,
			Mid:    123,
			TypeID: 22,
			Title:  "UT测试",
			Author: "ut",
			Desc:   "UT测试UT测试",
		}
	)
	Convey("TxUpArchiveState", t, func(ctx C) {
		_, err := d.TxUpArchive(tx, a)
		if err != nil {
			tx.Rollback()
		} else {
			tx.Commit()
		}
		So(err, ShouldBeNil)
	})
}

// TestDao_TxAUpArchiveMid reassigns the owner mid/state of a fixed aid
// in a transaction; commit/rollback mirrors the outcome.
func TestDao_TxAUpArchiveMid(t *testing.T) {
	var (
		c     = context.Background()
		tx, _ = d.BeginTran(c)
		a     = &archive.Archive{
			Aid:    10111210,
			Mid:    123,
			TypeID: 22,
			Title:  "UT测试",
			Author: "ut",
			Desc:   "UT测试UT测试",
		}
	)
	Convey("TxUpArchiveState", t, func(ctx C) {
		_, err := d.TxUpArchiveMid(tx, a)
		if err != nil {
			tx.Rollback()
		} else {
			tx.Commit()
		}
		So(err, ShouldBeNil)
	})
}
// TestArchiveTxUpArchiveState flips the state of a fixed aid inside a
// transaction; only asserts that the statement itself succeeds.
func TestArchiveTxUpArchiveState(t *testing.T) {
	var (
		c     = context.Background()
		tx, _ = d.BeginTran(c)
		aid   = int64(233333)
		state = int8(0)
	)
	Convey("TxUpArchiveState", t, func(ctx C) {
		_, err := d.TxUpArchiveState(tx, aid, state)
		if err != nil {
			tx.Rollback()
		} else {
			tx.Commit()
		}
		So(err, ShouldBeNil)
	})
}

// TestDao_TxUpAddit upserts an archive_addit row with zero values for
// every optional field.
func TestDao_TxUpAddit(t *testing.T) {
	var (
		c     = context.Background()
		tx, _ = d.BeginTran(c)
		aid   = int64(233333)
	)
	Convey("TxUpAddit", t, func(ctx C) {
		_, err := d.TxUpAddit(tx, aid, 0, 0, 0, 0, []byte{}, "", "", "", "", "", 0)
		if err != nil {
			tx.Rollback()
		} else {
			tx.Commit()
		}
		So(err, ShouldBeNil)
	})
}

// TestDao_TxUpArchiveBiz upserts an empty archive_biz payload.
func TestDao_TxUpArchiveBiz(t *testing.T) {
	var (
		c     = context.Background()
		tx, _ = d.BeginTran(c)
		aid   = int64(233333)
	)
	Convey("TxUpArchiveBiz", t, func(ctx C) {
		_, err := d.TxUpArchiveBiz(tx, aid, 0, "")
		if err != nil {
			tx.Rollback()
		} else {
			tx.Commit()
		}
		So(err, ShouldBeNil)
	})
}

// TestDao_TxUpAdditReason upserts an empty recheck_reason.
func TestDao_TxUpAdditReason(t *testing.T) {
	var (
		c     = context.Background()
		tx, _ = d.BeginTran(c)
		aid   = int64(233333)
	)
	Convey("TxUpAdditReason", t, func(ctx C) {
		_, err := d.TxUpAdditReason(tx, aid, "")
		if err != nil {
			tx.Rollback()
		} else {
			tx.Commit()
		}
		So(err, ShouldBeNil)
	})
}

// TestDao_TxUpAdditRedirect upserts an empty redirect_url.
func TestDao_TxUpAdditRedirect(t *testing.T) {
	var (
		c     = context.Background()
		tx, _ = d.BeginTran(c)
		aid   = int64(233333)
	)
	Convey("TxUpAdditRedirect", t, func(ctx C) {
		_, err := d.TxUpAdditRedirect(tx, aid, "")
		if err != nil {
			tx.Rollback()
		} else {
			tx.Commit()
		}
		So(err, ShouldBeNil)
	})
}
// func TestArchiveTxUpAdditReason(t *testing.T) {
// var (
// c = context.Background()
// tx, _ = d.BeginTran(c)
// aid = int64(233333)
// reason = "2333"
// )
// Convey("TxUpAdditReason", t, func(ctx C) {
// rows, err := d.TxUpAdditReason(tx, aid, reason)
// ctx.Convey("Then err should be nil.rows should not be nil.", func(ctx C) {
// ctx.So(err, ShouldBeNil)
// ctx.So(rows, ShouldNotBeNil)
// })
// })
// }
// TestArchiveTxUpAdditRedirect upserts a redirect_url for aid 0.
// NOTE(review): unlike its siblings above, this variant never commits
// nor rolls back tx, so the connection is held until the pool reclaims
// it — worth confirming this is intentional.
func TestArchiveTxUpAdditRedirect(t *testing.T) {
	var (
		c           = context.Background()
		tx, _       = d.BeginTran(c)
		aid         = int64(0)
		redirectURL = "233333"
	)
	Convey("TxUpAdditRedirect", t, func(ctx C) {
		rows, err := d.TxUpAdditRedirect(tx, aid, redirectURL)
		ctx.Convey("Then err should be nil.rows should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
			ctx.So(rows, ShouldNotBeNil)
		})
	})
}

// TestArchiveTxUpArcAttr flips attribute bit 0 of aid 0 to 0 and only
// checks that the statement executes.
func TestArchiveTxUpArcAttr(t *testing.T) {
	var (
		c     = context.Background()
		tx, _ = d.BeginTran(c)
		aid   = int64(0)
		bit   = uint(0)
		val   = int32(0)
	)
	Convey("TxUpArcAttr", t, func(ctx C) {
		rows, err := d.TxUpArcAttr(tx, aid, bit, val)
		ctx.Convey("Then err should be nil.rows should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
			ctx.So(rows, ShouldNotBeNil)
		})
	})
}

// TestArchiveTxUpTag rewrites the tag of a fixed aid.
func TestArchiveTxUpTag(t *testing.T) {
	var (
		c     = context.Background()
		tx, _ = d.BeginTran(c)
		aid   = int64(22333)
		tag   = "2333"
	)
	Convey("TxUpTag", t, func(ctx C) {
		rows, err := d.TxUpTag(tx, aid, tag)
		ctx.Convey("Then err should be nil.rows should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
			ctx.So(rows, ShouldNotBeNil)
		})
	})
}
// TestArchiveArchive reads one archive row; only the error is asserted
// since the row may legitimately be absent.
func TestArchiveArchive(t *testing.T) {
	var (
		c   = context.Background()
		aid = int64(22333)
	)
	Convey("Archive", t, func(ctx C) {
		_, err := d.Archive(c, aid)
		ctx.Convey("Then err should be nil.a should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
		})
	})
}

// TestArchiveAddit groups several read-path cases: Addit, Vote, Recos
// against the live DB, plus UpMissionID with d.db.Exec monkey-patched
// to fail so the error branch is exercised.
func TestArchiveAddit(t *testing.T) {
	var (
		c   = context.Background()
		aid = int64(22333)
		aa  = &archive.ArcMissionParam{} // zero-value params for the UpMissionID case
	)
	Convey("Addit", t, func(ctx C) {
		_, err := d.Addit(c, aid)
		ctx.Convey("Then err should be nil.ad should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
		})
	})
	Convey("Vote", t, func(ctx C) {
		ad, err := d.Vote(c, aid)
		ctx.Convey("Then err should be nil.ad should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
			ctx.So(ad, ShouldBeNil)
		})
	})
	Convey("Recos", t, func(ctx C) {
		ad, err := d.Recos(c, aid)
		ctx.Convey("Then err should be nil.ad should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
			ctx.So(ad, ShouldBeNil)
		})
	})
	Convey("UpMissionID", t, func(ctx C) {
		// Force the Exec path to fail; UpMissionID must propagate the
		// error and return a zero row count.
		guard := monkey.PatchInstanceMethod(reflect.TypeOf(d.db), "Exec", func(_ *xsql.DB, _ context.Context, _ string, _ ...interface{}) (sql.Result, error) {
			return nil, sql.ErrNoRows
		})
		defer guard.Unpatch()
		ad, err := d.UpMissionID(c, aa)
		ctx.Convey("Then err should be nil.ad should not be nil.", func(ctx C) {
			ctx.So(err, ShouldNotBeNil)
			ctx.So(ad, ShouldBeZeroValue)
		})
	})
}

// TestArchiveMids bulk-maps a single aid to its owner mid.
func TestArchiveMids(t *testing.T) {
	var (
		c    = context.Background()
		aids = []int64{222}
	)
	Convey("Mids", t, func(ctx C) {
		mm, err := d.Mids(c, aids)
		ctx.Convey("Then err should be nil.mm should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
			ctx.So(mm, ShouldNotBeNil)
		})
	})
}
// TestArchiveArchivesUpAll pages the full archive list of a fixed mid.
func TestArchiveArchivesUpAll(t *testing.T) {
	var (
		c      = context.Background()
		mid    = int64(223345)
		offset = int(20)
		ps     = int(1)
	)
	Convey("ArchivesUpAll", t, func(ctx C) {
		_, err := d.ArchivesUpAll(c, mid, offset, ps)
		ctx.Convey("Then err should be nil.aids should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
		})
	})
}

// TestArchiveArchivesUpOpen pages the publicly visible archives of a
// fixed mid.
func TestArchiveArchivesUpOpen(t *testing.T) {
	var (
		c      = context.Background()
		mid    = int64(223345)
		offset = int(20)
		ps     = int(1)
	)
	Convey("ArchivesUpOpen", t, func(ctx C) {
		_, err := d.ArchivesUpOpen(c, mid, offset, ps)
		ctx.Convey("Then err should be nil.aids should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
		})
	})
}

// TestArchiveArchivesUpUnOpen pages the hidden archives of a fixed mid.
func TestArchiveArchivesUpUnOpen(t *testing.T) {
	var (
		c      = context.Background()
		mid    = int64(223345)
		offset = int(20)
		ps     = int(1)
	)
	Convey("ArchivesUpUnOpen", t, func(ctx C) {
		_, err := d.ArchivesUpUnOpen(c, mid, offset, ps)
		ctx.Convey("Then err should be nil.aids should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
		})
	})
}

// TestArchiveArchiveAllUpCount counts all archives of a fixed mid.
func TestArchiveArchiveAllUpCount(t *testing.T) {
	var (
		c   = context.Background()
		mid = int64(223345)
	)
	Convey("ArchiveAllUpCount", t, func(ctx C) {
		count, err := d.ArchiveAllUpCount(c, mid)
		ctx.Convey("Then err should be nil.count should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
			ctx.So(count, ShouldNotBeNil)
		})
	})
}

// TestArchiveArchiveOpenUpCount counts the visible archives of a fixed mid.
func TestArchiveArchiveOpenUpCount(t *testing.T) {
	var (
		c   = context.Background()
		mid = int64(223345)
	)
	Convey("ArchiveOpenUpCount", t, func(ctx C) {
		count, err := d.ArchiveOpenUpCount(c, mid)
		ctx.Convey("Then err should be nil.count should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
			ctx.So(count, ShouldNotBeNil)
		})
	})
}

// TestArchiveArchiveUnOpenUpCount counts the hidden archives of a fixed mid.
func TestArchiveArchiveUnOpenUpCount(t *testing.T) {
	var (
		c   = context.Background()
		mid = int64(223345)
	)
	Convey("ArchiveUnOpenUpCount", t, func(ctx C) {
		count, err := d.ArchiveUnOpenUpCount(c, mid)
		ctx.Convey("Then err should be nil.count should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
			ctx.So(count, ShouldNotBeNil)
		})
	})
}
// TestArchiveSimpleArchive reads the (id,title,mid) projection of one aid.
func TestArchiveSimpleArchive(t *testing.T) {
	var (
		c   = context.Background()
		aid = int64(222)
	)
	Convey("SimpleArchive", t, func(ctx C) {
		_, err := d.SimpleArchive(c, aid)
		ctx.Convey("Then err should be nil.a should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
		})
	})
}

// TestArchivePOI reads the POI payload of one aid and echoes it to
// stdout for manual inspection.
func TestArchivePOI(t *testing.T) {
	var (
		c   = context.Background()
		aid = int64(222)
	)
	Convey("poi", t, func(ctx C) {
		data, err := d.POI(c, aid)
		fmt.Println(string(data))
		ctx.Convey("Then err should be nil.a should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
		})
	})
}

// TestArchivePOIAdd monkey-patches tx.Exec to fail so TxUpArchiveBiz's
// error branch is exercised without touching the database.
func TestArchivePOIAdd(t *testing.T) {
	var (
		c     = context.Background()
		aid   = int64(222)
		tx, _ = d.BeginTran(c)
		err   error
	)
	Convey("add poi err", t, func(ctx C) {
		guard := monkey.PatchInstanceMethod(reflect.TypeOf(tx),
			"Exec",
			func(_ *xsql.Tx, _ string, _ ...interface{}) (sql.Result, error) {
				return nil, fmt.Errorf("tx.Exec error")
			})
		defer guard.Unpatch()
		_, err = d.TxUpArchiveBiz(tx, aid, 1, "2222")
		ctx.Convey("TestArchivePOIAdd.", func(ctx C) {
			ctx.So(err, ShouldNotBeNil)
		})
	})
}
// TestArchiverejectedArchives covers RejectedArchives both against the
// live test database and with the slave DB's Query monkey-patched to
// fail, checking the error path.
func TestArchiverejectedArchives(t *testing.T) {
	Convey("rejectedArchives", t, func(ctx C) {
		var (
			c              = context.Background()
			mid    int64   = 2089809
			state  int32   = -4
			offset int32
			limit  int32 = 20
			start, _     = time.Parse("20060102", "20100101")
		)
		ctx.Convey("When everything gose positive", func(ctx C) {
			arcs, count, err := d.RejectedArchives(c, mid, state, offset, limit, &start)
			ctx.Convey("Then err should be nil.arcs should not be nil.", func(ctx C) {
				ctx.So(err, ShouldBeNil)
				ctx.So(arcs, ShouldNotBeNil)
				// The original called ShouldNotEqual(count, 0) bare and
				// discarded its result — goconvey assertions only take
				// effect when routed through So, so nothing was checked.
				ctx.So(count, ShouldNotEqual, 0)
			})
		})
		ctx.Convey("When no rows found", func(ctx C) {
			guard := monkey.PatchInstanceMethod(reflect.TypeOf(d.slaveDB), "Query", func(_ *xsql.DB, _ context.Context, _ string, _ ...interface{}) (*xsql.Rows, error) {
				return nil, xsql.ErrNoRows
			})
			defer guard.Unpatch()
			arcs, _, err := d.RejectedArchives(c, mid, state, offset, limit, &start)
			ctx.Convey("Then err should be nil.arcs should not be nil.", func(ctx C) {
				ctx.So(err, ShouldNotBeNil)
				ctx.So(arcs, ShouldBeNil)
			})
		})
	})
}

View File

@@ -0,0 +1,66 @@
// Code generated by $GOPATH/src/go-common/app/tool/cache/gen. DO NOT EDIT.
/*
Package archive is a generated cache proxy package.
It is generated from:
type _cache interface {
// cache: -singleflight=true -nullcache=[]*archive.Staff{{ID:-1}} -check_null_code=len($)==1&&$[0].ID==-1
StaffData(c context.Context, aid int64) ([]*archive.Staff, error)
}
*/
package archive
import (
"context"
"go-common/app/service/main/videoup/model/archive"
"go-common/library/net/metadata"
"go-common/library/stat/prom"
"golang.org/x/sync/singleflight"
)
// Keep the generated code referencing the _cache contract declared in dao.go.
var _ _cache

// One singleflight group per cached method (only StaffData here).
var cacheSingleFlights = [1]*singleflight.Group{{}}

// StaffData get data from cache if miss will call source method, then add to cache.
func (d *Dao) StaffData(c context.Context, id int64) (res []*archive.Staff, err error) {
	addCache := true
	// Try the cache first; if the cache itself errors, still serve from
	// source but skip the write-back (cache presumed unhealthy).
	res, err = d.CacheStaffData(c, id)
	if err != nil {
		addCache = false
		err = nil
	}
	// Strip the "null cache" sentinel ({ID:-1}) before returning to callers.
	defer func() {
		if len(res) == 1 && res[0].ID == -1 {
			res = nil
		}
	}()
	if len(res) != 0 {
		prom.CacheHit.Incr("StaffData")
		return
	}
	var rr interface{}
	// Collapse concurrent misses for the same id into a single source call.
	sf := d.cacheSFStaffData(id)
	rr, err, _ = cacheSingleFlights[0].Do(sf, func() (r interface{}, e error) {
		prom.CacheMiss.Incr("StaffData")
		r, e = d.Staffs(c, id)
		return
	})
	// NOTE(review): rr is type-asserted before err is checked; if Do ever
	// returns a nil result with a non-nil error this panics. This file is
	// generated ("DO NOT EDIT") — confirm and fix in the generator.
	res = rr.([]*archive.Staff)
	if err != nil {
		return
	}
	miss := res
	// Cache an explicit sentinel for "no staff" so empty results do not
	// cause a source hit on every call.
	if len(miss) == 0 {
		miss = []*archive.Staff{{ID: -1}}
	}
	if !addCache {
		return
	}
	// Write back asynchronously so the caller is not blocked on memcache.
	d.cache.Do(c, func(ctx context.Context) {
		d.AddCacheStaffData(metadata.WithContext(c), id, miss)
	})
	return
}

View File

@@ -0,0 +1,81 @@
package archive
import (
"context"
"fmt"
"go-common/app/service/main/videoup/conf"
"go-common/app/service/main/videoup/model/archive"
"go-common/library/cache/memcache"
"go-common/library/cache/redis"
"go-common/library/database/sql"
"go-common/library/sync/pipeline/fanout"
)
// Dao bundles every storage handle the videoup archive service uses.
// (The original comment called this a "redis dao", but it also owns
// MySQL, memcache and an async write-back worker.)
type Dao struct {
	c *conf.Config // service configuration
	// MySQL handles: db is the master (writes and transactions), rddb
	// serves the read path, slaveDB serves listing/report queries.
	db      *sql.DB
	rddb    *sql.DB
	slaveDB *sql.DB
	redis   *redis.Pool    // pool built from c.Redis.Track
	cache   *fanout.Fanout // async worker used for cache write-backs
	mc      *memcache.Pool // pool built from c.Memcache.Archive
}
// New constructs a Dao with all of its storage handles wired from c.
func New(c *conf.Config) (d *Dao) {
	d = new(Dao)
	d.c = c
	d.db = sql.NewMySQL(c.DB.Archive)
	d.rddb = sql.NewMySQL(c.DB.ArchiveRead)
	d.slaveDB = sql.NewMySQL(c.DB.ArchiveSlave)
	d.redis = redis.NewPool(c.Redis.Track.Config)
	d.cache = fanout.New("cache")
	d.mc = memcache.NewPool(c.Memcache.Archive.Config)
	return d
}
// BeginTran begins a transaction on the master database.
func (d *Dao) BeginTran(c context.Context) (tx *sql.Tx, err error) {
	tx, err = d.db.Begin(c)
	return
}
// Close releases every connection pool held by the Dao. The original
// closed only the master DB, leaking the read/slave MySQL pools and the
// redis/memcache pools on shutdown.
func (d *Dao) Close() {
	if d.db != nil {
		d.db.Close()
	}
	if d.rddb != nil {
		d.rddb.Close()
	}
	if d.slaveDB != nil {
		d.slaveDB.Close()
	}
	if d.redis != nil {
		d.redis.Close()
	}
	if d.mc != nil {
		d.mc.Close()
	}
}
// Ping verifies connectivity to all three MySQL handles, not just the
// master as the original did, so health checks also catch a dead
// read/slave pool. Returns the first failure encountered.
func (d *Dao) Ping(c context.Context) (err error) {
	if err = d.db.Ping(c); err != nil {
		return
	}
	if err = d.rddb.Ping(c); err != nil {
		return
	}
	return d.slaveDB.Ping(c)
}
// staffKey builds the memcache key under which the staff list of
// archive aid is stored.
func staffKey(aid int64) string {
	key := fmt.Sprintf("staff_aid_%d", aid)
	return key
}
// cacheSFStaffData builds the singleflight key that deduplicates
// concurrent StaffData source fetches for the same aid.
func (d *Dao) cacheSFStaffData(aid int64) string {
	key := fmt.Sprintf("staff_aid_sf_%d", aid)
	return key
}
//go:generate $GOPATH/src/go-common/app/tool/cache/gen

// _cache declares the methods for which the cache tool generates
// singleflight-guarded read-through wrappers (see dao.cache.go). The
// "// cache:" directive comments below are parsed by the generator —
// do not edit them casually.
type _cache interface {
	// cache: -singleflight=true -nullcache=[]*archive.Staff{{ID:-1}} -check_null_code=len($)==1&&$[0].ID==-1
	StaffData(c context.Context, aid int64) ([]*archive.Staff, error)
}

//go:generate $GOPATH/src/go-common/app/tool/cache/mc

// _mc declares the memcache accessors generated into mc.cache.go; the
// "// mc:" directives are likewise parsed by the generator.
type _mc interface {
	// mc: -key=staffKey
	CacheStaffData(c context.Context, key int64) ([]*archive.Staff, error)
	// Custom comments are also supported here; they replace the generated default.
	// mc: -key=staffKey -expire=3 -encode=json|gzip
	AddCacheStaffData(c context.Context, key int64, value []*archive.Staff) error
	// mc: -key=staffKey
	DelCacheStaffData(c context.Context, key int64) error
}

View File

@@ -0,0 +1,39 @@
package archive
import (
"flag"
"os"
"path/filepath"
"testing"
"go-common/app/service/main/videoup/conf"
)
var (
	// d is the package-wide Dao shared by every test in this package;
	// it is initialized once in TestMain.
	d *Dao
)

// TestMain wires configuration — remote config service when DEPLOY_ENV
// is set (UAT parameters below), local TOML file otherwise — then
// builds the shared Dao and runs the suite.
func TestMain(m *testing.M) {
	if os.Getenv("DEPLOY_ENV") != "" {
		flag.Set("app_id", "main.archive.videoup-service")
		flag.Set("conf_token", "4b62721602981eb3635dba3b0d866ac5")
		flag.Set("tree_id", "2308")
		flag.Set("conf_version", "docker-1")
		flag.Set("deploy_env", "uat")
		flag.Set("conf_host", "config.bilibili.co")
		flag.Set("conf_path", "/tmp")
		flag.Set("region", "sh")
		flag.Set("zone", "sh001")
	} else {
		dir, _ := filepath.Abs("../../cmd/videoup-service.toml")
		flag.Set("conf", dir)
	}
	flag.Parse()
	if err := conf.Init(); err != nil {
		panic(err)
	}
	//conf.Init()
	d = New(conf.Conf)
	m.Run()
	os.Exit(0)
}

View File

@@ -0,0 +1,71 @@
package archive
import (
"context"
"time"
"go-common/app/service/main/videoup/model/archive"
"go-common/library/database/sql"
"go-common/library/log"
xtime "go-common/library/time"
)
const (
	// insert: creates a delayed-publish row for (mid, aid, type).
	_inDelaySQL = "INSERT INTO archive_delay (mid,aid,state,type,dtime) VALUES (?,?,?,?,?)"
	// update: upsert that refreshes dtime and revives a soft-deleted row
	// by resetting deleted_at to the zero timestamp.
	_upDelaySQL = "INSERT INTO archive_delay (mid,aid,state,type,dtime,ctime) VALUES (?,?,?,?,?,?) ON DUPLICATE KEY UPDATE dtime=?,deleted_at='0000-00-00 00:00:00'"
	// delete: soft-delete by stamping deleted_at with the current time.
	_delDelaySQL = "UPDATE archive_delay SET deleted_at = ? WHERE aid=? AND type=?"
	// select: reads the live (not soft-deleted) row for (aid, type).
	_dTimeSQL = "SELECT aid,dtime,state FROM archive_delay WHERE aid=? AND type=? AND deleted_at = 0"
)
// TxAddDelay inserts a delayed-publish row for (mid, aid, tp) with the
// given state and delay time, within tx.
func (d *Dao) TxAddDelay(tx *sql.Tx, mid int64, aid int64, state, tp int8, dTime xtime.Time) (rows int64, err error) {
	result, execErr := tx.Exec(_inDelaySQL, mid, aid, state, tp, dTime)
	if execErr != nil {
		log.Error("d.inDelay.Exec() error(%v)", execErr)
		return 0, execErr
	}
	return result.RowsAffected()
}
// TxUpDelay upserts the delay for (aid, type) inside tx: a new row is created
// when absent, otherwise dtime is refreshed and the row is un-soft-deleted
// (see _upDelaySQL). Returns the number of affected rows.
func (d *Dao) TxUpDelay(tx *sql.Tx, mid, aid int64, state, tp int8, dTime xtime.Time) (rows int64, err error) {
	// ctime is set explicitly so a resurrected row keeps a sane creation time.
	var now = time.Now()
	res, err := tx.Exec(_upDelaySQL, mid, aid, state, tp, dTime, now, dTime)
	if err != nil {
		log.Error("d.TxUpDelay.Exec() error(%v)", err)
		return
	}
	rows, err = res.RowsAffected()
	return
}
// TxDelDelay soft-deletes the delay row for (aid, type) inside tx by stamping
// deleted_at with the current time; returns the number of affected rows.
func (d *Dao) TxDelDelay(tx *sql.Tx, aid int64, tp int8) (rows int64, err error) {
	result, execErr := tx.Exec(_delDelaySQL, time.Now(), aid, tp)
	if execErr != nil {
		log.Error("d.TxDelDelay.Exec() error(%v)", execErr)
		err = execErr
		return
	}
	return result.RowsAffected()
}
// Delay fetches the live delay row for (aid, type) from the read replica.
// Both dl and err are nil when no delay is configured.
func (d *Dao) Delay(c context.Context, aid int64, tp int8) (dl *archive.Delay, err error) {
	dl = &archive.Delay{}
	err = d.rddb.QueryRow(c, _dTimeSQL, aid, tp).Scan(&dl.Aid, &dl.DTime, &dl.State)
	switch err {
	case nil:
		// row found, dl fully populated
	case sql.ErrNoRows:
		// absence is not an error for callers
		dl, err = nil, nil
	default:
		log.Error("row.Scan error(%v)", err)
	}
	return
}

View File

@@ -0,0 +1,56 @@
package archive
import (
"context"
"testing"
. "github.com/smartystreets/goconvey/convey"
"go-common/library/time"
)
// TestDao_TxUpDelay upserts a delay row inside a real transaction, committing
// on success and rolling back on failure.
// NOTE(review): the Convey label "TxUpArchiveState" looks copy-pasted from
// another test — it is a runtime string, so left untouched here; confirm and
// rename in a code change.
func TestDao_TxUpDelay(t *testing.T) {
	var (
		c     = context.Background()
		tx, _ = d.BeginTran(c)
		dt    time.Time
	)
	Convey("TxUpArchiveState", t, func(ctx C) {
		_, err := d.TxUpDelay(tx, 123, 23333, 0, 0, dt)
		if err != nil {
			tx.Rollback()
		} else {
			tx.Commit()
		}
		So(err, ShouldBeNil)
	})
}

// TestDao_TxDelDelay soft-deletes a delay row inside a real transaction.
func TestDao_TxDelDelay(t *testing.T) {
	var (
		c     = context.Background()
		tx, _ = d.BeginTran(c)
	)
	Convey("TxDelDelay", t, func(ctx C) {
		_, err := d.TxDelDelay(tx, 123, 0)
		if err != nil {
			tx.Rollback()
		} else {
			tx.Commit()
		}
		So(err, ShouldBeNil)
	})
}

// TestArchiveDelay reads back the delay for a fixed aid/type pair.
func TestArchiveDelay(t *testing.T) {
	var (
		c   = context.Background()
		aid = int64(23333)
		tp  = int8(0)
	)
	Convey("Delay", t, func(ctx C) {
		_, err := d.Delay(c, aid, tp)
		ctx.Convey("Then err should be nil.dl should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
		})
	})
}

View File

@@ -0,0 +1,31 @@
package archive
import (
"context"
"go-common/app/service/main/videoup/model/archive"
"go-common/library/log"
)
const (
	// _descFormatSQL lists every enabled (state=0) description-format template.
	_descFormatSQL = "SELECT id,typeid,copyright,components,lang,platform FROM archive_desc_format WHERE state=0"
)
// DescFormats loads all enabled description-format rows from the read
// replica. dfs is nil (with nil err) when no enabled rows exist.
func (d *Dao) DescFormats(c context.Context) (dfs []*archive.DescFormat, err error) {
	rows, err := d.rddb.Query(c, _descFormatSQL)
	if err != nil {
		log.Error("d.db.Query error(%v)", err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		df := &archive.DescFormat{}
		if err = rows.Scan(&df.ID, &df.TypeID, &df.Copyright, &df.Components, &df.Lang, &df.Platform); err != nil {
			log.Error("rows.Scan error(%v)", err)
			return
		}
		dfs = append(dfs, df)
	}
	// Fix: surface iteration errors (e.g. a connection dropped mid-scan) that
	// rows.Next() swallows; previously a truncated result looked successful.
	if err = rows.Err(); err != nil {
		log.Error("rows.Err error(%v)", err)
	}
	return
}

View File

@@ -0,0 +1,20 @@
package archive
import (
"context"
"testing"
"github.com/smartystreets/goconvey/convey"
)
// TestArchiveDescFormats checks that the desc-format listing query succeeds.
func TestArchiveDescFormats(t *testing.T) {
	var (
		c = context.Background()
	)
	convey.Convey("DescFormats", t, func(ctx convey.C) {
		_, err := d.DescFormats(c)
		ctx.Convey("Then err should be nil.dfs should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}

View File

@@ -0,0 +1,334 @@
package archive
import (
"context"
"fmt"
"time"
"go-common/app/service/main/videoup/model/archive"
"go-common/library/database/sql"
"go-common/library/log"
"go-common/library/xstr"
"github.com/pkg/errors"
)
const (
	// flow_design writes
	_upStateFlowSQL = "UPDATE flow_design SET state =? where id=?"
	_inFlowSQL      = "INSERT into flow_design(pool,oid,group_id,uid,remark) VALUES (?,?,?,?,?)"
	_inFlowLogSQL   = "INSERT into flow_design_log(pool,oid,group_id,uid,action,remark) VALUES (?,?,?,?,1,?)"
	// flow_group / flow_design reads
	_flowsSQL           = "SELECT id,rank,type,value,name,ctime FROM flow_group WHERE state=0 order by rank desc"
	_whiteMidSQL        = "SELECT oid,uid FROM flow_design WHERE pool=1 AND state=0 AND group_id=11"
	_isFlowGroupIDInSQL = "SELECT id FROM flow_design WHERE pool=? AND state=0 AND group_id=? AND oid=? limit 1"
	// archive mid configuration (forbid rules joined with their group value)
	_midsForbidSQL         = "SELECT flow_design.id,oid,value FROM flow_design left join flow_group on flow_design.group_id=flow_group.id WHERE flow_design.pool=1 AND flow_design.state=0 AND flow_design.parent=0 AND flow_design.group_id>=12 and flow_group.type=4 "
	_isActGroupIDSQL       = "SELECT state FROM flow_group WHERE id=?"
	_isMidIDSQL            = "SELECT id,pool,oid,group_id,parent,uid,remark,ctime,mtime FROM flow_design WHERE pool=1 AND state=0 AND oid=? AND parent=?"
	_findGroupIDByScopeSQL = "SELECT group_id FROM flow_scope WHERE pool= ? AND industry_id=? AND brand_id=? AND official=? AND state=0 order by id desc limit 1;"
	_appFlowsSQL           = "SELECT oid FROM flow_design WHERE pool=0 AND state=0 AND mtime>=? AND mtime<=? AND group_id=11"
	// %s placeholders below are filled with comma-joined integer id lists via fmt.Sprintf
	_flowGroupPool    = "SELECT id, pool FROM flow_group WHERE id IN (%s)"
	_flowsByOIDSQL    = "SELECT fd.id,fd.pool,fd.oid,fd.group_id,fd.parent,fd.state,fg.value FROM flow_design fd LEFT JOIN flow_group fg ON fd.group_id=fg.id WHERE fd.oid=? AND fd.state=0 AND fg.state=0"
	_flowsByGIDSQL    = "SELECT fd.id,fd.pool,fd.oid,fd.group_id,fd.parent,fd.state,fg.value FROM flow_design fd LEFT JOIN flow_group fg ON fd.group_id=fg.id WHERE fd.pool=? AND fd.group_id=? AND fd.state=0 AND fg.state=0 LIMIT ?,?"
	_flowUniqueSQL    = "SELECT id,pool,oid,group_id,parent,state FROM flow_design WHERE oid=? AND pool=? AND group_id=? LIMIT 1"
	_flowCountSQL     = "SELECT count(*) FROM flow_design fd LEFT JOIN flow_group fg ON fd.group_id=fg.id WHERE fd.pool=? AND fd.group_id=? AND fd.state=0 AND fg.state=0 "
	_flowOidsByGidSQL = "SELECT fd.id,fd.pool,fd.oid,fd.group_id,fd.parent,fd.state,fg.value FROM flow_design fd LEFT JOIN flow_group fg ON fd.group_id=fg.id WHERE fd.pool=? AND fd.group_id=? AND fd.state=0 AND fg.state=0 AND fd.oid IN (%s) "
)
// TxAddFlow inserts a flow_design row inside tx and returns the new row id.
// NOTE(review): the parameter is named "old" but is bound to the oid column —
// presumably a typo for "oid"; confirm before renaming.
func (d *Dao) TxAddFlow(tx *sql.Tx, old, uid, groupID int64, pool int8, remark string) (id int64, err error) {
	res, err := tx.Exec(_inFlowSQL, pool, old, groupID, uid, remark)
	if err != nil {
		log.Error("d._inFlow.Exec() error(%v)", err)
		return
	}
	id, err = res.LastInsertId()
	return
}
// TxUpFlowState updates flow_design.state for the given row id inside tx and
// returns the number of affected rows.
func (d *Dao) TxUpFlowState(tx *sql.Tx, state int8, id int64) (rows int64, err error) {
	result, execErr := tx.Exec(_upStateFlowSQL, state, id)
	if execErr != nil {
		log.Error("d.TxUpFlowState.Exec() error(%v)", execErr)
		err = execErr
		return
	}
	return result.RowsAffected()
}
// TxAddFlowLog inserts an audit row into flow_design_log inside tx (action is
// hard-coded to 1 in _inFlowLogSQL) and returns the number of affected rows.
// NOTE(review): "old" is bound to the oid column — presumably a typo for "oid".
func (d *Dao) TxAddFlowLog(tx *sql.Tx, old, uid, groupID int64, pool int8, remark string) (rows int64, err error) {
	res, err := tx.Exec(_inFlowLogSQL, pool, old, groupID, uid, remark)
	if err != nil {
		log.Error("d._inFlowLog.Exec() error(%v)", err)
		return
	}
	rows, err = res.RowsAffected()
	return
}
// Flows lists every enabled flow_group row ordered by rank descending. The
// group's name column is scanned into f.Remark (matching _flowsSQL column
// order).
func (d *Dao) Flows(c context.Context) (fs []*archive.Flow, err error) {
	rows, err := d.db.Query(c, _flowsSQL)
	if err != nil {
		log.Error("d.db.Query(%s) error(%v)", _flowsSQL, err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		f := &archive.Flow{}
		if err = rows.Scan(&f.ID, &f.Rank, &f.Type, &f.Value, &f.Remark, &f.CTime); err != nil {
			log.Error("rows.Scan error(%v)", err)
			return
		}
		fs = append(fs, f)
	}
	// Fix: report iteration errors that rows.Next() swallows so a truncated
	// result set does not look successful.
	if err = rows.Err(); err != nil {
		log.Error("rows.Err error(%v)", err)
	}
	return
}
// WhiteMids returns the whitelist (flow group 11, pool 1) as a map of
// mid -> operator uid that created the entry.
func (d *Dao) WhiteMids(c context.Context) (mids map[int64]int64, err error) {
	rows, err := d.db.Query(c, _whiteMidSQL)
	if err != nil {
		log.Error("d.db.Query(%s) error(%v)", _whiteMidSQL, err)
		return
	}
	defer rows.Close()
	mids = make(map[int64]int64)
	for rows.Next() {
		var mid, uid int64
		if err = rows.Scan(&mid, &uid); err != nil {
			log.Error("rows.Scan error(%v)", err)
			return
		}
		mids[mid] = uid
	}
	// Fix: surface iteration errors so a partially-read whitelist is not
	// silently treated as complete.
	if err = rows.Err(); err != nil {
		log.Error("rows.Err error(%v)", err)
	}
	return
}
// CheckActGroupID returns the state of flow_group row groupID. When the group
// does not exist, it reports archive.FlowDelete with nil err so callers can
// treat "missing" and "deleted" the same way.
func (d *Dao) CheckActGroupID(c context.Context, groupID int64) (state int8, err error) {
	row := d.db.QueryRow(c, _isActGroupIDSQL, groupID)
	if err = row.Scan(&state); err != nil {
		if err != sql.ErrNoRows {
			log.Error("row.Scan error(%v)", err)
			return
		}
		// missing row: normalize to the deleted state
		err = nil
		state = archive.FlowDelete
	}
	return
}
// CheckFlowGroupID returns the id of the active flow_design row matching
// (pool, groupID, oid); flowID stays 0 with nil err when no row matches.
func (d *Dao) CheckFlowGroupID(c context.Context, pool int8, oid, groupID int64) (flowID int64, err error) {
	scanErr := d.db.QueryRow(c, _isFlowGroupIDInSQL, pool, groupID, oid).Scan(&flowID)
	if scanErr == nil || scanErr == sql.ErrNoRows {
		// a missing row is not an error for callers
		return
	}
	err = scanErr
	log.Error("row.Scan error(%v)", err)
	return
}
// CheckFlowMid looks up the generic mid-based flow routing rows in
// flow_design (pool 1) for the given oid/parent.
func (d *Dao) CheckFlowMid(c context.Context, pool int8, oid int64) (flows []*archive.FlowData, err error) {
	// NOTE: pool is bound to the "parent" placeholder of _isMidIDSQL; the SQL
	// itself pins pool=1.
	rows, err := d.db.Query(c, _isMidIDSQL, oid, pool)
	if err != nil {
		log.Error("d.db.Query (%s)|(%d)|(%d) error(%v)", _isMidIDSQL, oid, pool, err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		item := &archive.FlowData{}
		if err = rows.Scan(&item.ID, &item.Pool, &item.OID, &item.GroupID, &item.Parent, &item.UID, &item.Remark, &item.CTime, &item.MTime); err != nil {
			log.Error("rows.Scan error(%v)", err)
			return
		}
		flows = append(flows, item)
	}
	// Fix: surface iteration errors instead of silently truncating.
	if err = rows.Err(); err != nil {
		log.Error("rows.Err error(%v)", err)
		return
	}
	log.Info("flowDesign mids design (%+v)", flows)
	return
}
// FindGroupIDByScope resolves the newest active flow group for a
// (pool, industry, brand, official) scope tuple. When no scope rule matches,
// it falls back to group id 1 with nil err.
func (d *Dao) FindGroupIDByScope(c context.Context, pool int8, IndustryID, brandID int64, official int8) (groupID int64, err error) {
	row := d.db.QueryRow(c, _findGroupIDByScopeSQL, pool, IndustryID, brandID, official)
	if err = row.Scan(&groupID); err != nil {
		if err != sql.ErrNoRows {
			log.Error("row.Scan error(%v)", err)
			return
		}
		// no scope rule: default group
		err = nil
		groupID = 1
	}
	return
}
// ForbidMids returns the per-mid forbid configuration: a map of archive mid
// to the flow-group values (type 4, group id >= 12) that apply to it.
func (d *Dao) ForbidMids(c context.Context) (mids map[int64][]string, err error) {
	rows, err := d.db.Query(c, _midsForbidSQL)
	if err != nil {
		log.Error("d.db.Query(%s) error(%v)", _midsForbidSQL, err)
		return
	}
	defer rows.Close()
	mids = make(map[int64][]string)
	for rows.Next() {
		var (
			id    int64
			mid   int64
			value string
		)
		if err = rows.Scan(&id, &mid, &value); err != nil {
			log.Error("rows.Scan error(%v)", err)
			return
		}
		mids[mid] = append(mids[mid], value)
	}
	// Fix: surface iteration errors so a truncated rule set is not applied.
	if err = rows.Err(); err != nil {
		log.Error("rows.Err error(%v)", err)
		return
	}
	log.Info("mids (%+v)", mids)
	return
}
// AppFeedAids lists archive ids routed to the app feed (pool 0, group 11)
// whose rows were modified inside [startTime, endTime]. Zero oids are skipped.
func (d *Dao) AppFeedAids(c context.Context, startTime, endTime time.Time) (aids []int64, err error) {
	rows, err := d.db.Query(c, _appFlowsSQL, startTime, endTime)
	if err != nil {
		log.Error("d.db.Query(%s|%v|%v) error(%v)", _appFlowsSQL, startTime, endTime, err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		var aid int64
		if err = rows.Scan(&aid); err != nil {
			log.Error("rows.Scan error(%v)", err)
			return
		}
		// defensive: ignore placeholder rows with no archive id
		if aid == 0 {
			continue
		}
		aids = append(aids, aid)
	}
	// Fix: surface iteration errors instead of returning a silently truncated list.
	if err = rows.Err(); err != nil {
		log.Error("rows.Err error(%v)", err)
	}
	return
}
// FlowGroupPools returns the pool flag for each of the given flow-group ids.
func (d *Dao) FlowGroupPools(c context.Context, ids []int64) (res map[int64]int8, err error) {
	var (
		rows *sql.Rows
		id   int64
		pool int8
	)
	res = map[int64]int8{}
	// Fix: an empty id list would render "IN ()", which is invalid SQL.
	if len(ids) == 0 {
		return
	}
	idstr := xstr.JoinInts(ids)
	// Fix: honor the caller's context (was context.TODO()) so cancellation
	// and deadlines propagate to the query.
	if rows, err = d.db.Query(c, fmt.Sprintf(_flowGroupPool, idstr)); err != nil {
		log.Error("FlowGroupPools d.db.Query error(%v) ids(%s)", err, idstr)
		return
	}
	defer rows.Close()
	for rows.Next() {
		if err = rows.Scan(&id, &pool); err != nil {
			log.Error("FlowGroupPools rows.Scan error(%v) ids(%s)", err, idstr)
			return
		}
		res[id] = pool
	}
	// Fix: surface iteration errors.
	if err = rows.Err(); err != nil {
		log.Error("FlowGroupPools rows.Err error(%v) ids(%s)", err, idstr)
	}
	return
}
// FlowsByOID lists every active flow_design row hit by oid, joined with its
// enabled flow_group value. res is non-nil (possibly empty) on success.
func (d *Dao) FlowsByOID(c context.Context, oid int64) (res []*archive.FlowData, err error) {
	var (
		rows *sql.Rows
	)
	res = []*archive.FlowData{}
	// Fix: honor the caller's context (was context.TODO()) so cancellation
	// and deadlines propagate.
	if rows, err = d.db.Query(c, _flowsByOIDSQL, oid); err != nil {
		log.Error("FlowsByOID d.db.Query error(%v) oid(%d)", err, oid)
		return
	}
	defer rows.Close()
	for rows.Next() {
		f := &archive.FlowData{}
		if err = rows.Scan(&f.ID, &f.Pool, &f.OID, &f.GroupID, &f.Parent, &f.State, &f.GroupValue); err != nil {
			log.Error("FlowsByOID rows.Scan error(%v) oid(%d)", err, oid)
			return
		}
		res = append(res, f)
	}
	// Fix: surface iteration errors.
	if err = rows.Err(); err != nil {
		log.Error("FlowsByOID rows.Err error(%v) oid(%d)", err, oid)
	}
	return
}
// FlowUnique fetches the single flow_design row matching (oid, pool, groupID).
// Both f and err are nil when no row matches.
func (d *Dao) FlowUnique(c context.Context, oid, groupID int64, pool int8) (f *archive.FlowData, err error) {
	f = &archive.FlowData{}
	// Fix: honor the caller's context (was context.TODO()) so cancellation
	// and deadlines propagate.
	if err = d.db.QueryRow(c, _flowUniqueSQL, oid, pool, groupID).Scan(&f.ID, &f.Pool, &f.OID, &f.GroupID, &f.Parent, &f.State); err != nil {
		if err == sql.ErrNoRows {
			err = nil
			f = nil
		} else {
			log.Error("row.Scan error(%v)", err)
		}
	}
	return
}
// OidsFlowByGID returns the active flow_design rows among oids that belong to
// group gid in the given pool (i.e. which of the oids are restricted by gid).
//
// oids is interpolated into the IN (...) clause with fmt.Sprintf; callers must
// pass a comma-joined list of integers (e.g. from xstr.JoinInts) — never raw
// user input.
func (d *Dao) OidsFlowByGID(c context.Context, pool, gid int64, oids string) (res []*archive.FlowData, err error) {
	var (
		rows *sql.Rows
	)
	rows, err = d.db.Query(c, fmt.Sprintf(_flowOidsByGidSQL, oids), pool, gid)
	if err != nil {
		err = errors.WithStack(err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		f := &archive.FlowData{}
		if err = rows.Scan(&f.ID, &f.Pool, &f.OID, &f.GroupID, &f.Parent, &f.State, &f.GroupValue); err != nil {
			// Fix: log under this function's name (was copy-pasted "FlowsByOID").
			log.Error("OidsFlowByGID rows.Scan error(%v) gid(%d)", err, gid)
			return
		}
		res = append(res, f)
	}
	// Fix: surface iteration errors.
	if err = rows.Err(); err != nil {
		err = errors.WithStack(err)
	}
	return
}
// CountByGID counts the active flow_design rows bound to group gid in the
// given pool (join filters both fd.state=0 and fg.state=0). A COUNT query
// always yields one row, so the ErrNoRows branch is defensive only.
func (d *Dao) CountByGID(c context.Context, pool, gid int64) (count int64, err error) {
	row := d.db.QueryRow(c, _flowCountSQL, pool, gid)
	if err = row.Scan(&count); err != nil {
		if err == sql.ErrNoRows {
			err = nil
		} else {
			err = errors.WithStack(err)
		}
	}
	return
}
// FlowPage returns one page of active flow_design rows for (pool, gid);
// pn is the 1-based page number and ps the page size.
func (d *Dao) FlowPage(c context.Context, pool, gid, pn, ps int64) (res []*archive.FlowData, err error) {
	var rows *sql.Rows
	rows, err = d.db.Query(c, _flowsByGIDSQL, pool, gid, (pn-1)*ps, ps)
	if err != nil {
		err = errors.WithStack(err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		f := &archive.FlowData{}
		if err = rows.Scan(&f.ID, &f.Pool, &f.OID, &f.GroupID, &f.Parent, &f.State, &f.GroupValue); err != nil {
			// Fix: log under this function's name (was copy-pasted "FlowsByOID").
			log.Error("FlowPage rows.Scan error(%v) gid(%d)", err, gid)
			return
		}
		res = append(res, f)
	}
	err = rows.Err()
	return
}

View File

@@ -0,0 +1,214 @@
package archive
import (
"context"
"testing"
"time"
. "github.com/smartystreets/goconvey/convey"
)
// TestDao_TxAddFlow inserts a flow_design row in a transaction, committing on
// success and rolling back on failure.
func TestDao_TxAddFlow(t *testing.T) {
	var (
		c     = context.Background()
		tx, _ = d.BeginTran(c)
	)
	Convey("TxAddFlow", t, func(ctx C) {
		_, err := d.TxAddFlow(tx, 123, 23333, 0, 0, "ut")
		if err != nil {
			tx.Rollback()
		} else {
			tx.Commit()
		}
		So(err, ShouldBeNil)
	})
}

// TestDao_TxUpFlowState flips the state of a flow_design row in a transaction.
func TestDao_TxUpFlowState(t *testing.T) {
	var (
		c     = context.Background()
		tx, _ = d.BeginTran(c)
	)
	Convey("TxUpFlowState", t, func(ctx C) {
		_, err := d.TxUpFlowState(tx, 0, 23333)
		if err != nil {
			tx.Rollback()
		} else {
			tx.Commit()
		}
		So(err, ShouldBeNil)
	})
}

// TestDao_CheckFlowGroupID queries for a flow hit of (pool=0, oid=23333, group=24).
func TestDao_CheckFlowGroupID(t *testing.T) {
	var (
		c = context.Background()
	)
	Convey("CheckFlowGroupID", t, func(ctx C) {
		_, err := d.CheckFlowGroupID(c, 0, 23333, 24)
		So(err, ShouldBeNil)
	})
}

// TestDao_FlowPage fetches the first page of group 23333 in pool 0.
func TestDao_FlowPage(t *testing.T) {
	var (
		c = context.Background()
	)
	Convey("FlowPage", t, func(ctx C) {
		_, err := d.FlowPage(c, 0, 23333, 1, 10)
		So(err, ShouldBeNil)
	})
}

// TestDao_TxAddFlowLog writes a flow_design_log audit row in a transaction.
// NOTE(review): against the signature (tx, old, uid, groupID, pool, remark)
// this call binds groupID=0 and pool=24 — the group/pool arguments look
// swapped; confirm intent before changing the fixture.
func TestDao_TxAddFlowLog(t *testing.T) {
	var (
		c     = context.Background()
		tx, _ = d.BeginTran(c)
	)
	Convey("TxAddFlowLog", t, func(ctx C) {
		_, err := d.TxAddFlowLog(tx, 0, 123, 0, 24, "")
		if err != nil {
			tx.Rollback()
		} else {
			tx.Commit()
		}
		So(err, ShouldBeNil)
	})
}

// TestArchiveFlows lists all enabled flow groups.
func TestArchiveFlows(t *testing.T) {
	var (
		c = context.Background()
	)
	Convey("Flows", t, func(ctx C) {
		_, err := d.Flows(c)
		ctx.Convey("Then err should be nil.fs should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
		})
	})
}

// TestFlowsByOID lists flow hits for a known oid.
func TestFlowsByOID(t *testing.T) {
	var (
		c = context.Background()
	)
	Convey("FlowsByOID", t, func(ctx C) {
		_, err := d.FlowsByOID(c, 2880441)
		ctx.Convey("Then err should be nil.fs should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
		})
	})
}

// TestFlowUnique fetches the unique flow row for (oid, group, pool).
func TestFlowUnique(t *testing.T) {
	var (
		c = context.Background()
	)
	Convey("FlowUnique", t, func(ctx C) {
		_, err := d.FlowUnique(c, 2880441, 11, 0)
		ctx.Convey("Then err should be nil.fs should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
		})
	})
}

// TestFlowGroupPools resolves pool flags for a list of group ids.
func TestFlowGroupPools(t *testing.T) {
	var (
		c = context.Background()
	)
	Convey("FlowGroupPools", t, func(ctx C) {
		_, err := d.FlowGroupPools(c, []int64{0})
		ctx.Convey("Then err should be nil.fs should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
		})
	})
}

// TestFlowPage fetches the first page of group 11 in pool 0.
func TestFlowPage(t *testing.T) {
	var (
		c = context.Background()
	)
	Convey("FlowPage", t, func(ctx C) {
		_, err := d.FlowPage(c, 0, 11, 1, 10)
		ctx.Convey("Then err should be nil.fs should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
		})
	})
}

// TestFindGroupIDByScope resolves a group id from a scope tuple.
func TestFindGroupIDByScope(t *testing.T) {
	var (
		c = context.Background()
	)
	Convey("FindGroupIDByScope", t, func(ctx C) {
		_, err := d.FindGroupIDByScope(c, 0, 11, 1, 1)
		ctx.Convey("Then err should be nil.fs should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
		})
	})
}

// TestArchiveWhiteMids loads the mid whitelist.
func TestArchiveWhiteMids(t *testing.T) {
	var (
		c = context.Background()
	)
	Convey("WhiteMids", t, func(ctx C) {
		_, err := d.WhiteMids(c)
		ctx.Convey("Then err should be nil.mids should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
		})
	})
}

// TestArchiveCheckActGroupID checks the state lookup of flow group 1.
func TestArchiveCheckActGroupID(t *testing.T) {
	var (
		c       = context.Background()
		groupID = int64(1)
	)
	Convey("CheckActGroupID", t, func(ctx C) {
		_, err := d.CheckActGroupID(c, groupID)
		ctx.Convey("Then err should be nil.state should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
		})
	})
}

// TestArchiveCheckFlowMid queries mid-based flow routing rows.
func TestArchiveCheckFlowMid(t *testing.T) {
	var (
		c    = context.Background()
		pool = int8(1)
		oid  = int64(2)
	)
	Convey("CheckFlowMid", t, func(ctx C) {
		_, err := d.CheckFlowMid(c, pool, oid)
		ctx.Convey("Then err should be nil.flows should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
		})
	})
}

// TestArchiveForbidMids loads the per-mid forbid configuration.
func TestArchiveForbidMids(t *testing.T) {
	var (
		c = context.Background()
	)
	Convey("ForbidMids", t, func(ctx C) {
		_, err := d.ForbidMids(c)
		ctx.Convey("Then err should be nil.mids should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
		})
	})
}

// TestArchiveAppFeedAids lists app-feed aids inside a (degenerate) time window.
func TestArchiveAppFeedAids(t *testing.T) {
	var (
		c         = context.Background()
		startTime = time.Now()
		endTime   = time.Now()
	)
	Convey("AppFeedAids", t, func(ctx C) {
		_, err := d.AppFeedAids(c, startTime, endTime)
		ctx.Convey("Then err should be nil.aids should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
		})
	})
}

View File

@@ -0,0 +1,35 @@
package archive
import (
"go-common/app/service/main/videoup/model/archive"
xsql "go-common/library/database/sql"
"go-common/library/log"
)
const (
	// _upFlowIDSQL upserts the on_flow_id of an archive's forbid row.
	_upFlowIDSQL = "INSERT INTO archive_forbid (aid,on_flow_id) VALUES (?,?) ON DUPLICATE KEY UPDATE on_flow_id=?"
	// _inForbidSQL upserts the four forbid attribute bitfields of an archive.
	_inForbidSQL = `INSERT INTO archive_forbid (aid,rank_attr,recommend_attr,dynamic_attr,show_attr) VALUES (?,?,?,?,?) ON DUPLICATE KEY UPDATE
	rank_attr=?,recommend_attr=?,dynamic_attr=?,show_attr=?`
)
// TxUpForbid upserts the on_flow_id of archive aid's forbid row inside tx and
// returns the number of affected rows. (Previous comment said "archive addit",
// but the statement targets archive_forbid.)
func (d *Dao) TxUpForbid(tx *xsql.Tx, aid, flowID int64) (rows int64, err error) {
	// flowID is bound twice: once for INSERT, once for the ON DUPLICATE update.
	res, err := tx.Exec(_upFlowIDSQL, aid, flowID, flowID)
	if err != nil {
		log.Error("d.upFlowID.Exec() error(%v)", err)
		return
	}
	rows, err = res.RowsAffected()
	return
}
// TxUpForbidAttr upserts the forbid attribute bitfields (rank, recommend,
// dynamic, show) for af.Aid inside tx and reports the affected-row count.
func (d *Dao) TxUpForbidAttr(tx *xsql.Tx, af *archive.ForbidAttr) (rows int64, err error) {
	// each attribute is bound twice: INSERT values plus ON DUPLICATE updates
	result, execErr := tx.Exec(_inForbidSQL, af.Aid, af.RankV, af.RecommendV, af.DynamicV, af.ShowV, af.RankV, af.RecommendV, af.DynamicV, af.ShowV)
	if execErr != nil {
		log.Error("d.inForbid.Exec error(%v)", execErr)
		err = execErr
		return
	}
	return result.RowsAffected()
}

View File

@@ -0,0 +1,131 @@
package archive
import (
"context"
"fmt"
"strconv"
"strings"
"time"
"go-common/app/service/main/videoup/model/archive"
"go-common/library/database/sql"
"go-common/library/log"
)
const (
	// writes ("VALUE" is valid MySQL shorthand for "VALUES")
	_inArcHistorySQL   = "INSERT INTO archive_edit_history(aid,mid,title,content,cover,tag) VALUE(?,?,?,?,?,?)"
	_inVideoHistorySQL = "INSERT INTO archive_video_edit_history(aid,cid,hid,eptitle,description,filename) VALUE (?,?,?,?,?,?)"
	// backfill a cid only where it is still unset (cid = 0)
	_upVideoHistorySQL = "UPDATE archive_video_edit_history SET cid=? WHERE aid=? AND filename=? AND cid = 0"
	// reads
	_arcHistorySQL   = "SELECT id,mid,aid,title,content,cover,tag,ctime FROM archive_edit_history WHERE id =?"
	_arcHistorysSQL  = "SELECT id,mid,aid,title,content,cover,tag,ctime FROM archive_edit_history WHERE aid =? and ctime >=? ORDER BY id DESC"
	_videoHistorySQL = "SELECT cid,eptitle,description,filename FROM archive_video_edit_history WHERE hid =? ORDER BY id ASC"
	// %s is filled with a generated multi-row VALUES list (see TxAddVideoHistorys)
	_inVideoHistorysSQL = "INSERT INTO archive_video_edit_history(aid,cid,hid,eptitle,description,filename) VALUES %s"
)
// TxAddArcHistory records one archive edit snapshot inside tx and returns the
// new history id (hid), which video-history rows subsequently reference.
func (d *Dao) TxAddArcHistory(tx *sql.Tx, aid, mid int64, title, content, cover, tag string) (hid int64, err error) {
	res, err := tx.Exec(_inArcHistorySQL, aid, mid, title, content, cover, tag)
	if err != nil {
		log.Error("d.inArcHistory.Exec() error(%v)", err)
		return
	}
	hid, err = res.LastInsertId()
	return
}
// TxAddVideoHistory records one video edit snapshot bound to history id hid
// inside tx and reports the affected-row count.
func (d *Dao) TxAddVideoHistory(tx *sql.Tx, hid int64, v *archive.Video) (rows int64, err error) {
	result, execErr := tx.Exec(_inVideoHistorySQL, v.Aid, v.Cid, hid, v.Title, v.Desc, v.Filename)
	if execErr != nil {
		log.Error("d.inVideoHistory.Exec() error(%v)", execErr)
		err = execErr
		return
	}
	return result.RowsAffected()
}
// TxUpVideoHistory backfills the cid on video-history rows matching
// (aid, filename) inside tx; the statement only touches rows whose cid is
// still 0, so already-resolved rows are untouched. Returns affected rows.
func (d *Dao) TxUpVideoHistory(tx *sql.Tx, aid, cid int64, filename string) (rows int64, err error) {
	res, err := tx.Exec(_upVideoHistorySQL, cid, aid, filename)
	if err != nil {
		log.Error("d.upVideoHistory.Exec() error(%v)", err)
		return
	}
	rows, err = res.RowsAffected()
	return
}
// ArcHistory fetches one archive edit snapshot by history id from the read
// replica. On a missing row, err is nil and ah is a zero-valued struct
// (matching the original behavior — ah is never nil).
func (d *Dao) ArcHistory(c context.Context, hid int64) (ah *archive.ArcHistory, err error) {
	ah = &archive.ArcHistory{}
	err = d.rddb.QueryRow(c, _arcHistorySQL, hid).Scan(&ah.ID, &ah.Mid, &ah.Aid, &ah.Title, &ah.Content, &ah.Cover, &ah.Tag, &ah.CTime)
	switch err {
	case nil:
		// populated
	case sql.ErrNoRows:
		err = nil
	default:
		log.Error("row.Scan error(%v)", err)
	}
	return
}
// ArcHistorys lists archive edit snapshots for aid created at or after stime,
// newest first, from the read replica.
func (d *Dao) ArcHistorys(c context.Context, aid int64, stime time.Time) (ahs []*archive.ArcHistory, err error) {
	rows, err := d.rddb.Query(c, _arcHistorysSQL, aid, stime)
	if err != nil {
		log.Error("d.arcHissStmt.Query(%d) error(%v)", aid, err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		ah := &archive.ArcHistory{}
		if err = rows.Scan(&ah.ID, &ah.Mid, &ah.Aid, &ah.Title, &ah.Content, &ah.Cover, &ah.Tag, &ah.CTime); err != nil {
			log.Error("rows.Scan error(%v)", err)
			return
		}
		ahs = append(ahs, ah)
	}
	// Fix: surface iteration errors so a truncated history is not treated as complete.
	if err = rows.Err(); err != nil {
		log.Error("rows.Err error(%v)", err)
	}
	return
}
// VideoHistory lists the video edit snapshots attached to history id hid,
// in insertion order, from the read replica.
func (d *Dao) VideoHistory(c context.Context, hid int64) (vhs []*archive.VideoHistory, err error) {
	rows, err := d.rddb.Query(c, _videoHistorySQL, hid)
	if err != nil {
		log.Error("d.videoHisStmt.Query(%d) error(%v)", hid, err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		vh := &archive.VideoHistory{}
		if err = rows.Scan(&vh.Cid, &vh.Title, &vh.Desc, &vh.Filename); err != nil {
			log.Error("rows.Scan error(%v)", err)
			return
		}
		vhs = append(vhs, vh)
	}
	// Fix: surface iteration errors instead of silently truncating.
	if err = rows.Err(); err != nil {
		log.Error("rows.Err error(%v)", err)
	}
	return
}
// TxAddVideoHistorys batch-inserts video edit snapshots bound to history id
// hid inside tx, assembling a single multi-row VALUES statement so the whole
// batch lands in one round trip.
func (d *Dao) TxAddVideoHistorys(tx *sql.Tx, hid int64, vs []*archive.Video) (err error) {
	log.Info("info TxAddVideoHistorys: hid(%d)|vs(%+v)|cntVs(%d)", hid, vs, len(vs))
	// Fix: an empty batch would render "... VALUES " — a syntax error — so
	// treat it as a no-op instead of sending invalid SQL.
	if len(vs) == 0 {
		return
	}
	l := len(vs)
	vStrs := make([]string, 0, l)
	vArgs := make([]interface{}, 0, l*6)
	for _, v := range vs {
		vStrs = append(vStrs, "(?, ?, ?, ?, ?, ?)")
		// Numeric ids are bound as decimal strings; MySQL coerces them back
		// to integers on insert.
		vArgs = append(vArgs, strconv.FormatInt(v.Aid, 10))
		vArgs = append(vArgs, strconv.FormatInt(v.Cid, 10))
		vArgs = append(vArgs, strconv.FormatInt(hid, 10))
		vArgs = append(vArgs, v.Title)
		vArgs = append(vArgs, v.Desc)
		vArgs = append(vArgs, v.Filename)
	}
	stmt := fmt.Sprintf(_inVideoHistorysSQL, strings.Join(vStrs, ","))
	_, err = tx.Exec(stmt, vArgs...)
	if err != nil {
		log.Error("TxAddVideoHistorys: tx.Exec(vs(%+v))|hid(%d) error(%v)", vs, hid, err)
	}
	return
}

View File

@@ -0,0 +1,124 @@
package archive
import (
"context"
"testing"
"time"
. "github.com/smartystreets/goconvey/convey"
"go-common/app/service/main/videoup/model/archive"
)
// TestDao_TxAddArcHistory writes one archive edit snapshot in a transaction.
func TestDao_TxAddArcHistory(t *testing.T) {
	var (
		c     = context.Background()
		tx, _ = d.BeginTran(c)
	)
	Convey("TxAddArcHistory", t, func(ctx C) {
		_, err := d.TxAddArcHistory(tx, 23333, 123, "ssss", "content", "", "")
		if err != nil {
			tx.Rollback()
		} else {
			tx.Commit()
		}
		So(err, ShouldBeNil)
	})
}

// TestDao_TxAddVideoHistory writes one video edit snapshot in a transaction.
func TestDao_TxAddVideoHistory(t *testing.T) {
	var (
		c     = context.Background()
		tx, _ = d.BeginTran(c)
		v     = &archive.Video{
			Aid:   23333,
			Cid:   12121,
			Title: "sssss",
		}
	)
	Convey("TxAddVideoHistory", t, func(ctx C) {
		_, err := d.TxAddVideoHistory(tx, 23333, v)
		if err != nil {
			tx.Rollback()
		} else {
			tx.Commit()
		}
		So(err, ShouldBeNil)
	})
}

// TestDao_TxUpVideoHistory backfills a cid in a transaction.
func TestDao_TxUpVideoHistory(t *testing.T) {
	var (
		c     = context.Background()
		tx, _ = d.BeginTran(c)
	)
	Convey("TxUpVideoHistory", t, func(ctx C) {
		_, err := d.TxUpVideoHistory(tx, 23333, 1212, "")
		if err != nil {
			tx.Rollback()
		} else {
			tx.Commit()
		}
		So(err, ShouldBeNil)
	})
}

// TestDao_TxAddVideoHistorys batch-writes video edit snapshots in a transaction.
func TestDao_TxAddVideoHistorys(t *testing.T) {
	var (
		c     = context.Background()
		tx, _ = d.BeginTran(c)
		vs    = []*archive.Video{{
			Aid:   23333,
			Cid:   12121,
			Title: "sssss",
		}}
	)
	Convey("TxAddVideoHistorys", t, func(ctx C) {
		err := d.TxAddVideoHistorys(tx, 23333, vs)
		if err != nil {
			tx.Rollback()
		} else {
			tx.Commit()
		}
		So(err, ShouldBeNil)
	})
}

// TestArchiveArcHistory reads back one edit snapshot by history id.
func TestArchiveArcHistory(t *testing.T) {
	var (
		c   = context.Background()
		hid = int64(23333)
	)
	Convey("ArcHistory", t, func(ctx C) {
		_, err := d.ArcHistory(c, hid)
		ctx.Convey("Then err should be nil.ah should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
		})
	})
}

// TestArchiveArcHistorys lists edit snapshots for an aid since now.
func TestArchiveArcHistorys(t *testing.T) {
	var (
		c     = context.Background()
		aid   = int64(23333)
		stime = time.Now()
	)
	Convey("ArcHistorys", t, func(ctx C) {
		_, err := d.ArcHistorys(c, aid, stime)
		ctx.Convey("Then err should be nil.ahs should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
		})
	})
}

// TestArchiveVideoHistory lists video snapshots attached to a history id.
func TestArchiveVideoHistory(t *testing.T) {
	var (
		c   = context.Background()
		hid = int64(23333)
	)
	Convey("VideoHistory", t, func(ctx C) {
		_, err := d.VideoHistory(c, hid)
		ctx.Convey("Then err should be nil.vhs should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
		})
	})
}

View File

@@ -0,0 +1,88 @@
// Code generated by $GOPATH/src/go-common/app/tool/cache/mc. DO NOT EDIT.
/*
Package archive is a generated mc cache package.
It is generated from:
type _mc interface {
// mc: -key=staffKey
CacheStaffData(c context.Context, key int64) ([]*archive.Staff, error)
// 这里也支持自定义注释 会替换默认的注释
// mc: -key=staffKey -expire=3 -encode=json|gzip
AddCacheStaffData(c context.Context, key int64, value []*archive.Staff) error
// mc: -key=staffKey
DelCacheStaffData(c context.Context, key int64) error
}
*/
package archive
import (
"context"
"fmt"
"go-common/app/service/main/videoup/model/archive"
"go-common/library/cache/memcache"
"go-common/library/log"
"go-common/library/stat/prom"
)
// Generated reference keeping the _mc template interface in use; *Dao's
// methods below mirror its signatures.
var _ _mc
// CacheStaffData get data from mc
// NOTE: this file is generated by go-common/app/tool/cache/mc — regenerate
// rather than hand-editing. A cache miss returns (nil, nil).
func (d *Dao) CacheStaffData(c context.Context, id int64) (res []*archive.Staff, err error) {
	conn := d.mc.Get(c)
	defer conn.Close()
	key := staffKey(id)
	reply, err := conn.Get(key)
	if err != nil {
		// miss is not an error; anything else is counted and logged
		if err == memcache.ErrNotFound {
			err = nil
			return
		}
		prom.BusinessErrCount.Incr("mc:CacheStaffData")
		log.Errorv(c, log.KV("CacheStaffData", fmt.Sprintf("%+v", err)), log.KV("key", key))
		return
	}
	res = []*archive.Staff{}
	err = conn.Scan(reply, &res)
	if err != nil {
		prom.BusinessErrCount.Incr("mc:CacheStaffData")
		log.Errorv(c, log.KV("CacheStaffData", fmt.Sprintf("%+v", err)), log.KV("key", key))
		return
	}
	return
}
// AddCacheStaffData stores the staff list in memcache as gzipped JSON with a
// 3-second TTL; an empty value is skipped entirely.
// NOTE: generated by go-common/app/tool/cache/mc — regenerate rather than
// hand-editing.
func (d *Dao) AddCacheStaffData(c context.Context, id int64, val []*archive.Staff) (err error) {
	if len(val) == 0 {
		return
	}
	conn := d.mc.Get(c)
	defer conn.Close()
	key := staffKey(id)
	item := &memcache.Item{Key: key, Object: val, Expiration: 3, Flags: memcache.FlagJSON | memcache.FlagGzip}
	if err = conn.Set(item); err != nil {
		prom.BusinessErrCount.Incr("mc:AddCacheStaffData")
		log.Errorv(c, log.KV("AddCacheStaffData", fmt.Sprintf("%+v", err)), log.KV("key", key))
		return
	}
	return
}
// DelCacheStaffData delete data from mc
// NOTE: generated by go-common/app/tool/cache/mc — regenerate rather than
// hand-editing. Deleting an absent key is treated as success.
func (d *Dao) DelCacheStaffData(c context.Context, id int64) (err error) {
	conn := d.mc.Get(c)
	defer conn.Close()
	key := staffKey(id)
	if err = conn.Delete(key); err != nil {
		if err == memcache.ErrNotFound {
			err = nil
			return
		}
		prom.BusinessErrCount.Incr("mc:DelCacheStaffData")
		log.Errorv(c, log.KV("DelCacheStaffData", fmt.Sprintf("%+v", err)), log.KV("key", key))
		return
	}
	return
}

View File

@@ -0,0 +1,20 @@
package archive
import (
"context"
"go-common/library/log"
)
const (
	// _inNetsafeSQL records the content md5 for a netsafe id (nid).
	_inNetsafeSQL = "INSERT INTO netsafe (nid,md5) VALUES (?,?)"
)
// AddNetSafeMd5 records the md5 checksum for netsafe id nid and returns the
// number of inserted rows.
func (d *Dao) AddNetSafeMd5(c context.Context, nid int64, md5 string) (rows int64, err error) {
	result, execErr := d.db.Exec(c, _inNetsafeSQL, nid, md5)
	if execErr != nil {
		log.Error("_inNetsafeSQL.Exec error(%v)", execErr)
		err = execErr
		return
	}
	return result.RowsAffected()
}

View File

@@ -0,0 +1,17 @@
package archive
import (
"context"
. "github.com/smartystreets/goconvey/convey"
"testing"
)
// TestDao_AddNetSafeMd5 inserts an md5 record for a fixed nid.
func TestDao_AddNetSafeMd5(t *testing.T) {
	var (
		c = context.Background()
	)
	Convey("AddNetSafeMd5", t, func(ctx C) {
		_, err := d.AddNetSafeMd5(c, 23333, "ssadasdasdasd")
		So(err, ShouldBeNil)
	})
}

View File

@@ -0,0 +1,491 @@
package archive
import (
"bytes"
"context"
"fmt"
"time"
"go-common/app/service/main/videoup/model/archive"
"go-common/library/database/sql"
"go-common/library/log"
"go-common/library/xstr"
farm "github.com/dgryski/go-farm"
)
const (
	// inserts: video rows carry a farmhash64 of the filename for fast lookup
	_inVideoCidSQL = `INSERT IGNORE INTO video (id,filename,src_type,resolutions,playurl,status,xcode_state,duration,filesize,attribute,failcode,hash64)
	VALUES (?,?,?,?,?,?,?,?,?,?,?,?)`
	_inNewVideoSQL = `INSERT INTO video (filename,src_type,resolutions,playurl,status,xcode_state,duration,filesize,attribute,failcode,hash64)
	VALUES (?,?,?,?,?,?,?,?,?,?,?)`
	_inVideoRelationSQL = "INSERT IGNORE INTO archive_video_relation (id,aid,cid,title,description,index_order,ctime) VALUES (?,?,?,?,?,?,?)"
	// updates
	_upVideoRelationSQL = "UPDATE archive_video_relation SET title=?,description=?,index_order=? ,state=? WHERE aid=? and cid=?"
	_upRelationStateSQL = "UPDATE archive_video_relation SET state=? WHERE aid=? AND cid=?"
	_upVideoStatusSQL   = "UPDATE video SET status=? WHERE id=?"
	_upNewVideoSQL      = "UPDATE video SET src_type=?,status=?,xcode_state=? WHERE id=?"
	// selects: hash64 narrows the filename lookup before the exact match
	_newVideoFnSQL   = "SELECT id,filename,src_type,resolutions,playurl,status,xcode_state,duration,filesize,attribute,failcode,ctime,mtime,dimensions FROM video WHERE hash64=? AND filename=?"
	_newVideoByFnSQL = `SELECT avr.id,v.filename,avr.cid,avr.aid,avr.title,avr.description,v.src_type,v.duration,v.filesize,v.resolutions,v.playurl,v.failcode,
	avr.index_order,v.attribute,v.xcode_state,avr.state,avr.ctime,avr.mtime,v.dimensions FROM archive_video_relation avr JOIN video v on avr.cid = v.id
	WHERE hash64=? AND filename=?`
	// %s placeholders are filled with comma-joined lists via fmt.Sprintf
	_newVideoDataCidsFnSQL = "SELECT id,filename FROM video WHERE hash64 in (%s) AND filename in (%s)"
	_newsimpleArcVideoSQL  = `SELECT cid,title,index_order,state,mtime FROM archive_video_relation WHERE aid=?`
	_newVideosSQL          = `SELECT avr.id,v.filename,avr.cid,avr.aid,avr.title,avr.description,v.src_type,v.duration,v.filesize,v.resolutions,v.playurl,v.failcode,
	avr.index_order,v.attribute,v.xcode_state,avr.state,v.status,avr.ctime,avr.mtime,v.dimensions FROM archive_video_relation avr JOIN video v on avr.cid = v.id
	WHERE aid=? ORDER BY index_order`
	_newvideoCidSQL = `SELECT avr.id,v.filename,avr.cid,avr.aid,avr.title,avr.description,v.src_type,v.duration,v.filesize,v.resolutions,v.playurl,v.failcode,
	avr.index_order,v.attribute,v.xcode_state,avr.state,v.status,avr.ctime,avr.mtime,v.dimensions FROM archive_video_relation avr JOIN video v on avr.cid = v.id
	WHERE cid=? ORDER BY id LIMIT 1`
	_newVideosCidSQL = `SELECT avr.id,v.filename,avr.cid,avr.aid,avr.title,avr.description,v.src_type,v.duration,v.filesize,v.resolutions,v.playurl,v.failcode,
	avr.index_order,v.attribute,v.xcode_state,avr.state,v.status,avr.ctime,avr.mtime,v.dimensions FROM archive_video_relation avr JOIN video v on avr.cid = v.id
	WHERE cid IN (%s)`
	_newVideosFnSQL = `SELECT avr.id,v.filename,avr.cid,avr.aid,avr.title,avr.description,v.src_type,v.duration,v.filesize,v.resolutions,v.playurl,v.failcode,
	avr.index_order,v.attribute,v.xcode_state,avr.state,v.status,avr.ctime,avr.mtime,v.dimensions FROM archive_video_relation avr JOIN video v on avr.cid = v.id
	WHERE hash64 in (%s) AND filename in (%s)`
	_newVidReasonSQL     = `SELECT ava.vid,ava.reason FROM archive_video_audit ava LEFT JOIN archive_video_relation avr ON ava.vid=avr.id WHERE ava.aid=? AND avr.state!=-100`
	_newVideosTimeoutSQL = `SELECT id ,filename,ctime,mtime from video WHERE hash64 in (%s) AND filename in (%s)`
)
// TxAddVideoCid inserts a video row with a caller-supplied id (v.Cid) inside
// tx (INSERT IGNORE leaves an existing row untouched) and returns the cid.
// NOTE(review): on an ignored duplicate, LastInsertId is 0 — confirm callers
// handle that case.
func (d *Dao) TxAddVideoCid(tx *sql.Tx, v *archive.Video) (cid int64, err error) {
	// hash64 of the filename is stored for fast filename lookups
	hash64 := int64(farm.Hash64([]byte(v.Filename)))
	res, err := tx.Exec(_inVideoCidSQL, v.Cid, v.Filename, v.SrcType, v.Resolutions, v.Playurl, v.Status, v.XcodeState, v.Duration, v.Filesize, v.Attribute, v.FailCode, hash64)
	if err != nil {
		log.Error("d.inVideoCid.Exec error(%v)", err)
		return
	}
	cid, err = res.LastInsertId()
	return
}
// AddNewVideo inserts a brand-new video row (non-transactional) and
// returns its generated cid.
func (d *Dao) AddNewVideo(c context.Context, v *archive.Video) (cid int64, err error) {
	fnHash := int64(farm.Hash64([]byte(v.Filename)))
	res, err := d.db.Exec(c, _inNewVideoSQL, v.Filename, v.SrcType, v.Resolutions, v.Playurl, v.Status, v.XcodeState, v.Duration, v.Filesize, v.Attribute, v.FailCode, fnHash)
	if err != nil {
		log.Error("d.inNewVideo.Exec error(%v)", err)
		return
	}
	return res.LastInsertId()
}
// TxAddNewVideo inserts a brand-new video row inside tx and returns
// its generated cid.
func (d *Dao) TxAddNewVideo(tx *sql.Tx, v *archive.Video) (cid int64, err error) {
	fnHash := int64(farm.Hash64([]byte(v.Filename)))
	res, err := tx.Exec(_inNewVideoSQL, v.Filename, v.SrcType, v.Resolutions, v.Playurl, v.Status, v.XcodeState, v.Duration, v.Filesize, v.Attribute, v.FailCode, fnHash)
	if err != nil {
		log.Error("tx.inNewVideo.Exec error(%v)", err)
		return
	}
	return res.LastInsertId()
}
// TxAddVideoRelation inserts an archive_video_relation row inside tx
// and returns the generated vid.
func (d *Dao) TxAddVideoRelation(tx *sql.Tx, v *archive.Video) (vid int64, err error) {
	res, err := tx.Exec(_inVideoRelationSQL, v.ID, v.Aid, v.Cid, v.Title, v.Desc, v.Index, v.CTime)
	if err != nil {
		log.Error("d.inVideoRelation.Exec error(%v)", err)
		return
	}
	return res.LastInsertId()
}
// TxUpVideoRelation updates a relation row (title/description/order)
// by aid and cid, re-opening it, and reports affected rows.
func (d *Dao) TxUpVideoRelation(tx *sql.Tx, v *archive.Video) (rows int64, err error) {
	res, err := tx.Exec(_upVideoRelationSQL, v.Title, v.Desc, v.Index, archive.VideoStatusOpen, v.Aid, v.Cid)
	if err != nil {
		log.Error("d.upVideoRelation.Exec(%v) error(%v)", v, err)
		return
	}
	return res.RowsAffected()
}
// TxUpRelationState sets the relation state for the (aid, cid) pair
// and reports affected rows.
func (d *Dao) TxUpRelationState(tx *sql.Tx, aid, cid int64, state int16) (rows int64, err error) {
	res, err := tx.Exec(_upRelationStateSQL, state, aid, cid)
	if err != nil {
		log.Error("d.upRelationState.Exec(%d,%d,%d) error(%v)", aid, cid, state, err)
		return
	}
	return res.RowsAffected()
}
// TxUpVdoStatus sets the status column of a video row by cid and
// reports affected rows.
func (d *Dao) TxUpVdoStatus(tx *sql.Tx, cid int64, status int16) (rows int64, err error) {
	res, err := tx.Exec(_upVideoStatusSQL, status, cid)
	if err != nil {
		log.Error("d.upVideoStatus.Exec(%d,%d) error(%v)", cid, status, err)
		return
	}
	return res.RowsAffected()
}
// TxUpNewVideo updates src_type, status and xcode_state of a video row
// by cid and reports affected rows.
func (d *Dao) TxUpNewVideo(tx *sql.Tx, v *archive.Video) (rows int64, err error) {
	res, err := tx.Exec(_upNewVideoSQL, v.SrcType, v.Status, v.XcodeState, v.Cid)
	if err != nil {
		log.Error("d.upSimNewVideo.Exec(%s,%d,%d,%d) error(%v)", v.SrcType, v.Status, v.XcodeState, v.Cid, err)
		return
	}
	return res.RowsAffected()
}
// NewVideoFn looks up a video row by filename (indexed through its
// farm-hash). A missing row yields (nil, nil); any other scan failure
// is logged and returned with a non-nil (but empty) video.
func (d *Dao) NewVideoFn(c context.Context, filename string) (v *archive.Video, err error) {
	fnHash := int64(farm.Hash64([]byte(filename)))
	v = &archive.Video{}
	var dims string
	err = d.rddb.QueryRow(c, _newVideoFnSQL, fnHash, filename).Scan(&v.Cid, &v.Filename, &v.SrcType, &v.Resolutions, &v.Playurl, &v.Status, &v.XcodeState, &v.Duration, &v.Filesize, &v.Attribute, &v.FailCode, &v.CTime, &v.MTime, &dims)
	switch {
	case err == sql.ErrNoRows:
		v, err = nil, nil
	case err != nil:
		log.Error("row.Scan error(%v)", err)
	default:
		v.Dimension, _ = d.parseDimensions(dims)
	}
	return
}
// NewVideoByFn looks up the joined relation+video record by filename
// (indexed through its farm-hash). A missing row yields (nil, nil).
func (d *Dao) NewVideoByFn(c context.Context, filename string) (v *archive.Video, err error) {
	fnHash := int64(farm.Hash64([]byte(filename)))
	v = &archive.Video{}
	var dims string
	err = d.rddb.QueryRow(c, _newVideoByFnSQL, fnHash, filename).Scan(&v.ID, &v.Filename, &v.Cid, &v.Aid, &v.Title, &v.Desc, &v.SrcType, &v.Duration, &v.Filesize, &v.Resolutions,
		&v.Playurl, &v.FailCode, &v.Index, &v.Attribute, &v.XcodeState, &v.Status, &v.CTime, &v.MTime, &dims)
	switch {
	case err == sql.ErrNoRows:
		v, err = nil, nil
	case err != nil:
		log.Error("row.Scan error(%v)", err)
	default:
		v.Dimension, _ = d.parseDimensions(dims)
	}
	return
}
// NewCidsByFns resolves existing cids for a batch of filenames.
// It returns a filename -> cid map; filenames with no matching video
// row are simply absent from the map.
func (d *Dao) NewCidsByFns(c context.Context, nvs []*archive.Video) (cids map[string]int64, err error) {
	cids = make(map[string]int64)
	// Guard: with no input the original code panicked on
	// buf.Truncate(buf.Len()-1) and would have built an invalid
	// "IN ()" clause anyway.
	if len(nvs) == 0 {
		return
	}
	var (
		buf     bytes.Buffer
		hash64s []int64
	)
	for _, v := range nvs {
		buf.WriteByte('\'')
		buf.WriteString(v.Filename)
		buf.WriteString("',")
		hash64s = append(hash64s, int64(farm.Hash64([]byte(v.Filename))))
	}
	buf.Truncate(buf.Len() - 1) // drop trailing comma
	rows, err := d.rddb.Query(c, fmt.Sprintf(_newVideoDataCidsFnSQL, xstr.JoinInts(hash64s), buf.String()))
	if err != nil {
		log.Error("db.Query() error(%v)", err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		var (
			cid      int64
			filename string
		)
		if err = rows.Scan(&cid, &filename); err != nil {
			log.Error("rows.Scan error(%v)", err)
			return
		}
		cids[filename] = cid
	}
	// Surface errors hit during iteration (previously dropped).
	err = rows.Err()
	return
}
// SimpleArcVideos returns the lightweight per-video list of an archive
// straight from archive_video_relation.
func (d *Dao) SimpleArcVideos(c context.Context, aid int64) (vs []*archive.SimpleVideo, err error) {
	rows, err := d.rddb.Query(c, _newsimpleArcVideoSQL, aid)
	if err != nil {
		log.Error("d.videosStmt.Query(%d) error(%v)", aid, err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		sv := &archive.SimpleVideo{}
		if err = rows.Scan(&sv.Cid, &sv.Title, &sv.Index, &sv.Status, &sv.MTime); err != nil {
			log.Error("rows.Scan error(%v)", err)
			return
		}
		vs = append(vs, sv)
	}
	return
}
// NewVideos returns all videos of an archive (relation JOIN video),
// ordered by index_order. The relation state and video status are
// folded into a single Status: a deleted relation wins, otherwise the
// video's own status is used.
func (d *Dao) NewVideos(c context.Context, aid int64) (vs []*archive.Video, err error) {
	rows, err := d.rddb.Query(c, _newVideosSQL, aid)
	if err != nil {
		log.Error("d.videosStmt.Query(%d) error(%v)", aid, err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		v := &archive.Video{}
		var (
			avrState, vState int16
			dimStr           string
		)
		if err = rows.Scan(&v.ID, &v.Filename, &v.Cid, &v.Aid, &v.Title, &v.Desc, &v.SrcType, &v.Duration, &v.Filesize, &v.Resolutions,
			&v.Playurl, &v.FailCode, &v.Index, &v.Attribute, &v.XcodeState, &avrState, &vState, &v.CTime, &v.MTime, &dimStr); err != nil {
			log.Error("rows.Scan error(%v)", err)
			return
		}
		v.Dimension, _ = d.parseDimensions(dimStr)
		// 2 state map to 1
		if avrState == archive.VideoStatusDelete {
			v.Status = archive.VideoStatusDelete
		} else {
			v.Status = vState
		}
		vs = append(vs, v)
	}
	// Surface errors hit during iteration (previously dropped).
	err = rows.Err()
	return
}
// NewVideoMap returns the videos of an archive indexed two ways:
// vm by filename and cvm by cid. The relation state and video status
// are folded into one Status (deleted relation wins).
func (d *Dao) NewVideoMap(c context.Context, aid int64) (vm map[string]*archive.Video, cvm map[int64]*archive.Video, err error) {
	rows, err := d.rddb.Query(c, _newVideosSQL, aid)
	if err != nil {
		log.Error("d.videosStmt.Query(%d) error(%v)", aid, err)
		return
	}
	defer rows.Close()
	vm = make(map[string]*archive.Video)
	cvm = make(map[int64]*archive.Video)
	for rows.Next() {
		v := &archive.Video{}
		var (
			avrState, vState int16
			dimStr           string
		)
		if err = rows.Scan(&v.ID, &v.Filename, &v.Cid, &v.Aid, &v.Title, &v.Desc, &v.SrcType, &v.Duration, &v.Filesize, &v.Resolutions,
			&v.Playurl, &v.FailCode, &v.Index, &v.Attribute, &v.XcodeState, &avrState, &vState, &v.CTime, &v.MTime, &dimStr); err != nil {
			log.Error("rows.Scan error(%v)", err)
			return
		}
		v.Dimension, _ = d.parseDimensions(dimStr)
		// 2 state map to 1
		if avrState == archive.VideoStatusDelete {
			v.Status = archive.VideoStatusDelete
		} else {
			v.Status = vState
		}
		cvm[v.Cid] = v
		vm[v.Filename] = v
	}
	// Surface errors hit during iteration (previously dropped).
	err = rows.Err()
	return
}
// NewVideoByCID returns the first relation+video record for a cid.
// A missing row yields (nil, nil). The relation state and video status
// are folded into one Status (deleted relation wins).
func (d *Dao) NewVideoByCID(c context.Context, cid int64) (v *archive.Video, err error) {
	row := d.rddb.QueryRow(c, _newvideoCidSQL, cid)
	v = &archive.Video{}
	var (
		avrState, vState int16
		dimStr           string
	)
	if err = row.Scan(&v.ID, &v.Filename, &v.Cid, &v.Aid, &v.Title, &v.Desc, &v.SrcType, &v.Duration, &v.Filesize, &v.Resolutions,
		&v.Playurl, &v.FailCode, &v.Index, &v.Attribute, &v.XcodeState, &avrState, &vState, &v.CTime, &v.MTime, &dimStr); err != nil {
		if err == sql.ErrNoRows {
			// "no row" is not an error for callers. Previously this
			// branch still fell through and logged a nil error.
			v = nil
			err = nil
		} else {
			log.Error("row.Scan error(%v)", err)
		}
		return
	}
	v.Dimension, _ = d.parseDimensions(dimStr)
	// 2 state map to 1
	if avrState == archive.VideoStatusDelete {
		v.Status = archive.VideoStatusDelete
	} else {
		v.Status = vState
	}
	return
}
// NewVideosByCID multi-gets videos by cids, grouped as aid -> cid ->
// video. The relation state and video status are folded into one
// Status (deleted relation wins).
func (d *Dao) NewVideosByCID(c context.Context, cids []int64) (vm map[int64]map[int64]*archive.Video, err error) {
	vm = make(map[int64]map[int64]*archive.Video)
	// Guard: an empty cid list would render an invalid "IN ()" clause.
	if len(cids) == 0 {
		return
	}
	rows, err := d.rddb.Query(c, fmt.Sprintf(_newVideosCidSQL, xstr.JoinInts(cids)))
	if err != nil {
		log.Error("db.Query() error(%v)", err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		var (
			avrState, vState int16
			dimStr           string
		)
		v := &archive.Video{}
		if err = rows.Scan(&v.ID, &v.Filename, &v.Cid, &v.Aid, &v.Title, &v.Desc, &v.SrcType, &v.Duration, &v.Filesize, &v.Resolutions,
			&v.Playurl, &v.FailCode, &v.Index, &v.Attribute, &v.XcodeState, &avrState, &vState, &v.CTime, &v.MTime, &dimStr); err != nil {
			log.Error("rows.Scan error(%v)", err)
			return
		}
		v.Dimension, _ = d.parseDimensions(dimStr)
		// 2 state map to 1
		if avrState == archive.VideoStatusDelete {
			v.Status = archive.VideoStatusDelete
		} else {
			v.Status = vState
		}
		if vv, ok := vm[v.Aid]; !ok {
			vm[v.Aid] = map[int64]*archive.Video{
				v.Cid: v,
			}
		} else {
			vv[v.Cid] = v
		}
	}
	// Surface errors hit during iteration (previously dropped).
	err = rows.Err()
	return
}
// NewVideosByFn multi-gets videos by filenames, grouped as aid ->
// filename -> video. The relation state and video status are folded
// into one Status (deleted relation wins).
func (d *Dao) NewVideosByFn(c context.Context, fns []string) (vm map[int64]map[string]*archive.Video, err error) {
	vm = make(map[int64]map[string]*archive.Video)
	// Guard: with no filenames the original code panicked on
	// buf.Truncate(buf.Len()-1) and would have built "IN ()" anyway.
	if len(fns) == 0 {
		return
	}
	var (
		buf     bytes.Buffer
		hash64s []int64
	)
	for _, fn := range fns {
		buf.WriteByte('\'')
		buf.WriteString(fn)
		buf.WriteString("',")
		hash64s = append(hash64s, int64(farm.Hash64([]byte(fn))))
	}
	buf.Truncate(buf.Len() - 1) // drop trailing comma
	rows, err := d.rddb.Query(c, fmt.Sprintf(_newVideosFnSQL, xstr.JoinInts(hash64s), buf.String()))
	if err != nil {
		log.Error("db.Query() error(%v)", err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		var (
			avrState, vState int16
			dimStr           string
		)
		v := &archive.Video{}
		if err = rows.Scan(&v.ID, &v.Filename, &v.Cid, &v.Aid, &v.Title, &v.Desc, &v.SrcType, &v.Duration, &v.Filesize, &v.Resolutions,
			&v.Playurl, &v.FailCode, &v.Index, &v.Attribute, &v.XcodeState, &avrState, &vState, &v.CTime, &v.MTime, &dimStr); err != nil {
			log.Error("rows.Scan error(%v)", err)
			return
		}
		v.Dimension, _ = d.parseDimensions(dimStr)
		// 2 state map to 1
		if avrState == archive.VideoStatusDelete {
			v.Status = archive.VideoStatusDelete
		} else {
			v.Status = vState
		}
		if vv, ok := vm[v.Aid]; !ok {
			vm[v.Aid] = map[string]*archive.Video{
				v.Filename: v,
			}
		} else {
			vv[v.Filename] = v
		}
	}
	// Surface errors hit during iteration (previously dropped).
	err = rows.Err()
	return
}
// CheckNewVideosTimeout reports whether any of the given filenames was
// created longer ago than archive.VideoFilenameTimeout (48h per the
// original comment); the first offending filename is returned.
func (d *Dao) CheckNewVideosTimeout(c context.Context, fns []string) (has bool, filename string, err error) {
	// Guard: with no filenames the original code panicked on
	// buf.Truncate(buf.Len()-1) and would have built "IN ()" anyway.
	if len(fns) == 0 {
		return
	}
	var (
		buf     bytes.Buffer
		hash64s []int64
	)
	for _, fn := range fns {
		buf.WriteByte('\'')
		buf.WriteString(fn)
		buf.WriteString("',")
		hash64s = append(hash64s, int64(farm.Hash64([]byte(fn))))
	}
	buf.Truncate(buf.Len() - 1) // drop trailing comma
	rows, err := d.rddb.Query(c, fmt.Sprintf(_newVideosTimeoutSQL, xstr.JoinInts(hash64s), buf.String()))
	if err != nil {
		log.Error("db.Query() error(%v)", err)
		return
	}
	defer rows.Close()
	now := time.Now().Unix()
	for rows.Next() {
		v := &archive.VideoFn{}
		if err = rows.Scan(&v.Cid, &v.Filename, &v.CTime, &v.MTime); err != nil {
			log.Error("rows.Scan error(%v)", err)
			return
		}
		if now-v.CTime.Time().Unix() > archive.VideoFilenameTimeout {
			log.Error("this video filename(%v) timeout (%+v)", v.Filename, v)
			has = true
			filename = v.Filename
			err = nil
			return
		}
	}
	// Surface errors hit during iteration (previously dropped).
	err = rows.Err()
	return
}
// NewVideosReason returns the audit reason per vid for an archive,
// excluding relations in the deleted (-100) state.
func (d *Dao) NewVideosReason(c context.Context, aid int64) (res map[int64]string, err error) {
	rows, err := d.rddb.Query(c, _newVidReasonSQL, aid)
	if err != nil {
		log.Error("d.vdoRsnStmt.Query(%d)|error(%v)", aid, err)
		return
	}
	defer rows.Close()
	res = make(map[int64]string)
	for rows.Next() {
		var (
			vid    int64
			reason string
		)
		if err = rows.Scan(&vid, &reason); err != nil {
			log.Error("rows.Scan error(%v)", err)
			return
		}
		res[vid] = reason
	}
	// Surface errors hit during iteration (previously dropped).
	err = rows.Err()
	return
}
// parseDimensions parses a "width,height,rotate" triple into a
// Dimension. Empty, "0,0,0" or malformed-length input yields a
// zero-valued Dimension; only a SplitInts failure returns an error.
func (d *Dao) parseDimensions(dim string) (dimensions *archive.Dimension, err error) {
	dimensions = &archive.Dimension{}
	if dim == "" || dim == "0,0,0" {
		return
	}
	var parts []int64
	if parts, err = xstr.SplitInts(dim); err != nil {
		log.Error("d.parseDimensions() xstr.SplitInts(%s) error(%v)", dim, err)
		return
	}
	if len(parts) == 3 {
		dimensions = &archive.Dimension{
			Width:  parts[0],
			Height: parts[1],
			Rotate: parts[2],
		}
	}
	return
}

View File

@@ -0,0 +1,308 @@
package archive
import (
"context"
"testing"
. "github.com/smartystreets/goconvey/convey"
"go-common/app/service/main/videoup/model/archive"
)
// TestDao_TxAddVideoCid exercises TxAddVideoCid in a transaction,
// committing on success and rolling back on failure.
func TestDao_TxAddVideoCid(t *testing.T) {
	tx, _ := d.BeginTran(context.Background())
	v := &archive.Video{Aid: 23333, Cid: 12121, Title: "sssss"}
	Convey("TxAddVideoCid", t, func(ctx C) {
		_, err := d.TxAddVideoCid(tx, v)
		if err != nil {
			tx.Rollback()
		} else {
			tx.Commit()
		}
		So(err, ShouldBeNil)
	})
}
// TestDao_AddNewVideo checks AddNewVideo succeeds for a minimal video.
func TestDao_AddNewVideo(t *testing.T) {
	v := &archive.Video{Aid: 23333, Cid: 12121, Title: "sssss"}
	Convey("AddNewVideo", t, func(ctx C) {
		_, err := d.AddNewVideo(context.Background(), v)
		So(err, ShouldBeNil)
	})
}
func TestDao_TxAddNewVideo(t *testing.T) {
var (
c = context.Background()
v = &archive.Video{
Aid: 23333,
Cid: 12121,
Title: "sssss",
}
tx, _ = d.BeginTran(c)
)
Convey("TxAddNewVideo", t, func(ctx C) {
_, err := d.TxAddNewVideo(tx, v)
if err != nil {
tx.Rollback()
} else {
tx.Commit()
}
So(err, ShouldBeNil)
})
}
func TestDao_TxAddVideoRelation(t *testing.T) {
var (
c = context.Background()
v = &archive.Video{
Aid: 23333,
Cid: 12121,
Title: "sssss",
}
tx, _ = d.BeginTran(c)
)
Convey("TxAddVideoRelation", t, func(ctx C) {
_, err := d.TxAddVideoRelation(tx, v)
if err != nil {
tx.Rollback()
} else {
tx.Commit()
}
So(err, ShouldBeNil)
})
}
func TestDao_TxUpVideoRelation(t *testing.T) {
var (
c = context.Background()
v = &archive.Video{
Aid: 23333,
Cid: 12121,
Title: "sssss",
}
tx, _ = d.BeginTran(c)
)
Convey("TxUpVideoRelation", t, func(ctx C) {
_, err := d.TxUpVideoRelation(tx, v)
if err != nil {
tx.Rollback()
} else {
tx.Commit()
}
So(err, ShouldBeNil)
})
}
// TestDao_TxUpRelationState exercises TxUpRelationState in a
// transaction.
func TestDao_TxUpRelationState(t *testing.T) {
	tx, _ := d.BeginTran(context.Background())
	Convey("TxUpRelationState", t, func(ctx C) {
		_, err := d.TxUpRelationState(tx, 23333, 1212, 0)
		if err != nil {
			tx.Rollback()
		} else {
			tx.Commit()
		}
		So(err, ShouldBeNil)
	})
}
// TestDao_TxUpVdoStatus exercises TxUpVdoStatus in a transaction.
func TestDao_TxUpVdoStatus(t *testing.T) {
	tx, _ := d.BeginTran(context.Background())
	Convey("TxUpVdoStatus", t, func(ctx C) {
		_, err := d.TxUpVdoStatus(tx, 1212, 0)
		if err != nil {
			tx.Rollback()
		} else {
			tx.Commit()
		}
		So(err, ShouldBeNil)
	})
}
// TestDao_TxUpNewVideo exercises TxUpNewVideo in a transaction.
func TestDao_TxUpNewVideo(t *testing.T) {
	tx, _ := d.BeginTran(context.Background())
	v := &archive.Video{Aid: 23333, Cid: 12121, Title: "sssss"}
	Convey("TxUpNewVideo", t, func(ctx C) {
		_, err := d.TxUpNewVideo(tx, v)
		if err != nil {
			tx.Rollback()
		} else {
			tx.Commit()
		}
		So(err, ShouldBeNil)
	})
}
// TestDao_NewCidsByFns checks NewCidsByFns succeeds for a filename
// that may or may not exist.
func TestDao_NewCidsByFns(t *testing.T) {
	vs := []*archive.Video{{Filename: "1212121243gf"}}
	Convey("NewCidsByFns", t, func(ctx C) {
		_, err := d.NewCidsByFns(context.Background(), vs)
		So(err, ShouldBeNil)
	})
}
// TestDao_CheckNewVideosTimeout checks CheckNewVideosTimeout succeeds.
func TestDao_CheckNewVideosTimeout(t *testing.T) {
	fs := []string{"1212121243gf"}
	Convey("CheckNewVideosTimeout", t, func(ctx C) {
		_, _, err := d.CheckNewVideosTimeout(context.Background(), fs)
		So(err, ShouldBeNil)
	})
}
// TestDao_ParseDimensions checks parseDimensions accepts a valid
// "w,h,rotate" triple.
func TestDao_ParseDimensions(t *testing.T) {
	Convey("CheckNewVideosTimeout", t, func(ctx C) {
		_, err := d.parseDimensions("1,2,3")
		So(err, ShouldBeNil)
	})
}
// TestArchiveNewVideoFn checks NewVideoFn returns no error for an
// arbitrary filename.
func TestArchiveNewVideoFn(t *testing.T) {
	Convey("NewVideoFn", t, func(ctx C) {
		_, err := d.NewVideoFn(context.Background(), "23333333333")
		ctx.Convey("Then err should be nil.v should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
		})
	})
}
// TestArchiveNewVideoByFn checks NewVideoByFn returns no error for an
// arbitrary filename.
func TestArchiveNewVideoByFn(t *testing.T) {
	Convey("NewVideoByFn", t, func(ctx C) {
		_, err := d.NewVideoByFn(context.Background(), "23333333333")
		ctx.Convey("Then err should be nil.v should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
		})
	})
}
// TestArchiveSimpleArcVideos checks SimpleArcVideos returns no error.
func TestArchiveSimpleArcVideos(t *testing.T) {
	Convey("SimpleArcVideos", t, func(ctx C) {
		_, err := d.SimpleArcVideos(context.Background(), int64(2333333))
		ctx.Convey("Then err should be nil.vs should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
		})
	})
}
// TestArchiveNewVideos checks NewVideos returns no error.
func TestArchiveNewVideos(t *testing.T) {
	Convey("NewVideos", t, func(ctx C) {
		_, err := d.NewVideos(context.Background(), int64(2333333))
		ctx.Convey("Then err should be nil.vs should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
		})
	})
}
// TestArchiveNewVideoMap checks NewVideoMap returns no error.
func TestArchiveNewVideoMap(t *testing.T) {
	Convey("NewVideoMap", t, func(ctx C) {
		_, _, err := d.NewVideoMap(context.Background(), int64(23333))
		ctx.Convey("Then err should be nil.vm,cvm should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
		})
	})
}
// TestArchiveNewVideoByCID checks NewVideoByCID returns no error.
func TestArchiveNewVideoByCID(t *testing.T) {
	Convey("NewVideoByCID", t, func(ctx C) {
		_, err := d.NewVideoByCID(context.Background(), int64(23333))
		ctx.Convey("Then err should be nil.v should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
		})
	})
}
// TestArchiveNewVideosByCID checks NewVideosByCID returns no error.
func TestArchiveNewVideosByCID(t *testing.T) {
	Convey("NewVideosByCID", t, func(ctx C) {
		_, err := d.NewVideosByCID(context.Background(), []int64{23333})
		ctx.Convey("Then err should be nil.vm should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
		})
	})
}
// TestArchiveNewVideosByFn checks NewVideosByFn returns no error.
func TestArchiveNewVideosByFn(t *testing.T) {
	Convey("NewVideosByFn", t, func(ctx C) {
		_, err := d.NewVideosByFn(context.Background(), []string{"23333"})
		ctx.Convey("Then err should be nil.vm should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
		})
	})
}
// TestArchiveNewVideosReason checks NewVideosReason returns no error.
func TestArchiveNewVideosReason(t *testing.T) {
	Convey("NewVideosReason", t, func(ctx C) {
		_, err := d.NewVideosReason(context.Background(), int64(23333))
		ctx.Convey("Then err should be nil.res should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
		})
	})
}

View File

@@ -0,0 +1,87 @@
package archive
import (
"context"
"go-common/app/service/main/videoup/model/archive"
"go-common/library/database/sql"
"go-common/library/log"
"time"
)
// Porder (私单, paid private-order) business SQL statements.
const (
	// upsert a porder row; show_front defaults to 1 and state to 0 on insert.
	_inPorderSQL = `INSERT INTO archive_porder (aid,industry_id,brand_id,brand_name,official,show_type,show_front,advertiser,agent,state) VALUES (?,?,?,?,?,?,1,?,?,0) ON DUPLICATE KEY UPDATE industry_id=?,brand_id=?,brand_name=?,official=?,show_type=?,advertiser=?,agent=?`
	// fetch the porder row of a single archive.
	_selPorderSQL = `select industry_id,brand_id,brand_name,official,show_type,advertiser,agent,state from archive_porder where aid=?`
	// list active porder config entries ordered by rank/type.
	_pconfigSQL = `select id, type, name from porder_config where state = 0 order by rank desc,type asc`
	// list porder archives created within a ctime window, newest first.
	_parcsSQL = `select aid,industry_id,brand_id,brand_name,official,show_type,advertiser,agent,state,show_front,ctime,mtime from archive_porder WHERE ctime BETWEEN ? AND ? order by id desc`
)
// TxUpPorder inserts or refreshes the archive_porder row of aid.
// When the porder is marked official (Official == 1), the brand name
// is blanked before writing — presumably the brand_id is authoritative
// for official porders; confirm with the caller.
// NOTE: this mutates ap.Porder.BrandName as a side effect.
func (d *Dao) TxUpPorder(tx *sql.Tx, aid int64, ap *archive.ArcParam) (rows int64, err error) {
	p := ap.Porder
	if p.Official == 1 {
		p.BrandName = ""
	}
	res, err := tx.Exec(_inPorderSQL, aid, p.IndustryID, p.BrandID, p.BrandName, p.Official, p.ShowType, p.Advertiser, p.Agent, p.IndustryID, p.BrandID, p.BrandName, p.Official, p.ShowType, p.Advertiser, p.Agent)
	if err != nil {
		log.Error("d.TxUpPorder.Exec error(%v)", err)
		return
	}
	return res.RowsAffected()
}
// Porder fetches the porder record of an archive. A missing row yields
// a zero-valued Porder with nil error.
func (d *Dao) Porder(c context.Context, aid int64) (p *archive.Porder, err error) {
	p = &archive.Porder{}
	err = d.rddb.QueryRow(c, _selPorderSQL, aid).Scan(&p.IndustryID, &p.BrandID, &p.BrandName, &p.Official, &p.ShowType, &p.Advertiser, &p.Agent, &p.State)
	switch {
	case err == sql.ErrNoRows:
		err = nil
	case err != nil:
		log.Error("row.Scan error(%v)", err)
	}
	return
}
// PorderCfgList returns all active porder config entries, ordered by
// rank (desc) then type (asc) per _pconfigSQL.
func (d *Dao) PorderCfgList(c context.Context) (pcfgs []*archive.Pconfig, err error) {
	rows, err := d.rddb.Query(c, _pconfigSQL)
	if err != nil {
		log.Error("d.db.Query(%s)|error(%v)", _pconfigSQL, err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		cfg := &archive.Pconfig{}
		if err = rows.Scan(&cfg.ID, &cfg.Tp, &cfg.Name); err != nil {
			log.Error("rows.Scan error(%v)", err)
			return
		}
		pcfgs = append(pcfgs, cfg)
	}
	// Surface errors hit during iteration (previously dropped).
	err = rows.Err()
	return
}
// PorderArcList lists porder archives whose ctime falls in
// [begin, end], newest first. The result is non-nil (empty slice) on
// success and nil when a scan fails.
func (d *Dao) PorderArcList(c context.Context, begin, end time.Time) (res []*archive.PorderArc, err error) {
	res = []*archive.PorderArc{}
	rows, err := d.rddb.Query(c, _parcsSQL, begin, end)
	if err != nil {
		log.Error("PorderArcList error(%v)", err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		pa := &archive.PorderArc{}
		if err = rows.Scan(&pa.AID, &pa.IndustryID, &pa.BrandID, &pa.BrandName, &pa.Official, &pa.ShowType, &pa.Advertiser, &pa.Agent, &pa.State, &pa.ShowFront, &pa.Ctime, &pa.Mtime); err != nil {
			log.Error("row.Scan() error(%v)", err)
			res = nil
			return
		}
		res = append(res, pa)
	}
	err = rows.Err()
	return
}

View File

@@ -0,0 +1,68 @@
package archive
import (
"context"
"testing"
"time"
. "github.com/smartystreets/goconvey/convey"
"go-common/app/service/main/videoup/model/archive"
)
// TestDao_TxUpPorder exercises TxUpPorder in a transaction.
func TestDao_TxUpPorder(t *testing.T) {
	tx, _ := d.BeginTran(context.Background())
	ap := &archive.ArcParam{Porder: &archive.Porder{}}
	Convey("TxUpPorder", t, func(ctx C) {
		_, err := d.TxUpPorder(tx, 23333, ap)
		if err != nil {
			tx.Rollback()
		} else {
			tx.Commit()
		}
		So(err, ShouldBeNil)
	})
}
// TestArchivePorder checks Porder returns no error.
func TestArchivePorder(t *testing.T) {
	Convey("Porder", t, func(ctx C) {
		_, err := d.Porder(context.Background(), int64(23333))
		ctx.Convey("Then err should be nil.p should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
		})
	})
}
// TestArchivePorderCfgList checks PorderCfgList returns no error.
func TestArchivePorderCfgList(t *testing.T) {
	Convey("PorderCfgList", t, func(ctx C) {
		_, err := d.PorderCfgList(context.Background())
		ctx.Convey("Then err should be nil.pcfgs should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
		})
	})
}
// TestArchivePorderArcList checks PorderArcList returns no error for a
// degenerate (now, now) window.
func TestArchivePorderArcList(t *testing.T) {
	now := time.Now()
	Convey("PorderArcList", t, func(ctx C) {
		_, err := d.PorderArcList(context.Background(), now, now)
		ctx.Convey("Then err should be nil.res should not be nil.", func(ctx C) {
			ctx.So(err, ShouldBeNil)
		})
	})
}

View File

@@ -0,0 +1,66 @@
package archive
import (
"context"
"fmt"
"go-common/library/cache/redis"
"go-common/library/log"
)
const (
	// redis key holding the video traffic-jam time (seconds).
	_videoJamTime = "va_v_jam_time"
	// per-archive edit-lock key template (formatted with the aid).
	_editlock = "edit_lock_aid_%d"
)
// lockKey builds the redis edit-lock key for an archive id.
func lockKey(aid int64) string {
	return fmt.Sprintf(_editlock, aid)
}
// GetVideoJam returns the current video traffic-jam time in seconds.
// NOTE(review): a missing key surfaces as redis.ErrNil, which is
// logged and returned to the caller as an error.
func (d *Dao) GetVideoJam(c context.Context) (seconds int, err error) {
	conn := d.redis.Get(c)
	defer conn.Close()
	seconds, err = redis.Int(conn.Do("GET", _videoJamTime))
	if err != nil {
		log.Error("conn.Do(GET,%s) error(%v)", _videoJamTime, err)
	}
	return
}
// SetNXLock tries to acquire the per-archive edit lock via SETNX and,
// on success, bounds its lifetime with EXPIRE of `times` seconds so a
// crashed holder cannot wedge the lock forever.
// res reports whether the lock was obtained.
// NOTE(review): on a SETNX transport error res is forced to true
// before returning — presumably so callers treat "unknown" as
// "already locked"; confirm intent. SETNX/EXPIRE is also not atomic,
// so a crash between the two leaves a lock with no TTL.
func (d *Dao) SetNXLock(c context.Context, aid int64, times int64) (res bool, err error) {
	var (
		key  = lockKey(aid)
		conn = d.redis.Get(c)
	)
	defer conn.Close()
	if res, err = redis.Bool(conn.Do("SETNX", key, "1")); err != nil {
		if err == redis.ErrNil {
			// ErrNil is treated as "not acquired, no error".
			err = nil
		} else {
			res = true
			log.Error("conn.Do(SETNX(%s)) error(%v)", key, err)
			return
		}
	}
	if res {
		// Lock taken: attach the TTL.
		if _, err = redis.Bool(conn.Do("EXPIRE", key, times)); err != nil {
			log.Error("conn.Do(EXPIRE, %s, %d) error(%v)", key, times, err)
			return
		}
	}
	return
}
// DelLock releases the per-archive edit lock by deleting its key.
func (d *Dao) DelLock(c context.Context, aid int64) (err error) {
	key := lockKey(aid)
	conn := d.redis.Get(c)
	defer conn.Close()
	if _, err = conn.Do("DEL", key); err != nil {
		log.Error("conn.Do(del,%v) err(%v)", key, err)
	}
	return
}

View File

@@ -0,0 +1,61 @@
package archive
import (
"context"
"testing"
"github.com/smartystreets/goconvey/convey"
)
// TestArchivelockKey checks lockKey produces a non-empty key.
func TestArchivelockKey(t *testing.T) {
	convey.Convey("lockKey", t, func(ctx convey.C) {
		key := lockKey(int64(0))
		ctx.Convey("Then p1 should not be nil.", func(ctx convey.C) {
			ctx.So(key, convey.ShouldNotBeNil)
		})
	})
}
// TestArchiveGetVideoJam checks GetVideoJam returns a value without
// error.
func TestArchiveGetVideoJam(t *testing.T) {
	convey.Convey("GetVideoJam", t, func(ctx convey.C) {
		seconds, err := d.GetVideoJam(context.Background())
		ctx.Convey("Then err should be nil.seconds should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(seconds, convey.ShouldNotBeNil)
		})
	})
}
// TestArchiveSetNXLock checks SetNXLock acquires without error.
func TestArchiveSetNXLock(t *testing.T) {
	convey.Convey("SetNXLock", t, func(ctx convey.C) {
		res, err := d.SetNXLock(context.Background(), int64(0), int64(0))
		ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(res, convey.ShouldNotBeNil)
		})
	})
}
// TestArchiveDelLock checks DelLock releases without error.
func TestArchiveDelLock(t *testing.T) {
	convey.Convey("DelLock", t, func(ctx convey.C) {
		err := d.DelLock(context.Background(), int64(0))
		ctx.Convey("Then err should be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}

View File

@@ -0,0 +1,22 @@
package archive
import (
"go-common/app/service/main/videoup/model/archive"
"go-common/library/database/sql"
"go-common/library/log"
)
const (
	// insert one archive/video relation row.
	_inRelationSQL = "INSERT INTO archive_video_relation (aid,cid,title,description,index_order) VALUES (?,?,?,?,?)"
)
// TxAddRelation inserts an archive_video_relation row inside tx and
// returns the generated vid.
func (d *Dao) TxAddRelation(tx *sql.Tx, v *archive.Video) (vid int64, err error) {
	res, err := tx.Exec(_inRelationSQL, v.Aid, v.Cid, v.Title, v.Desc, v.Index)
	if err != nil {
		log.Error("d.inRelation.Exec error(%v)", err)
		return
	}
	return res.LastInsertId()
}

View File

@@ -0,0 +1,39 @@
package archive
import (
"context"
"go-common/app/service/main/videoup/model/archive"
"go-common/library/database/sql"
"go-common/library/log"
)
const (
	// insert a user report for an archive; IGNORE dedupes on (mid, aid).
	_inArcReportSQL = "INSERT IGNORE INTO archive_report (mid,aid,type,reason,pics,ctime,mtime) VALUES(?,?,?,?,?,?,?)"
	// fetch a single report by archive and reporter.
	_arcReportSQL = "SELECT aid,mid,type,reason,pics,ctime,mtime FROM archive_report WHERE aid=? AND mid=? LIMIT 1"
)
// AddArcReport inserts an archive report via INSERT IGNORE and returns
// the generated row id (0 when MySQL ignored a duplicate — confirm
// callers handle that).
func (d *Dao) AddArcReport(c context.Context, aa *archive.ArcReport) (id int64, err error) {
	res, err := d.db.Exec(c, _inArcReportSQL, aa.Mid, aa.Aid, aa.Type, aa.Reason, aa.Pics, aa.CTime, aa.MTime)
	if err != nil {
		log.Error("_inArcReport.Exec error(%v)", err)
		return
	}
	id, err = res.LastInsertId()
	return
}
// ArcReport fetches one report by (aid, mid); a missing row yields
// (nil, nil).
func (d *Dao) ArcReport(c context.Context, aid, mid int64) (aa *archive.ArcReport, err error) {
	aa = &archive.ArcReport{}
	err = d.rddb.QueryRow(c, _arcReportSQL, aid, mid).Scan(&aa.Aid, &aa.Mid, &aa.Type, &aa.Reason, &aa.Pics, &aa.CTime, &aa.MTime)
	switch {
	case err == sql.ErrNoRows:
		aa, err = nil, nil
	case err != nil:
		log.Error("row.Scan error(%v)", err)
	}
	return
}

View File

@@ -0,0 +1,121 @@
package archive
import (
"context"
"database/sql"
"fmt"
"go-common/app/service/main/videoup/model/archive"
xsql "go-common/library/database/sql"
"go-common/library/time"
"reflect"
"testing"
"github.com/bouk/monkey"
"github.com/smartystreets/goconvey/convey"
)
// TestArchiveArcReport checks ArcReport returns no error.
func TestArchiveArcReport(t *testing.T) {
	convey.Convey("ArcReport", t, func(ctx convey.C) {
		_, err := d.ArcReport(context.Background(), int64(2333), int64(23333))
		ctx.Convey("Then err should be nil.aa should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}
// TestTxAddRelation forces tx.Exec to fail (via monkey patching the
// method table) and asserts TxAddRelation surfaces the error.
// NOTE(review): the inner Convey description "TestArchivePOIAdd."
// looks copy-pasted from another test.
func TestTxAddRelation(t *testing.T) {
	var (
		c     = context.Background()
		err   error
		tx, _ = d.BeginTran(c)
		v     = &archive.Video{
			Aid:   int64(10110817),
			Cid:   int64(10134702),
			Title: "iamtitle",
			Index: 1,
		}
	)
	convey.Convey("TestTxAddRelation", t, func(ctx convey.C) {
		// Patch must stay installed for the call below; Unpatch order
		// relative to the dao call is significant.
		guard := monkey.PatchInstanceMethod(reflect.TypeOf(tx),
			"Exec",
			func(_ *xsql.Tx, _ string, _ ...interface{}) (sql.Result, error) {
				return nil, fmt.Errorf("tx.Exec error")
			})
		defer guard.Unpatch()
		_, err = d.TxAddRelation(tx, v)
		ctx.Convey("TestArchivePOIAdd.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldNotBeNil)
		})
	})
}
// TestTxUpForbid forces tx.Exec to fail (via monkey patching) and
// asserts TxUpForbid surfaces the error.
func TestTxUpForbid(t *testing.T) {
	var (
		c     = context.Background()
		err   error
		aid   = int64(10110817)
		fid   = int64(1)
		tx, _ = d.BeginTran(c)
	)
	convey.Convey("TestTxUpForbid", t, func(ctx convey.C) {
		guard := monkey.PatchInstanceMethod(reflect.TypeOf(tx),
			"Exec",
			func(_ *xsql.Tx, _ string, _ ...interface{}) (sql.Result, error) {
				return nil, fmt.Errorf("tx.Exec error")
			})
		defer guard.Unpatch()
		_, err = d.TxUpForbid(tx, aid, fid)
		ctx.Convey("TxUpForbid.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldNotBeNil)
		})
	})
}
// TestTxUpForbidAttr forces tx.Exec to fail (via monkey patching) and
// asserts TxUpForbidAttr surfaces the error.
func TestTxUpForbidAttr(t *testing.T) {
	var (
		c     = context.Background()
		err   error
		tx, _ = d.BeginTran(c)
		af    = &archive.ForbidAttr{}
	)
	convey.Convey("TestTxUpForbidAttr", t, func(ctx convey.C) {
		guard := monkey.PatchInstanceMethod(reflect.TypeOf(tx),
			"Exec",
			func(_ *xsql.Tx, _ string, _ ...interface{}) (sql.Result, error) {
				return nil, fmt.Errorf("tx.Exec error")
			})
		defer guard.Unpatch()
		_, err = d.TxUpForbidAttr(tx, af)
		ctx.Convey("TxUpForbidAttr.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldNotBeNil)
		})
	})
}
// TestTxAddDelay forces tx.Exec to fail (via monkey patching) and
// asserts TxAddDelay surfaces the error.
func TestTxAddDelay(t *testing.T) {
	var (
		c     = context.Background()
		err   error
		tx, _ = d.BeginTran(c)
		aid   = int64(2333)
		mid   = int64(23333)
		state = int8(1)
		tp    = int8(2)
		// zero-valued delay time is sufficient for the failure path.
		dtime time.Time
	)
	convey.Convey("TestTxAddDelay", t, func(ctx convey.C) {
		guard := monkey.PatchInstanceMethod(reflect.TypeOf(tx),
			"Exec",
			func(_ *xsql.Tx, _ string, _ ...interface{}) (sql.Result, error) {
				return nil, fmt.Errorf("tx.Exec error")
			})
		defer guard.Unpatch()
		_, err = d.TxAddDelay(tx, mid, aid, state, tp, dtime)
		ctx.Convey("TxAddDelay.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldNotBeNil)
		})
	})
}

View File

@@ -0,0 +1,90 @@
package archive
import (
"context"
"go-common/app/service/main/videoup/model/archive"
"go-common/library/database/sql"
"go-common/library/log"
)
// archive_staff SQL statements.
const (
	// flip the state of a staff row by primary key.
	_upStateStaffSQL = "UPDATE archive_staff SET state =? where id=?"
	// upsert a staff row keyed on (aid, staff_mid).
	_inStaffSQL = "INSERT into archive_staff(aid,mid,staff_mid,staff_title,staff_title_id,state) VALUES (?,?,?,?,?,?) ON DUPLICATE KEY UPDATE staff_title=?,staff_title_id=?,state=?"
	// list staff of an archive filtered by state.
	_staffsSQL = "SELECT id,aid,mid,staff_mid,staff_title,staff_title_id,state FROM archive_staff WHERE aid=? AND state=?"
	// fetch one staff row by primary key.
	_staffByIDSQL = "SELECT id,aid,mid,staff_mid,staff_title,staff_title_id,state FROM archive_staff WHERE id=?"
	// fetch one staff row by (aid, staff_mid).
	_staffByAIdMIDSQL = "SELECT id,aid,mid,staff_mid,staff_title,staff_title_id,state FROM archive_staff WHERE aid=? AND staff_mid=? limit 1"
)
// TxAddStaff upserts a staff row inside tx and returns the inserted
// row id.
func (d *Dao) TxAddStaff(tx *sql.Tx, param *archive.Staff) (id int64, err error) {
	res, err := tx.Exec(_inStaffSQL, param.AID, param.MID, param.StaffMID, param.StaffTitle, param.StaffTitleID, param.State, param.StaffTitle, param.StaffTitleID, param.State)
	if err != nil {
		log.Error("d.TxAddStaff.Exec() error(%v)", err)
		return
	}
	return res.LastInsertId()
}
// TxUpStaffState sets the state of a staff row by id and reports
// affected rows.
func (d *Dao) TxUpStaffState(tx *sql.Tx, state int8, id int64) (rows int64, err error) {
	res, err := tx.Exec(_upStateStaffSQL, state, id)
	if err != nil {
		log.Error("d.TxUpStaffState.Exec() error(%v)", err)
		return
	}
	return res.RowsAffected()
}
// Staffs returns the staff rows of an archive that are in the ON
// state. NOTE: reads the master DB (d.db), not the read replica.
func (d *Dao) Staffs(c context.Context, AID int64) (fs []*archive.Staff, err error) {
	rows, err := d.db.Query(c, _staffsSQL, AID, archive.STATEON)
	if err != nil {
		log.Error("d.db.Staffs aid(%d) error(%v)", AID, err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		f := &archive.Staff{}
		if err = rows.Scan(&f.ID, &f.AID, &f.MID, &f.StaffMID, &f.StaffTitle, &f.StaffTitleID, &f.State); err != nil {
			log.Error("rows.Scan error(%v)", err)
			return
		}
		fs = append(fs, f)
	}
	// Surface errors hit during iteration (previously dropped).
	err = rows.Err()
	return
}
// Staff fetches one staff row by primary key; a missing row yields
// (nil, nil). NOTE: reads the master DB (d.db).
func (d *Dao) Staff(c context.Context, ID int64) (s *archive.Staff, err error) {
	s = &archive.Staff{}
	err = d.db.QueryRow(c, _staffByIDSQL, ID).Scan(&s.ID, &s.AID, &s.MID, &s.StaffMID, &s.StaffTitle, &s.StaffTitleID, &s.State)
	switch {
	case err == sql.ErrNoRows:
		s, err = nil, nil
	case err != nil:
		log.Error("row.Scan error(%v)", err)
	}
	return
}
// StaffByAidAndMid fetches the staff row for the (archive, staff member)
// pair. It returns (nil, nil) when the member is not staff of the archive.
func (d *Dao) StaffByAidAndMid(c context.Context, AID, StaffMid int64) (s *archive.Staff, err error) {
	s = &archive.Staff{}
	err = d.db.QueryRow(c, _staffByAIdMIDSQL, AID, StaffMid).Scan(&s.ID, &s.AID, &s.MID, &s.StaffMID, &s.StaffTitle, &s.StaffTitleID, &s.State)
	switch {
	case err == sql.ErrNoRows:
		// absence is a normal outcome, not an error.
		s, err = nil, nil
	case err != nil:
		log.Error("row.Scan error(%v)", err)
	}
	return
}

View File

@@ -0,0 +1,159 @@
package archive
import (
"context"
bsql "database/sql"
"fmt"
"go-common/app/service/main/videoup/model/archive"
"go-common/library/database/sql"
"go-common/library/log"
"go-common/library/xstr"
)
// SQL statements for the archive_staff_apply table. Select statements LEFT JOIN
// archive_staff so callers also get the live staff state/title of the apply.
const (
	// upsert an application; on duplicate refresh type/title/state and staff link.
	_inApplySQL = `INSERT INTO archive_staff_apply (type,as_id,apply_aid,apply_up_mid,apply_staff_mid,apply_title,apply_title_id,state,deal_state) VALUES (?,?,?,?,?,?,?,?,?) ON DUPLICATE KEY UPDATE type=?,apply_title=?,apply_title_id=?,state=?,deal_state=?,as_id=?`
	// fetch one application (plus joined staff row) by primary key.
	_selApplySQL = `SELECT sa.id,sa.type,sa.as_id,sa.apply_aid,sa.apply_up_mid,sa.apply_staff_mid,sa.apply_title,sa.apply_title_id,sa.state,sa.deal_state,s.state as staff_state,s.staff_title FROM archive_staff_apply sa LEFT JOIN archive_staff s on s.id=sa.as_id where sa.id=?`
	// fetch applications by id list; %s is filled with a comma-joined id list.
	_selApplysSQL = `SELECT sa.id,sa.type,sa.as_id,sa.apply_aid,sa.apply_up_mid,sa.apply_staff_mid,sa.apply_title,sa.apply_title_id,sa.state,sa.deal_state,s.state as staff_state,s.staff_title FROM archive_staff_apply sa LEFT JOIN archive_staff s on s.id=sa.as_id where sa.id IN(%s)`
	// fetch all applications of one archive.
	_selApplysByAIDSQL = `SELECT sa.id,sa.type,sa.as_id,sa.apply_aid,sa.apply_up_mid,sa.apply_staff_mid,sa.apply_title,sa.apply_title_id,sa.state,sa.deal_state,s.state as staff_state,s.staff_title FROM archive_staff_apply sa LEFT JOIN archive_staff s on s.id=sa.as_id where sa.apply_aid IN(%s) AND sa.apply_staff_mid=%d`
	// fetch applications of several archives restricted to one staff mid;
	// %s/%d are filled via fmt.Sprintf (ids and mid are numeric, not user text).
	_selApplysByMIDSTAFFSQL = `SELECT sa.id,sa.type,sa.as_id,sa.apply_aid,sa.apply_up_mid,sa.apply_staff_mid,sa.apply_title,sa.apply_title_id,sa.state,sa.deal_state,s.state as staff_state,s.staff_title FROM archive_staff_apply sa LEFT JOIN archive_staff s on s.id=sa.as_id where sa.apply_up_mid=? AND sa.apply_staff_mid =?`
	// count applications addressed to one staff mid.
	_midCountSQL = `select count(*) as count from archive_staff_apply where apply_staff_mid=?`
)
// Apply fetches one staff application (joined with its staff row) by primary
// key from the read replica. It returns (nil, nil) when no row matches.
func (d *Dao) Apply(c context.Context, ID int64) (p *archive.StaffApply, err error) {
	var (
		title bsql.NullString
		state bsql.NullInt64
	)
	p = &archive.StaffApply{}
	err = d.rddb.QueryRow(c, _selApplySQL, ID).Scan(&p.ID, &p.Type, &p.ASID, &p.ApplyAID, &p.ApplyUpMID, &p.ApplyStaffMID, &p.ApplyTitle, &p.ApplyTitleID, &p.State, &p.DealState, &state, &title)
	switch {
	case err == sql.ErrNoRows:
		p, err = nil, nil
	case err != nil:
		log.Error("row.Scan error(%v)", err)
	default:
		// joined columns are NULLable when the staff row is missing.
		p.StaffTitle = title.String
		p.StaffState = int8(state.Int64)
	}
	return
}
// MidCount counts how many staff applications target the given staff mid,
// reading from the replica database.
func (d *Dao) MidCount(c context.Context, ID int64) (count int64, err error) {
	err = d.rddb.QueryRow(c, _midCountSQL, ID).Scan(&count)
	if err == sql.ErrNoRows {
		// treat a missing row as zero.
		err = nil
	} else if err != nil {
		log.Error("row.Scan error(%v)", err)
	}
	return
}
// Applys fetches staff applications (joined with their staff rows) by
// primary-key id list. An empty list yields (nil, nil).
func (d *Dao) Applys(c context.Context, ids []int64) (as []*archive.StaffApply, err error) {
	if len(ids) == 0 {
		// an empty list would render "IN()", which is invalid SQL.
		return
	}
	rows, err := d.db.Query(c, fmt.Sprintf(_selApplysSQL, xstr.JoinInts(ids)))
	if err != nil {
		log.Error("d.db.Applys ids(%+v) error(%v)", ids, err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		var title bsql.NullString
		var state bsql.NullInt64
		s := &archive.StaffApply{}
		if err = rows.Scan(&s.ID, &s.Type, &s.ASID, &s.ApplyAID, &s.ApplyUpMID, &s.ApplyStaffMID, &s.ApplyTitle, &s.ApplyTitleID, &s.State, &s.DealState, &state, &title); err != nil {
			log.Error("rows.Scan error(%v)", err)
			return
		}
		// joined columns are NULLable when the staff row is missing.
		s.StaffTitle = title.String
		s.StaffState = int8(state.Int64)
		as = append(as, s)
	}
	// surface mid-iteration failures that rows.Next() reports as plain false.
	if err = rows.Err(); err != nil {
		log.Error("rows.Err error(%v)", err)
	}
	return
}
// FilterApplys fetches the staff applications of the given archives that are
// addressed to staff member mid. An empty aids list yields (nil, nil).
func (d *Dao) FilterApplys(c context.Context, aids []int64, mid int64) (as []*archive.StaffApply, err error) {
	if len(aids) == 0 {
		// an empty list would render "IN()", which is invalid SQL.
		return
	}
	rows, err := d.db.Query(c, fmt.Sprintf(_selApplysByAIDSANDMIDSQL, xstr.JoinInts(aids), mid))
	if err != nil {
		log.Error("d.db.FilterApplys(%v,%d) error(%v)", aids, mid, err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		var title bsql.NullString
		var state bsql.NullInt64
		s := &archive.StaffApply{}
		if err = rows.Scan(&s.ID, &s.Type, &s.ASID, &s.ApplyAID, &s.ApplyUpMID, &s.ApplyStaffMID, &s.ApplyTitle, &s.ApplyTitleID, &s.State, &s.DealState, &state, &title); err != nil {
			log.Error("rows.Scan error(%v)", err)
			return
		}
		// joined columns are NULLable when the staff row is missing.
		s.StaffTitle = title.String
		s.StaffState = int8(state.Int64)
		as = append(as, s)
	}
	// surface mid-iteration failures that rows.Next() reports as plain false.
	if err = rows.Err(); err != nil {
		log.Error("rows.Err error(%v)", err)
	}
	return
}
// ApplysByAID fetches all staff applications of one archive, each joined
// with its staff row.
func (d *Dao) ApplysByAID(c context.Context, aid int64) (as []*archive.StaffApply, err error) {
	rows, err := d.db.Query(c, _selApplysByAIDSQL, aid)
	if err != nil {
		log.Error("d.db.ApplysByAID aid(%d) error(%v)", aid, err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		var title bsql.NullString
		var state bsql.NullInt64
		s := &archive.StaffApply{}
		if err = rows.Scan(&s.ID, &s.Type, &s.ASID, &s.ApplyAID, &s.ApplyUpMID, &s.ApplyStaffMID, &s.ApplyTitle, &s.ApplyTitleID, &s.State, &s.DealState, &state, &title); err != nil {
			log.Error("rows.Scan error(%v)", err)
			return
		}
		// joined columns are NULLable when the staff row is missing.
		s.StaffTitle = title.String
		s.StaffState = int8(state.Int64)
		as = append(as, s)
	}
	// surface mid-iteration failures that rows.Next() reports as plain false.
	if err = rows.Err(); err != nil {
		log.Error("rows.Err error(%v)", err)
	}
	return
}
// ApplysByMIDAndStaff fetches the staff applications issued by uploader
// upMID and addressed to staff member staffMID.
func (d *Dao) ApplysByMIDAndStaff(c context.Context, upMID, staffMID int64) (as []*archive.StaffApply, err error) {
	rows, err := d.db.Query(c, _selApplysByMIDSTAFFSQL, upMID, staffMID)
	if err != nil {
		// fixed copy-pasted log tag (was "d.db.ApplysByAID").
		log.Error("d.db.ApplysByMIDAndStaff upMID(%d) staffMID(%d) error(%v)", upMID, staffMID, err)
		return
	}
	defer rows.Close()
	for rows.Next() {
		var title bsql.NullString
		var state bsql.NullInt64
		s := &archive.StaffApply{}
		if err = rows.Scan(&s.ID, &s.Type, &s.ASID, &s.ApplyAID, &s.ApplyUpMID, &s.ApplyStaffMID, &s.ApplyTitle, &s.ApplyTitleID, &s.State, &s.DealState, &state, &title); err != nil {
			log.Error("rows.Scan error(%v)", err)
			return
		}
		// joined columns are NULLable when the staff row is missing.
		s.StaffTitle = title.String
		s.StaffState = int8(state.Int64)
		as = append(as, s)
	}
	// surface mid-iteration failures that rows.Next() reports as plain false.
	if err = rows.Err(); err != nil {
		log.Error("rows.Err error(%v)", err)
	}
	return
}
// TxAddApply upserts one archive_staff_apply row inside the supplied
// transaction and returns the id reported by the driver.
func (d *Dao) TxAddApply(tx *sql.Tx, param *archive.ApplyParam) (id int64, err error) {
	result, execErr := tx.Exec(_inApplySQL, param.Type, param.ASID, param.ApplyAID, param.ApplyUpMID, param.ApplyStaffMID, param.ApplyTitle, param.ApplyTitleID, param.State, param.DealState, param.Type, param.ApplyTitle, param.ApplyTitleID, param.State, param.DealState, param.ASID)
	if execErr != nil {
		err = execErr
		log.Error("d.TxAddApply.Exec() error(%v)", err)
		return
	}
	return result.LastInsertId()
}

View File

@@ -0,0 +1,70 @@
package archive
import (
"context"
"testing"
"github.com/davecgh/go-spew/spew"
"github.com/smartystreets/goconvey/convey"
)
// TestStaffMid exercises Dao.MidCount against a fixture staff mid.
func TestStaffMid(t *testing.T) {
	convey.Convey("MidCount", t, func(ctx convey.C) {
		count, err := d.MidCount(context.Background(), 2880441)
		spew.Dump(count)
		ctx.Convey("Then err should be nil.fs should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}
// TestApply exercises Dao.Apply for a fixture application id.
func TestApply(t *testing.T) {
	convey.Convey("Apply", t, func(ctx convey.C) {
		data, err := d.Apply(context.Background(), 1)
		if err == nil {
			spew.Dump(data)
		}
		ctx.Convey("Then err should be nil.fs should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}
// TestApplys exercises Dao.Applys with a small fixture id list.
func TestApplys(t *testing.T) {
	convey.Convey("Applys", t, func(ctx convey.C) {
		data, err := d.Applys(context.Background(), []int64{1, 11})
		if err == nil {
			spew.Dump(data)
		}
		ctx.Convey("Then err should be nil.fs should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}
// TestFilterApplys exercises Dao.FilterApplys with fixture aids and a mid.
func TestFilterApplys(t *testing.T) {
	convey.Convey("FilterApplys", t, func(ctx convey.C) {
		data, err := d.FilterApplys(context.Background(), []int64{23213, 4052032}, 4052032)
		// dump whichever of result/error we got, for manual inspection.
		if err == nil {
			spew.Dump(data)
		} else {
			spew.Dump(err)
		}
		ctx.Convey("Then err should be nil.fs should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}

View File

@@ -0,0 +1,32 @@
package archive
import (
"context"
"go-common/app/service/main/videoup/model/archive"
"go-common/library/log"
)
const (
	// load every archive type row (id, parent id, name, description).
	_tpsSQL = "SELECT id,pid,name,description FROM archive_type"
)
// TypeMapping loads the whole archive_type table from the read replica and
// returns it keyed by type id, so second-level types can be resolved to
// their first-level (parent) types via PID.
func (d *Dao) TypeMapping(c context.Context) (tmap map[int16]*archive.Type, err error) {
	rows, err := d.rddb.Query(c, _tpsSQL)
	if err != nil {
		// fixed stale log tag (was "d.tpsStmt.Query"; no prepared stmt is used).
		log.Error("d.rddb.Query error(%v)", err)
		return
	}
	defer rows.Close()
	tmap = make(map[int16]*archive.Type)
	for rows.Next() {
		t := &archive.Type{}
		if err = rows.Scan(&t.ID, &t.PID, &t.Name, &t.Desc); err != nil {
			log.Error("rows.Scan error(%v)", err)
			return
		}
		tmap[t.ID] = t
	}
	// surface mid-iteration failures that rows.Next() reports as plain false.
	if err = rows.Err(); err != nil {
		log.Error("rows.Err error(%v)", err)
	}
	return
}

View File

@@ -0,0 +1,20 @@
package archive
import (
"context"
"testing"
"github.com/smartystreets/goconvey/convey"
)
// TestArchiveTypeMapping exercises Dao.TypeMapping against the test database.
func TestArchiveTypeMapping(t *testing.T) {
	convey.Convey("TypeMapping", t, func(ctx convey.C) {
		_, err := d.TypeMapping(context.Background())
		ctx.Convey("Then err should be nil.tmap should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}

View File

@@ -0,0 +1,104 @@
package archive
import (
"fmt"
"strings"
"go-common/app/service/main/videoup/model/archive"
"go-common/library/database/sql"
"go-common/library/log"
)
// SQL statements for the archive_video and archive_video_audit tables.
const (
	// insert
	// insert one video row with an explicit id.
	_inVideoSQL = `INSERT INTO archive_video (id,aid,eptitle,description,filename,src_type,cid,index_order,attribute,duration,filesize,resolutions,playurl,failinfo,xcode_state,status)
				VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)`
	// batch-insert audit rows; %s is filled with a "(…),(…)" values clause.
	_inAuditsSQL = "INSERT INTO archive_video_audit (vid,aid,tid,oname,reason) VALUES %s"
	// update
	// edit title/description/order/status of a video by primary key.
	_upVideoSQL = `UPDATE archive_video SET eptitle=?,description=?,index_order=?,status=? WHERE id=?`
	// the following target a video by its (aid, filename) pair.
	_upVdoStatusSQL = "UPDATE archive_video SET status=? WHERE aid=? AND filename=?"
	_upVdoXcodeSQL  = "UPDATE archive_video SET xcode_state=? WHERE aid=? AND filename=?"
	_upVdoAttrSQL   = "UPDATE archive_video SET attribute=? WHERE aid=? AND filename=?"
	_upVdoCidSQL    = "UPDATE archive_video SET cid=? WHERE aid=? AND filename=?"
)
// TxAddVideo inserts one archive_video row (with an explicit id) inside the
// supplied transaction and returns the insert id reported by the driver.
func (d *Dao) TxAddVideo(tx *sql.Tx, v *archive.Video) (id int64, err error) {
	result, execErr := tx.Exec(_inVideoSQL, v.ID, v.Aid, v.Title, v.Desc, v.Filename, v.SrcType, v.Cid, v.Index, v.Attribute, v.Duration, v.Filesize, v.Resolutions, v.Playurl, v.FailCode, v.XcodeState, v.Status)
	if execErr != nil {
		err = execErr
		log.Error("d.inVideo.Exec error(%v)", err)
		return
	}
	return result.LastInsertId()
}
// TxUpVideo updates the editable columns (title, description, order, status)
// of a video by primary key inside the supplied transaction.
func (d *Dao) TxUpVideo(tx *sql.Tx, v *archive.Video) (rows int64, err error) {
	result, execErr := tx.Exec(_upVideoSQL, v.Title, v.Desc, v.Index, v.Status, v.ID)
	if execErr != nil {
		err = execErr
		log.Error("d.upVideo.Exec(%v) error(%v)", v, err)
		return
	}
	return result.RowsAffected()
}
// TxUpVideoStatus sets the status of the video identified by (aid, filename)
// inside the supplied transaction and returns the affected row count.
func (d *Dao) TxUpVideoStatus(tx *sql.Tx, aid int64, filename string, status int16) (rows int64, err error) {
	result, execErr := tx.Exec(_upVdoStatusSQL, status, aid, filename)
	if execErr != nil {
		err = execErr
		log.Error("d.upVideoStatus.Exec error(%v)", err)
		return
	}
	return result.RowsAffected()
}
// TxUpVideoXcode sets the xcode_state of the video identified by
// (aid, filename) inside the supplied transaction.
func (d *Dao) TxUpVideoXcode(tx *sql.Tx, aid int64, filename string, xCodeState int8) (rows int64, err error) {
	result, execErr := tx.Exec(_upVdoXcodeSQL, xCodeState, aid, filename)
	if execErr != nil {
		err = execErr
		log.Error("d.upVdoXcode.Exec error(%v)", err)
		return
	}
	return result.RowsAffected()
}
// TxUpVideoAttr sets the attribute bitfield of the video identified by
// (aid, filename) inside the supplied transaction.
func (d *Dao) TxUpVideoAttr(tx *sql.Tx, aid int64, filename string, attribute int32) (rows int64, err error) {
	result, execErr := tx.Exec(_upVdoAttrSQL, attribute, aid, filename)
	if execErr != nil {
		err = execErr
		log.Error("d.upVideoAttr.Exec error(%v)", err)
		return
	}
	return result.RowsAffected()
}
// TxUpVideoCid sets the cid of the video identified by (aid, filename)
// inside the supplied transaction.
func (d *Dao) TxUpVideoCid(tx *sql.Tx, aid int64, filename string, cid int64) (rows int64, err error) {
	result, execErr := tx.Exec(_upVdoCidSQL, cid, aid, filename)
	if execErr != nil {
		err = execErr
		log.Error("d.upVideoCid.Exec error(%v)", err)
		return
	}
	return result.RowsAffected()
}
// TxAddAudit batch-inserts one archive_video_audit row per video inside the
// supplied transaction and returns the affected row count. A nil/empty slice
// is a no-op.
func (d *Dao) TxAddAudit(tx *sql.Tx, vs []*archive.Video) (rows int64, err error) {
	if len(vs) == 0 {
		// an empty slice would render "VALUES ", which is invalid SQL.
		return
	}
	// use placeholders instead of splicing values into the statement text,
	// so the driver handles quoting/escaping.
	placeholders := make([]string, 0, len(vs))
	args := make([]interface{}, 0, len(vs)*5)
	for _, v := range vs {
		placeholders = append(placeholders, "(?,?,?,?,?)")
		args = append(args, v.ID, v.Aid, 0, "videoup-service", "")
	}
	res, err := tx.Exec(fmt.Sprintf(_inAuditsSQL, strings.Join(placeholders, ",")), args...)
	if err != nil {
		log.Error("d.inAudit.Exec error(%v)", err)
		return
	}
	rows, err = res.RowsAffected()
	return
}

View File

@@ -0,0 +1,142 @@
package archive
import (
"context"
"testing"
. "github.com/smartystreets/goconvey/convey"
"go-common/app/service/main/videoup/model/archive"
"math/rand"
"time"
)
// TestDao_TxAddVideo inserts a video row with randomized id/aid so reruns
// do not collide on the primary key.
func TestDao_TxAddVideo(t *testing.T) {
	rand.Seed(time.Now().Unix())
	v := &archive.Video{
		ID:    int64(rand.Intn(999999999) + 1000000000),
		Aid:   int64(rand.Intn(999999999) + 1000000000),
		Title: "UT测试",
	}
	tx, _ := d.BeginTran(context.Background())
	Convey("TxAddVideo", t, func(ctx C) {
		_, err := d.TxAddVideo(tx, v)
		if err == nil {
			tx.Commit()
		} else {
			tx.Rollback()
		}
		So(err, ShouldBeNil)
	})
}
// TestDao_TxUpVideo updates a fixture video row inside a transaction.
func TestDao_TxUpVideo(t *testing.T) {
	tx, _ := d.BeginTran(context.Background())
	v := &archive.Video{ID: 123, Aid: 2333, Title: "UT测试"}
	Convey("TxUpVideo", t, func(ctx C) {
		_, err := d.TxUpVideo(tx, v)
		if err == nil {
			tx.Commit()
		} else {
			tx.Rollback()
		}
		So(err, ShouldBeNil)
	})
}
// TestDao_TxUpVideoStatus updates a video's status by (aid, filename).
func TestDao_TxUpVideoStatus(t *testing.T) {
	tx, _ := d.BeginTran(context.Background())
	Convey("TxUpVideoStatus", t, func(ctx C) {
		_, err := d.TxUpVideoStatus(tx, 2333, "sadasdadsds", 0)
		if err == nil {
			tx.Commit()
		} else {
			tx.Rollback()
		}
		So(err, ShouldBeNil)
	})
}
// TestDao_TxUpVideoXcode updates a video's xcode_state by (aid, filename).
func TestDao_TxUpVideoXcode(t *testing.T) {
	tx, _ := d.BeginTran(context.Background())
	Convey("TxUpVideoXcode", t, func(ctx C) {
		_, err := d.TxUpVideoXcode(tx, 2333, "sadasdadsds", 0)
		if err == nil {
			tx.Commit()
		} else {
			tx.Rollback()
		}
		So(err, ShouldBeNil)
	})
}
// TestDao_TxUpVideoAttr updates a video's attribute bitfield by (aid, filename).
func TestDao_TxUpVideoAttr(t *testing.T) {
	tx, _ := d.BeginTran(context.Background())
	Convey("TxUpVideoAttr", t, func(ctx C) {
		_, err := d.TxUpVideoAttr(tx, 2333, "sadasdadsds", 0)
		if err == nil {
			tx.Commit()
		} else {
			tx.Rollback()
		}
		So(err, ShouldBeNil)
	})
}
// TestDao_TxUpVideoCid updates a video's cid by (aid, filename).
func TestDao_TxUpVideoCid(t *testing.T) {
	tx, _ := d.BeginTran(context.Background())
	Convey("TxUpVideoCid", t, func(ctx C) {
		_, err := d.TxUpVideoCid(tx, 2333, "sadasdadsds", 1213)
		if err == nil {
			tx.Commit()
		} else {
			tx.Rollback()
		}
		So(err, ShouldBeNil)
	})
}
// TestDao_TxAddAudit inserts one audit row with a randomized vid so reruns
// do not collide on the key.
func TestDao_TxAddAudit(t *testing.T) {
	rand.Seed(time.Now().Unix())
	vs := []*archive.Video{{
		ID:    int64(rand.Intn(999999) + 1000000000),
		Aid:   2333,
		Title: "UT测试",
	}}
	tx, _ := d.BeginTran(context.Background())
	Convey("TxAddAudit", t, func(ctx C) {
		_, err := d.TxAddAudit(tx, vs)
		if err == nil {
			tx.Commit()
		} else {
			tx.Rollback()
		}
		So(err, ShouldBeNil)
	})
}