Create & Init Project...

This commit is contained in:
2019-04-22 18:49:16 +08:00
commit fc4fa37393
25440 changed files with 4054998 additions and 0 deletions

View File

@@ -0,0 +1,56 @@
# Bazel build file for the rank-service dao package. The "automanaged" tags
# indicate these rules are maintained by a generator (gazelle-style); edit
# srcs/deps via the generator rather than by hand where possible.
package(default_visibility = ["//visibility:public"])

load(
    "@io_bazel_rules_go//go:def.bzl",
    "go_test",
    "go_library",
)

# Unit tests for the dao package; embedded into the library target so the
# tests compile together with its sources.
go_test(
    name = "go_default_test",
    srcs = [
        "dao_test.go",
        "mysql_test.go",
    ],
    embed = [":go_default_library"],
    rundir = ".",
    tags = ["automanaged"],
    deps = [
        "//app/service/main/rank/conf:go_default_library",
        "//library/time:go_default_library",
        "//vendor/github.com/smartystreets/goconvey/convey:go_default_library",
    ],
)

# The dao library proper (MySQL access for rank service).
go_library(
    name = "go_default_library",
    srcs = [
        "dao.go",
        "mysql.go",
    ],
    importpath = "go-common/app/service/main/rank/dao",
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
    deps = [
        "//app/service/main/rank/conf:go_default_library",
        "//app/service/main/rank/model:go_default_library",
        "//library/database/sql:go_default_library",
        "//library/log:go_default_library",
        "//library/time:go_default_library",
        "//library/xstr:go_default_library",
    ],
)

# Source filegroups used by the repo-wide srcs aggregation.
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)

filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,46 @@
package dao
import (
"context"
"go-common/app/service/main/rank/conf"
xsql "go-common/library/database/sql"
)
// Dao holds the service configuration and the MySQL connections used by the
// rank service data-access layer.
type Dao struct {
	c         *conf.Config // service configuration
	dbArchive *xsql.DB     // archive DB: archive meta and archive_type tables
	dbStat    *xsql.DB     // stat DB: sharded archive_stat_NN tables
	dbTV      *xsql.DB     // TV DB: ugc_archive table
}
// New builds a Dao wired to the archive, stat and TV MySQL databases
// described in the given configuration.
func New(c *conf.Config) (dao *Dao) {
	dao = &Dao{c: c}
	dao.dbArchive = xsql.NewMySQL(c.MySQL.BilibiliArchive)
	dao.dbStat = xsql.NewMySQL(c.MySQL.ArchiveStat)
	dao.dbTV = xsql.NewMySQL(c.MySQL.BilibiliTV)
	return
}
// Close releases every underlying database connection held by the Dao.
func (d *Dao) Close() {
	for _, db := range []*xsql.DB{d.dbArchive, d.dbStat, d.dbTV} {
		db.Close()
	}
}
// Ping verifies connectivity to all three databases, returning the first
// failure encountered (archive, then stat, then TV).
func (d *Dao) Ping(c context.Context) error {
	for _, db := range []*xsql.DB{d.dbArchive, d.dbStat, d.dbTV} {
		if err := db.Ping(c); err != nil {
			return err
		}
	}
	return nil
}

View File

@@ -0,0 +1,36 @@
package dao
import (
"flag"
"os"
"testing"
"go-common/app/service/main/rank/conf"
)
var (
	// d is the shared Dao under test, initialized once in TestMain.
	d *Dao
)
// TestMain selects a config source (remote config service when DEPLOY_ENV is
// set, local TOML otherwise), initializes conf and the shared Dao, then runs
// the test suite.
func TestMain(m *testing.M) {
	if os.Getenv("DEPLOY_ENV") != "" {
		// Remote config-center flags for the UAT environment.
		// NOTE(review): conf_token is a credential committed in source —
		// consider rotating it and injecting via the environment instead.
		flag.Set("app_id", "main.search.rank-service")
		flag.Set("conf_token", "8da2368e2495e20a841b5125bf00b761")
		flag.Set("tree_id", "56749")
		flag.Set("conf_version", "server-1")
		flag.Set("deploy_env", "uat")
		flag.Set("conf_host", "config.bilibili.co")
		flag.Set("conf_path", "/tmp")
		flag.Set("region", "sh")
		flag.Set("zone", "sh001")
	} else {
		// Local development: read config from the checked-in test TOML.
		flag.Set("conf", "../cmd/test.toml")
	}
	flag.Parse()
	if err := conf.Init(); err != nil {
		panic(err)
	}
	d = New(conf.Conf)
	m.Run()
	os.Exit(0)
}

View File

@@ -0,0 +1,191 @@
package dao
import (
"context"
"database/sql"
"fmt"
"go-common/app/service/main/rank/model"
"go-common/library/log"
xtime "go-common/library/time"
"go-common/library/xstr"
)
const (
	// Full-scan cursor queries: page through archive/stat/TV rows by id.
	_maxArchiveIDSQL = `SELECT MAX(id) FROM archive`
	_archiveMetasSQL = `SELECT id,typeid,pubtime FROM archive WHERE id>? ORDER BY id LIMIT ?`
	// Incremental queries: rows whose mtime falls in a window, cursored by (mtime, id).
	_archiveMetasMtimeSQL = `SELECT id,typeid,pubtime FROM archive WHERE id>? AND mtime BETWEEN ? AND ? ORDER BY mtime,id LIMIT ?`
	// %s placeholders are filled via fmt.Sprintf with a comma-joined id list
	// (and, for stats, the two-digit shard suffix).
	_archiveTypesSQL      = `SELECT id,pid FROM archive_type WHERE id in (%s)`
	_archiveStatsSQL      = `SELECT aid,click FROM archive_stat_%s WHERE aid in (%s)`
	_archiveStatsMtimeSQL = `SELECT id,aid,click FROM archive_stat_%s WHERE id > ? AND mtime BETWEEN ? AND ? ORDER BY mtime,id LIMIT ?`
	_archiveTVsSQL        = `SELECT aid,result,deleted,valid FROM ugc_archive WHERE aid in (%s)`
	_archiveTVsMtimeSQL   = `SELECT id,aid,result,deleted,valid FROM ugc_archive WHERE id > ? AND mtime BETWEEN ? AND ? ORDER BY mtime,id LIMIT ?`
	// archive_stat is sharded into 100 tables, suffix = aid % 100 (zero-padded).
	_archiveStatSharding = 100
)
// MaxOid returns the largest archive id; an empty archive table yields
// (0, nil) rather than an error.
func (d *Dao) MaxOid(c context.Context) (oid int64, err error) {
	err = d.dbArchive.QueryRow(c, _maxArchiveIDSQL).Scan(&oid)
	switch {
	case err == nil:
		// ok
	case err == sql.ErrNoRows:
		err = nil // no rows is not a failure
	default:
		log.Error("row.Scan error(%v)", err)
	}
	return
}
// ArchiveMetas fetches up to limit archive metas with id greater than the
// given cursor id, ordered by id ascending.
func (d *Dao) ArchiveMetas(c context.Context, id int64, limit int) ([]*model.ArchiveMeta, error) {
	rows, err := d.dbArchive.Query(c, _archiveMetasSQL, id, limit)
	if err != nil {
		// Fix: format string previously read "error()" with no %v verb,
		// so the trailing err argument was never printed.
		log.Error("d.dbArchive.Query(%s,%d,%d) error(%v)", _archiveMetasSQL, id, limit, err)
		return nil, err
	}
	defer rows.Close()
	as := make([]*model.ArchiveMeta, 0)
	for rows.Next() {
		a := new(model.ArchiveMeta)
		if err = rows.Scan(&a.ID, &a.Typeid, &a.Pubtime); err != nil {
			log.Error("rows.Scan() error(%v)", err)
			return nil, err
		}
		as = append(as, a)
	}
	// rows.Err surfaces any iteration error after Next returns false.
	return as, rows.Err()
}
// ArchiveMetasIncrs fetches up to limit archive metas modified in the
// [begin, end] mtime window with aid greater than the cursor, ordered by
// (mtime, id) for stable incremental paging.
func (d *Dao) ArchiveMetasIncrs(c context.Context, aid int64, begin, end xtime.Time, limit int) ([]*model.ArchiveMeta, error) {
	rows, err := d.dbArchive.Query(c, _archiveMetasMtimeSQL, aid, begin, end, limit)
	if err != nil {
		// Fix: format string previously read "error()" with no %v verb,
		// so the trailing err argument was never printed.
		log.Error("d.dbArchive.Query(%s,%d,%s,%s,%d) error(%v)", _archiveMetasMtimeSQL, aid, begin, end, limit, err)
		return nil, err
	}
	defer rows.Close()
	as := make([]*model.ArchiveMeta, 0)
	for rows.Next() {
		a := new(model.ArchiveMeta)
		if err = rows.Scan(&a.ID, &a.Typeid, &a.Pubtime); err != nil {
			log.Error("rows.Scan() error(%v)", err)
			return nil, err
		}
		as = append(as, a)
	}
	return as, rows.Err()
}
// ArchiveTypes fetches (id, pid) for the given archive type ids and returns
// them keyed by id. An empty ids slice returns an empty (non-nil) map.
func (d *Dao) ArchiveTypes(c context.Context, ids []int64) (map[int64]*model.ArchiveType, error) {
	as := make(map[int64]*model.ArchiveType)
	// Guard: an empty id list would render "IN ()", which is a MySQL syntax error.
	if len(ids) == 0 {
		return as, nil
	}
	query := fmt.Sprintf(_archiveTypesSQL, xstr.JoinInts(ids))
	rows, err := d.dbArchive.Query(c, query)
	if err != nil {
		log.Error("d.dbArchive.Query(%s) error(%v)", query, err)
		return nil, err
	}
	defer rows.Close()
	for rows.Next() {
		a := new(model.ArchiveType)
		if err = rows.Scan(&a.ID, &a.Pid); err != nil {
			log.Error("rows.Scan() error(%v)", err)
			return nil, err
		}
		as[a.ID] = a
	}
	return as, rows.Err()
}
// ArchiveStats fetches click stats for the given aids from the sharded
// archive_stat_NN tables (shard = aid % 100), one query per shard, and
// returns them keyed by aid.
func (d *Dao) ArchiveStats(c context.Context, aids []int64) (map[int64]*model.ArchiveStat, error) {
	// Bucket aids by shard so each table is queried at most once.
	tableMap := make(map[int64][]int64)
	for _, aid := range aids {
		mod := aid % _archiveStatSharding
		tableMap[mod] = append(tableMap[mod], aid)
	}
	as := make(map[int64]*model.ArchiveStat)
	for tbl, aids := range tableMap {
		// Fix: the per-shard query lives in a helper so rows.Close runs at
		// the end of each iteration; previously `defer rows.Close()` inside
		// this loop held every result set open until the function returned.
		if err := d.archiveStatsShard(c, tbl, aids, as); err != nil {
			return nil, err
		}
	}
	return as, nil
}

// archiveStatsShard queries one archive_stat shard for the given aids and
// merges the scanned rows into as.
func (d *Dao) archiveStatsShard(c context.Context, tbl int64, aids []int64, as map[int64]*model.ArchiveStat) error {
	query := fmt.Sprintf(_archiveStatsSQL, fmt.Sprintf("%02d", tbl), xstr.JoinInts(aids))
	rows, err := d.dbStat.Query(c, query)
	if err != nil {
		log.Error("d.dbStat.Query(%s) error(%v)", query, err)
		return err
	}
	defer rows.Close()
	for rows.Next() {
		a := new(model.ArchiveStat)
		if err = rows.Scan(&a.Aid, &a.Click); err != nil {
			log.Error("rows.Scan() error(%v)", err)
			return err
		}
		as[a.Aid] = a
	}
	return rows.Err()
}
// ArchiveStatsIncrs fetches up to limit stat rows from one archive_stat
// shard whose mtime falls in [begin, end] and whose id exceeds the cursor,
// ordered by (mtime, id).
func (d *Dao) ArchiveStatsIncrs(c context.Context, tbl int, id int64, begin, end xtime.Time, limit int) ([]*model.ArchiveStat, error) {
	query := fmt.Sprintf(_archiveStatsMtimeSQL, fmt.Sprintf("%02d", tbl))
	rows, err := d.dbStat.Query(c, query, id, begin, end, limit)
	if err != nil {
		log.Error("d.dbStat.Query(%s,%d,%s,%s,%d) error(%v)", query, id, begin, end, limit, err)
		return nil, err
	}
	defer rows.Close()
	as := make([]*model.ArchiveStat, 0)
	for rows.Next() {
		stat := new(model.ArchiveStat)
		if err = rows.Scan(&stat.ID, &stat.Aid, &stat.Click); err != nil {
			log.Error("rows.Scan() error(%v)", err)
			return nil, err
		}
		as = append(as, stat)
	}
	return as, rows.Err()
}
// ArchiveTVs fetches TV-side state (result, deleted, valid) for the given
// aids and returns them keyed by aid. An empty aids slice returns an empty
// (non-nil) map.
func (d *Dao) ArchiveTVs(c context.Context, aids []int64) (map[int64]*model.ArchiveTv, error) {
	as := make(map[int64]*model.ArchiveTv)
	// Guard: an empty aid list would render "IN ()", which is a MySQL syntax error.
	if len(aids) == 0 {
		return as, nil
	}
	query := fmt.Sprintf(_archiveTVsSQL, xstr.JoinInts(aids))
	rows, err := d.dbTV.Query(c, query)
	if err != nil {
		log.Error("d.dbTV.Query(%s) error(%v)", query, err)
		return nil, err
	}
	defer rows.Close()
	for rows.Next() {
		a := new(model.ArchiveTv)
		if err = rows.Scan(&a.Aid, &a.Result, &a.Deleted, &a.Valid); err != nil {
			log.Error("rows.Scan() error(%v)", err)
			return nil, err
		}
		as[a.Aid] = a
	}
	return as, rows.Err()
}
// ArchiveTVsIncrs fetches up to limit ugc_archive rows whose mtime falls in
// [begin, end] and whose id exceeds the cursor, ordered by (mtime, id).
func (d *Dao) ArchiveTVsIncrs(c context.Context, id int64, begin, end xtime.Time, limit int) ([]*model.ArchiveTv, error) {
	rows, err := d.dbTV.Query(c, _archiveTVsMtimeSQL, id, begin, end, limit)
	if err != nil {
		log.Error("d.dbTV.Query(%s,%d,%s,%s,%d) error(%v)", _archiveTVsMtimeSQL, id, begin, end, limit, err)
		return nil, err
	}
	defer rows.Close()
	as := make([]*model.ArchiveTv, 0)
	for rows.Next() {
		tv := new(model.ArchiveTv)
		if err = rows.Scan(&tv.ID, &tv.Aid, &tv.Result, &tv.Deleted, &tv.Valid); err != nil {
			log.Error("rows.Scan() error(%v)", err)
			return nil, err
		}
		as = append(as, tv)
	}
	return as, rows.Err()
}

View File

@@ -0,0 +1,132 @@
package dao
import (
"context"
"testing"
xtime "go-common/library/time"
"github.com/smartystreets/goconvey/convey"
)
// TestDaoMaxOid checks MaxOid returns a value without error against the test DB.
func TestDaoMaxOid(t *testing.T) {
	var (
		c = context.Background()
	)
	convey.Convey("MaxOid", t, func(ctx convey.C) {
		oid, err := d.MaxOid(c)
		ctx.Convey("Then err should be nil.oid should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(oid, convey.ShouldNotBeNil)
		})
	})
}
// TestDaoArchiveMetas checks cursor-paged meta retrieval with a minimal cursor and limit.
func TestDaoArchiveMetas(t *testing.T) {
	var (
		c     = context.Background()
		id    = int64(1)
		limit = int(1)
	)
	convey.Convey("ArchiveMetas", t, func(ctx convey.C) {
		p1, err := d.ArchiveMetas(c, id, limit)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}
// TestDaoArchiveMetasIncrs checks incremental meta retrieval; begin/end are
// zero-value times, exercising the empty mtime window.
func TestDaoArchiveMetasIncrs(t *testing.T) {
	var (
		c     = context.Background()
		id    = int64(1)
		begin xtime.Time
		end   xtime.Time
		limit = int(1)
	)
	convey.Convey("ArchiveMetasIncrs", t, func(ctx convey.C) {
		p1, err := d.ArchiveMetasIncrs(c, id, begin, end, limit)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}
// TestDaoArchiveTypes checks type lookup for a single-id list.
func TestDaoArchiveTypes(t *testing.T) {
	var (
		c   = context.Background()
		ids = []int64{1}
	)
	convey.Convey("ArchiveTypes", t, func(ctx convey.C) {
		p1, err := d.ArchiveTypes(c, ids)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}
// TestDaoArchiveStats checks sharded stat lookup for a single-aid list.
func TestDaoArchiveStats(t *testing.T) {
	var (
		c    = context.Background()
		aids = []int64{1}
	)
	convey.Convey("ArchiveStats", t, func(ctx convey.C) {
		p1, err := d.ArchiveStats(c, aids)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}
// TestDaoArchiveStatsIncrs checks incremental stat retrieval from shard 1
// with a zero-value mtime window.
func TestDaoArchiveStatsIncrs(t *testing.T) {
	var (
		c     = context.Background()
		tbl   = int(1)
		id    = int64(1)
		begin xtime.Time
		end   xtime.Time
		limit = int(1)
	)
	convey.Convey("ArchiveStatsIncrs", t, func(ctx convey.C) {
		p1, err := d.ArchiveStatsIncrs(c, tbl, id, begin, end, limit)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}
// TestDaoArchiveTVs checks TV-state lookup for a single-aid list.
func TestDaoArchiveTVs(t *testing.T) {
	var (
		c    = context.Background()
		aids = []int64{1}
	)
	convey.Convey("ArchiveTVs", t, func(ctx convey.C) {
		p1, err := d.ArchiveTVs(c, aids)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}
// TestDaoArchiveTVsIncrs checks incremental TV-state retrieval with a
// zero-value mtime window.
func TestDaoArchiveTVsIncrs(t *testing.T) {
	var (
		c     = context.Background()
		id    = int64(1)
		begin xtime.Time
		end   xtime.Time
		limit = int(1)
	)
	convey.Convey("ArchiveTVsIncrs", t, func(ctx convey.C) {
		p1, err := d.ArchiveTVsIncrs(c, id, begin, end, limit)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}