Create & Init Project...

This commit is contained in:
2019-04-22 18:49:16 +08:00
commit fc4fa37393
25440 changed files with 4054998 additions and 0 deletions

View File

@@ -0,0 +1,21 @@
# Bazel source filegroups for //app/admin/main/search.
# tags=["automanaged"] suggests these are maintained by tooling (gazelle-style);
# prefer regenerating over hand-editing — TODO confirm the generator in use.
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
# all-srcs aggregates this package's sources plus every subpackage's all-srcs.
filegroup(
name = "all-srcs",
srcs = [
":package-srcs",
"//app/admin/main/search/cmd:all-srcs",
"//app/admin/main/search/conf:all-srcs",
"//app/admin/main/search/dao:all-srcs",
"//app/admin/main/search/http:all-srcs",
"//app/admin/main/search/model:all-srcs",
"//app/admin/main/search/service:all-srcs",
],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,260 @@
# 运营后台搜索项目-后台
v2.1.10
1. 支持combo not
v2.1.9
1. fix archive_check time
v2.1.8
1. 添加es集群owner接口
v2.1.2
1. 用户行为日志索引多集群支持
v2.1.1
1. SetSniff false while ops log cluster
v2.1.0
1. 删除无用的v2接口 tag_update/pgc/account
v2.0.2
1. update 大数字转int64
v1.9.5
1. 支持随机种子random seed
2. IgnoreUnavailable & AllowNoIndices
v1.9.4
1. 支持like level middle
2. 优化middle
v1.9.3
1. sven搜索目录修改
v1.9.2
1. 日志显示ip 排除ip为空字符串
v1.9.1
1. 日志显示ip 数据统计
v1.9.0
1. 删除无用的v2接口, tag/blocked/dm/vip/pgc
v1.8.9
1. sven接口格式修改
v1.8.8
1. 创作中心fix
v1.8.7
1. 新增es统计接口
2. ping改成异步
3. 新增创作中心稿件接口
v1.8.6
1. 行为日志支持like查询
v1.8.5
1. 稿件title查询集群修改
v1.8.4
1. 稿件title查询
v1.8.3
1. 支持自由组合过滤combo
v1.8.2
1. fix workflow param
v1.8.0
1. remove music search api
v1.7.2
1. QueryBasic变清真
2. 支持query mode nested
3. 电竞日历优化
v1.7.1
1. fix app struct sleep type, int => float64
v1.7.0
1. 对接sven
v1.6.4
1. 换成bm的binding
2. 定制化搜索时支持调用QueryBasic
v1.6.3
1. 支持like level
2. 支持enhanced
3. 查询结果使用QueryResult支持slice或map的情况
4. like过滤掉特殊字符
5. 删除Appid
6. 支持upsert
v1.6.2
1. 规规一波
v1.6.1
1. workflow返回oid
v1.5.9
1. search/query去掉appid参数
v1.5.8
1. 支持queryExtra即查询体基础上自定义
2. 去除queryConf自定义部分的business
v1.5.7
1. search/query接口去掉校验business
v1.4.4
1. 更新es包。支持Collapse。
v1.4.3
1. 从db读取配置
v1.4.2
1. 弹幕举报group接口
v1.4.1
1. 查询体支持or和not
2. 修复日志panic bug
v1.4.0
1. 新增通用查询体Query
2. 支持接口Debug可分别在dsl前后debug
v1.3.9
1. workflow接口
v1.3.8
1. workflow上线
v1.3.7
1. 日志平台默认只查两个索引、优化
v1.3.6
1. 日志支持数组
v1.3.5
1. 修复日志bug
v1.3.4
1. 修复archive check
2. 精准搜索
v1.3.3
1. workflow |改&
v1.3.2
1. workflow 新增接口
v1.3.1
1. log group by数量fix
v1.3.0
1. log group by
2. workflow fix
v1.2.9
1. 增加owner
v1.2.8
1. dm增加参数
v1.2.7
1. log配置化
v1.2.6
1. log支持位配置化
v1.2.5
1. log新增不分表索引
v1.2.4
1. 去除account校验
v1.2.3
1. 修复log鉴权
v1.2.2
1. 修改日志集群
v1.2.1
1. copyright修改匹配度
v1.2.0
1. copyright修改查询方式
v1.1.9
1. copyright上线
v1.1.8
1. 日志平台修改权限点
2. 新增copyright接口
v1.1.7
1. 日志平台新增权限点
2. pgc接口
3. vip新增索引字段
v1.1.6
1. 增加打分
v1.1.5
1. 修复匹配度百分号位置
v1.1.4
1. 弹幕关键字搜索降低匹配模糊度
v1.1.3
1. 弹幕监控上线
v1.1.2
1. 移除stats
v1.1.1
1. 添加workflow接口参数
2. 合并后台管理接口
v1.1.0
1. 修改集群地址
v1.0.9
1. 弹幕接口上线
v1.0.8
1. 增加搜索条件
v1.0.7
1. workflow_group_common
2. workflow_chall_common
v1.0.6
1. search-interface迁到search-admin
2. 增加关键字高亮
v1.0.5
1. 解决冲突
v1.0.4
1. 修改接口参数
v1.0.3
1. blocked接口重构
v1.0.3
1. workflow模糊匹配精确度提高到80%
v1.0.2
1. 风纪委上线
v1.0.1
1. 修改tag_rounds参数
v1.0.0
1. 初始化项目,更新依赖
2. 预发和上线

View File

@@ -0,0 +1,14 @@
# Owner
liweijia
zhapuyu
renwei
guanhuaxin
# Author
guanhuaxin
# Reviewer
guanhuaxin
daizhichen
zhapuyu
libingqi

View File

@@ -0,0 +1,19 @@
# See the OWNERS docs at https://go.k8s.io/owners
approvers:
- guanhuaxin
- liweijia
- renwei
- zhapuyu
labels:
- admin
- admin/main/search
- main
options:
no_parent_owners: true
reviewers:
- daizhichen
- guanhuaxin
- libingqi
- wangjian
- zhapuyu

View File

@@ -0,0 +1,13 @@
#### search-service
##### 项目简介
> 1.主站运营搜索服务
##### 编译环境
> 请使用golang v1.7.x以上版本编译执行。
##### 依赖包
> 1.公共包go-common
##### 特别说明
> 1.model目录可能会被其他项目引用,请谨慎更改并通知各方。

View File

@@ -0,0 +1,42 @@
# Bazel build for the search-admin binary (cmd package).
# tags=["automanaged"]: presumably kept in sync by tooling — regenerate
# rather than editing by hand where possible.
load(
"@io_bazel_rules_go//go:def.bzl",
"go_library",
"go_binary",
)
package(default_visibility = ["//visibility:public"])
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)
# Library holding main.go; the test TOML is shipped as runtime data.
go_library(
name = "go_default_library",
srcs = ["main.go"],
data = ["search-admin-test.toml"],
importpath = "go-common/app/admin/main/search/cmd",
tags = ["automanaged"],
deps = [
"//app/admin/main/search/conf:go_default_library",
"//app/admin/main/search/http:go_default_library",
"//app/admin/main/search/service:go_default_library",
"//library/log:go_default_library",
"//library/net/trace:go_default_library",
],
)
# The runnable search-admin server binary.
go_binary(
name = "cmd",
embed = [":go_default_library"],
tags = ["automanaged"],
)

View File

@@ -0,0 +1,43 @@
package main
import (
"flag"
"os"
"os/signal"
"syscall"
"go-common/app/admin/main/search/conf"
"go-common/app/admin/main/search/http"
"go-common/app/admin/main/search/service"
"go-common/library/log"
"go-common/library/net/trace"
)
// main boots the search-admin server: parse flags, load config, set up
// logging and tracing, start the service + HTTP layer, then block on
// process signals until asked to exit.
func main() {
	flag.Parse()
	if err := conf.Init(); err != nil {
		panic(err)
	}
	log.Init(conf.Conf.Log)
	defer log.Close()
	trace.Init(conf.Conf.Tracer)
	defer trace.Close()
	svr := service.New(conf.Conf)
	http.Init(conf.Conf, svr)
	log.Info("search-admin start")
	c := make(chan os.Signal, 1)
	signal.Notify(c, syscall.SIGHUP, syscall.SIGQUIT, syscall.SIGTERM, syscall.SIGINT)
	for {
		s := <-c
		log.Info("search-admin get a signal %s", s.String())
		switch s {
		// BUG fix: SIGSTOP was listed here but can never be delivered to a
		// handler (the kernel does not allow catching it) and was not
		// subscribed via signal.Notify anyway — removed as dead code.
		case syscall.SIGQUIT, syscall.SIGTERM, syscall.SIGINT:
			log.Info("search-admin exit")
			return
		case syscall.SIGHUP:
			// TODO reload
		default:
			return
		}
	}
}

View File

@@ -0,0 +1,175 @@
# search-admin application configuration (TOML).
# NOTE(review): keys/secrets below are stored in plaintext — confirm these
# are test/dev credentials and rotate if they are live.
version = "0.0.1"
user = "nobody"
pid = "/tmp/search-admin.pid"
dir = "./"
perf = "127.0.0.1:8086"
env = "dev"
# file logging
[log]
dir = "/data/log/search-admin/"
# permit middleware auth against the manager/dashboard platforms
[auth]
managerHost = "http://manager.bilibili.co"
dashboardHost = "http://dashboard-mng.bilibili.co"
dashboardCaller = "manager-go"
[auth.DsHTTPClient]
key = "manager-go"
secret = "949bbb2dd3178252638c2407578bc7ad"
dial = "50ms"
timeout = "500ms"
keepAlive = "60s"
[auth.DsHTTPClient.breaker]
window = "3s"
sleep = "100ms"
bucket = 10
ratio = 0.5
request = 100
[auth.MaHTTPClient]
key = "f6433799dbd88751"
secret = "36f8ddb1806207fe07013ab6a77a3935"
dial = "50ms"
timeout = "500ms"
keepAlive = "60s"
[auth.MaHTTPClient.breaker]
window = "3s"
sleep = "100ms"
bucket = 10
ratio = 0.5
request = 100
# admin session cookie backed by memcache
[auth.session]
sessionIDLength = 32
cookieLifeTime = 86400
cookieName = "mng-go"
domain = ".bilibili.co"
[auth.session.Memcache]
name = "go-business/auth"
proto = "unix"
addr = "/tmp/shylf-manager-admin-mc.sock"
active = 10
idle = 10
dialTimeout = "1s"
readTimeout = "1s"
writeTimeout = "1s"
idleTimeout = "80s"
# blademaster HTTP server
[bm]
addr = "0.0.0.0:7311"
maxListen = 1000
timeout = "1s"
[identify]
whiteAccessKey = ""
whiteMid = 0
csrfOn = true
[identify.app]
key = "7c7ac0db1aa05587"
secret = "9a6d62d93290c5f771ad381e9ca23f26"
[identify.host]
auth = "http://passport.bilibili.com"
secret = "http://open.bilibili.com"
[identify.HTTPClient]
key = "7c7ac0db1aa05587"
secret = "9a6d62d93290c5f771ad381e9ca23f26"
dial = "1s"
timeout = "1s"
keepAlive = "60s"
timer = 1000
[identify.HTTPClient.breaker]
window ="10s"
sleep ="10ms"
bucket = 10
ratio = 0.5
request = 100
# default/maximum paging bounds used by handlers
[pagination]
pageNum = 1
pageSize = 30
maxPageNum = 5000
maxPageSize = 1000
# MySQL (bilibili_search) used for query configuration
[db]
[db.search]
addr = "172.16.33.205:3308"
dsn = "test:test@tcp(172.16.33.205:3308)/bilibili_search?timeout=10s&readTimeout=10s&writeTimeout=10s&parseTime=true&loc=Local&charset=utf8"
active = 5
idle = 5
idleTimeout = "4h"
queryTimeout = "10s"
execTimeout = "10s"
tranTimeout = "10s"
[db.search.breaker]
window = "10s"
sleep = "100ms"
bucket = 10
ratio = 0.5
request = 100
# Elasticsearch clusters, keyed by cluster name (see conf.EsInfo)
[es]
[es.pcie_dm_in]
addr = ["http://172.22.33.127:9201"]
cluster = "pcie_dm_in"
owner = "guanhuaxin,daizhichen,libingqi,zhapuyu"
[es.pcie_dm_out]
addr = ["http://172.22.33.127:9201"]
owner = "guanhuaxin,daizhichen,libingqi"
[es.log]
addr = ["http://172.22.33.120:9201"]
owner = "guanhuaxin,daizhichen,libingqi"
[es.log_live]
addr = ["http://172.22.33.120:9201"]
[es.pcie_pub_out01]
addr = ["http://172.22.33.120:9201"]
[es.pcie_fav]
addr = ["http://172.22.33.120:9201"]
[es.ssd_reply_in]
addr = ["http://172.22.33.120:9201"]
[es.ssd_reply_out]
addr = ["http://172.22.33.120:9201"]
[es.ssd_archive]
addr = ["http://172.22.33.120:9201"]
[es.ssd_pub_in01]
addr = ["http://172.22.33.120:9201"]
[es.ssd_pub_in02]
addr = ["http://172.18.33.71:9200"]
[es.open]
addr = ["http://172.18.33.71:9200"]
# ops_log sits behind a URL path; dao.newEsPool disables sniffing for it
[es.ops_log]
addr = ["http://ops-log.bilibili.co/elasticsearch/"]
[es.creative_reply]
addr = ["http://172.18.33.71:9200"]
[redis]
name = "search-api"
proto = "tcp"
addr = "172.18.33.71:6379"
active = 10
idle = 5
dialTimeout = "1s"
readTimeout = "1s"
writeTimeout = "1s"
idleTimeout = "10s"
expire = "24h"
# alerting SMS; interval is presumably seconds between sends — TODO confirm
[sms]
phone = "17621966518"
token = "f5a658b2-5926-4b71-96c3-7d3777b7d256"
interval = 300
# upstream hosts used to resolve manager/location endpoints (dao.New)
[prop]
manager = "http://uat-manager.bilibili.co/"
api = "http://api.bilibili.co/"
[httpClient]
key = "ad4bb9b8f5d9d4a7"
secret = "6912080d78d58be7cb94f57d50d438f6"
dial = "1s"
timeout = "10s"
keepAlive = "60s"
timer = 1000
[httpClient.breaker]
window = "10s"
sleep = "100ms"
bucket = 10
ratio = 0.5
request = 100

View File

@@ -0,0 +1,37 @@
# Bazel build for the conf package.
# tags=["automanaged"]: presumably kept in sync by tooling — regenerate
# rather than editing by hand where possible.
load(
"@io_bazel_rules_go//go:def.bzl",
"go_library",
)
package(default_visibility = ["//visibility:public"])
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)
go_library(
name = "go_default_library",
srcs = ["conf.go"],
importpath = "go-common/app/admin/main/search/conf",
tags = ["automanaged"],
deps = [
"//library/conf:go_default_library",
"//library/database/sql:go_default_library",
"//library/ecode/tip:go_default_library",
"//library/log:go_default_library",
"//library/net/http/blademaster:go_default_library",
"//library/net/http/blademaster/middleware/permit:go_default_library",
"//library/net/trace:go_default_library",
"//vendor/github.com/BurntSushi/toml:go_default_library",
],
)

View File

@@ -0,0 +1,128 @@
package conf
import (
"errors"
"flag"
"go-common/library/conf"
"go-common/library/database/sql"
ecode "go-common/library/ecode/tip"
"go-common/library/log"
bm "go-common/library/net/http/blademaster"
"go-common/library/net/http/blademaster/middleware/permit"
"go-common/library/net/trace"
"github.com/BurntSushi/toml"
)
var (
// confPath is the local config file path supplied via the -conf flag;
// empty means "load from the remote config center" (see Init/remote).
confPath string
// Conf is the global, process-wide configuration. It is populated by
// Init and atomically swapped on remote reloads (see load).
Conf = &Config{}
// client talks to the remote config center when confPath is unset.
client *conf.Client
)
// Pagination holds default and maximum paging bounds for list handlers.
type Pagination struct {
PageNum int
PageSize int
MaxPageNum int
MaxPageSize int
}
// Config is the root configuration object decoded from TOML.
type Config struct {
Auth *permit.Config
// base
// xlog
Log *log.Config
// tracer
Tracer *trace.Config
// http
BM *bm.ServerConfig
// es cluster: cluster name -> connection/ownership info
Es map[string]*EsInfo
// ecode
Ecode *ecode.Config
Pagination *Pagination
// sms
SMS *SMS
// db
DB *DB
// httpclient
HTTPClient *bm.ClientConfig
Prop *Properties
}
// Properties holds upstream host prefixes (manager platform and API
// gateway) used to build absolute endpoint URLs.
type Properties struct {
Manager string
API string
}
// EsInfo describes one Elasticsearch cluster: its node addresses, the
// cluster name, and a comma-separated owner list.
type EsInfo struct {
Addr []string
Cluster string
Owner string
}
// SMS config for alerting; Interval is presumably the minimum seconds
// between sends — TODO confirm against the sms sender.
type SMS struct {
Phone string
Token string
Interval int64
}
func init() {
flag.StringVar(&confPath, "conf", "", "default config path")
}
// Init loads configuration into Conf, preferring the local file named
// by the -conf flag and falling back to the remote config center when
// no file was given.
func Init() (err error) {
	if confPath == "" {
		return remote()
	}
	_, err = toml.DecodeFile(confPath, &Conf)
	return
}
// remote connects to the config center, performs the initial load, and
// spawns a watcher goroutine that reloads configuration on every
// config-center event. The watcher lives for the whole process.
func remote() (err error) {
	if client, err = conf.New(); err != nil {
		return
	}
	if err = load(); err != nil {
		return
	}
	go func() {
		for range client.Event() {
			log.Info("config reload")
			// BUG fix: this previously logged the outer `err`, which is
			// always nil by the time the goroutine runs, instead of the
			// error actually returned by load().
			if e := load(); e != nil {
				log.Error("config reload error (%v)", e)
			}
		}
	}()
	return
}
func load() (err error) {
var (
s string
ok bool
tmpConf = &Config{}
)
if s, ok = client.Toml2(); !ok {
return errors.New("load config center error")
}
if _, err = toml.Decode(s, tmpConf); err != nil {
return errors.New("could not decode config")
}
*Conf = *tmpConf
return
}
// DB groups database configuration; Search is the bilibili_search MySQL
// instance (the old comment said "workflow db", which is misleading here).
type DB struct {
Search *sql.Config
}

View File

@@ -0,0 +1,72 @@
load(
"@io_bazel_rules_go//go:def.bzl",
"go_test",
"go_library",
)
package(default_visibility = ["//visibility:public"])
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)
go_test(
name = "go_default_test",
srcs = [
"archive_test.go",
"dao_test.go",
"es_test.go",
"log_test.go",
"mng_test.go",
"mng_v2_test.go",
"query_extra_test.go",
"query_test.go",
],
embed = [":go_default_library"],
rundir = ".",
tags = ["automanaged"],
deps = [
"//app/admin/main/search/conf:go_default_library",
"//app/admin/main/search/model:go_default_library",
"//vendor/github.com/smartystreets/goconvey/convey:go_default_library",
"//vendor/gopkg.in/olivere/elastic.v5:go_default_library",
],
)
go_library(
name = "go_default_library",
srcs = [
"archive.go",
"dao.go",
"es.go",
"log.go",
"mng.go",
"mng_v2.go",
"query.go",
"query_extra.go",
],
importpath = "go-common/app/admin/main/search/dao",
tags = ["automanaged"],
deps = [
"//app/admin/main/search/conf:go_default_library",
"//app/admin/main/search/model:go_default_library",
"//library/database/sql:go_default_library",
"//library/ecode:go_default_library",
"//library/log:go_default_library",
"//library/net/http/blademaster:go_default_library",
"//library/stat/prom:go_default_library",
"//library/sync/errgroup:go_default_library",
"//vendor/github.com/pkg/errors:go_default_library",
"//vendor/gopkg.in/olivere/elastic.v5:go_default_library",
],
)

View File

@@ -0,0 +1,401 @@
package dao
import (
"context"
"encoding/json"
"fmt"
"time"
"go-common/app/admin/main/search/model"
"gopkg.in/olivere/elastic.v5"
)
// ArchiveCheck search archive check from ES.
// It assembles a bool query from the populated filter fields of p and
// executes it against the "archivecheck" index on the ssd_archive cluster.
func (d *Dao) ArchiveCheck(c context.Context, p *model.ArchiveCheckParams) (res *model.SearchResult, err error) {
	query := elastic.NewBoolQuery()
	// Keyword matching: the multi-keyword form takes precedence over the
	// single KW field (the original noted 高级搜索比下面的高 — advanced
	// search outranks the simple one).
	switch {
	case len(p.Bsp.KWs) > 0:
		for _, kw := range p.Bsp.KWs {
			if p.Bsp.Pattern == "equal" {
				query = query.Must(elastic.NewMultiMatchQuery(kw, p.Bsp.KwFields...).Type("best_fields").TieBreaker(0.3).MinimumShouldMatch("100%"))
			} else {
				query = query.Should(elastic.NewMultiMatchQuery(kw, p.Bsp.KwFields...).Type("best_fields").TieBreaker(0.3).MinimumShouldMatch("80%")).MinimumNumberShouldMatch(1)
			}
		}
	case p.Bsp.KW != "":
		query = query.Must(elastic.NewMultiMatchQuery(p.Bsp.KW, p.Bsp.KwFields...).Type("best_fields").TieBreaker(0.3).MinimumShouldMatch("100%"))
	}
	if p.FromIP != "" {
		query = query.Must(elastic.NewQueryStringQuery("*" + p.FromIP + "*").AllowLeadingWildcard(true).Field("from_ip"))
	}
	// terms appends a terms filter for a non-empty int64 slice, converting
	// it to the []interface{} shape TermsQuery requires.
	terms := func(field string, vals []int64) {
		if len(vals) == 0 {
			return
		}
		args := make([]interface{}, 0, len(vals))
		for _, v := range vals {
			args = append(args, v)
		}
		query = query.Filter(elastic.NewTermsQuery(field, args...))
	}
	terms("aid", p.Aids)
	terms("typeid", p.TypeIds)
	terms("attribute", p.Attrs)
	terms("state", p.States)
	terms("mid", p.Mids)
	if p.MidFrom > 0 {
		query = query.Filter(elastic.NewRangeQuery("mid").Gte(p.MidFrom))
	}
	if p.MidTo > 0 {
		query = query.Filter(elastic.NewRangeQuery("mid").Lte(p.MidTo))
	}
	if p.DurationFrom > 0 {
		query = query.Filter(elastic.NewRangeQuery("duration").Gte(p.DurationFrom))
	}
	if p.DurationTo > 0 {
		query = query.Filter(elastic.NewRangeQuery("duration").Lte(p.DurationTo))
	}
	// The time window only applies to a whitelisted set of time fields.
	if p.Time == "ctime" || p.Time == "mtime" || p.Time == "pubtime" {
		if p.TimeFrom != "" {
			query = query.Filter(elastic.NewRangeQuery(p.Time).Gte(p.TimeFrom))
		}
		if p.TimeTo != "" {
			query = query.Filter(elastic.NewRangeQuery(p.Time).Lte(p.TimeTo))
		}
	}
	if res, err = d.searchResult(c, "ssd_archive", "archivecheck", query, p.Bsp); err != nil {
		PromError(fmt.Sprintf("es:%s ", p.Bsp.AppID), "%v", err)
	}
	return
}
// Video search video from ES (deprecated).
// Builds a bool query from the populated fields of p and runs it against
// the "archive_video" index on the ssd_archive cluster. When OrderType is
// 1, results are re-scored by upload-date decay instead of the caller's
// sort order.
func (d *Dao) Video(c context.Context, p *model.VideoParams) (res *model.SearchResult, err error) {
	query := elastic.NewBoolQuery()
	if p.Bsp.KW != "" {
		query = query.Must(elastic.NewMultiMatchQuery(p.Bsp.KW, p.Bsp.KwFields...).Type("best_fields").TieBreaker(0.3))
	}
	if len(p.VIDs) > 0 {
		interfaceSlice := make([]interface{}, len(p.VIDs))
		for i, d := range p.VIDs {
			interfaceSlice[i] = d
		}
		query = query.Filter(elastic.NewTermsQuery("vid", interfaceSlice...))
	}
	if len(p.AIDs) > 0 {
		interfaceSlice := make([]interface{}, len(p.AIDs))
		for i, d := range p.AIDs {
			interfaceSlice[i] = d
		}
		query = query.Filter(elastic.NewTermsQuery("aid", interfaceSlice...))
	}
	if len(p.CIDs) > 0 {
		interfaceSlice := make([]interface{}, len(p.CIDs))
		for i, d := range p.CIDs {
			interfaceSlice[i] = d
		}
		query = query.Filter(elastic.NewTermsQuery("cid", interfaceSlice...))
	}
	if len(p.TIDs) > 0 {
		interfaceSlice := make([]interface{}, len(p.TIDs))
		for i, d := range p.TIDs {
			interfaceSlice[i] = d
		}
		query = query.Filter(elastic.NewTermsQuery("arc_typeid", interfaceSlice...))
	}
	if len(p.FileNames) > 0 {
		interfaceSlice := make([]interface{}, len(p.FileNames))
		for i, d := range p.FileNames {
			interfaceSlice[i] = d
		}
		query = query.Filter(elastic.NewTermsQuery("filename", interfaceSlice...))
	}
	if len(p.RelationStates) > 0 {
		interfaceSlice := make([]interface{}, len(p.RelationStates))
		for i, d := range p.RelationStates {
			interfaceSlice[i] = d
		}
		query = query.Filter(elastic.NewTermsQuery("relation_state", interfaceSlice...))
	}
	// BUG fix: this arc_mid terms filter was previously appended twice by
	// two identical consecutive blocks, duplicating the clause in the
	// generated query body.
	if len(p.ArcMids) > 0 {
		interfaceSlice := make([]interface{}, len(p.ArcMids))
		for i, d := range p.ArcMids {
			interfaceSlice[i] = d
		}
		query = query.Filter(elastic.NewTermsQuery("arc_mid", interfaceSlice...))
	}
	if p.TagID > 0 {
		query = query.Filter(elastic.NewTermQuery("tag_id", p.TagID))
	}
	if len(p.Status) > 0 {
		interfaceSlice := make([]interface{}, len(p.Status))
		for i, d := range p.Status {
			interfaceSlice[i] = d
		}
		query = query.Filter(elastic.NewTermsQuery("status", interfaceSlice...))
	}
	if len(p.XCodeState) > 0 {
		interfaceSlice := make([]interface{}, len(p.XCodeState))
		for i, d := range p.XCodeState {
			interfaceSlice[i] = d
		}
		query = query.Filter(elastic.NewTermsQuery("xcode_state", interfaceSlice...))
	}
	// arc_mid is no longer filtered through the database.
	if p.UserType > 0 {
		query = query.Filter(elastic.NewTermQuery("user_type", p.UserType))
	}
	if p.DurationFrom > 0 {
		query = query.Filter(elastic.NewRangeQuery("duration").Gte(p.DurationFrom))
	}
	if p.DurationTo > 0 {
		query = query.Filter(elastic.NewRangeQuery("duration").Lte(p.DurationTo))
	}
	if p.OrderType == 1 {
		// Recency scoring: exponential decay of arc_senddate anchored at
		// 2015-01-01 (1420041600), boosting user_type==1 docs 10000x.
		diffs := time.Now().Unix() - 1420041600
		days := fmt.Sprintf("%dd", diffs/(3600*24))
		score := elastic.NewFunctionScoreQuery().Add(elastic.NewTermQuery("user_type", 1), elastic.NewExponentialDecayFunction().FieldName("arc_senddate").Origin("2015-01-01 00:00:00").Scale(days).Offset("1d").Decay(0.8).Weight(float64(10000))).Add(nil, elastic.NewExponentialDecayFunction().FieldName("arc_senddate").Origin("2015-01-01 00:00:00").Scale(days).Offset("1d").Decay(0.8).Weight(float64(1)))
		query = query.Must(score)
		p.Bsp.Order = []string{}
	}
	if res, err = d.searchResult(c, "ssd_archive", "archive_video", query, p.Bsp); err != nil {
		PromError(fmt.Sprintf("es:%s ", p.Bsp.AppID), "%v", err)
	}
	return
}
// TaskQa builds the task-qa filter query from p and runs it against the
// ssd_archive cluster under the caller-supplied Bsp.AppID index.
func (d *Dao) TaskQa(c context.Context, p *model.TaskQa) (res *model.SearchResult, err error) {
	query := elastic.NewBoolQuery()
	if p.Bsp.KW != "" {
		query = query.Must(elastic.NewMultiMatchQuery(p.Bsp.KW, p.Bsp.KwFields...).Type("best_fields").TieBreaker(0.3))
	}
	// termsInt64/termsStr append a terms filter when the slice is non-empty.
	termsInt64 := func(field string, vals []int64) {
		if len(vals) == 0 {
			return
		}
		args := make([]interface{}, 0, len(vals))
		for _, v := range vals {
			args = append(args, v)
		}
		query = query.Filter(elastic.NewTermsQuery(field, args...))
	}
	termsStr := func(field string, vals []string) {
		if len(vals) == 0 {
			return
		}
		args := make([]interface{}, 0, len(vals))
		for _, v := range vals {
			args = append(args, v)
		}
		query = query.Filter(elastic.NewTermsQuery(field, args...))
	}
	termsInt64("id", p.Ids)
	termsStr("task_id", p.TaskIds)
	termsStr("uid", p.Uids)
	termsStr("arc_tagid", p.ArcTagIds)
	termsInt64("audit_tagid", p.AuditTagIds)
	termsStr("up_groups", p.UpGroups)
	termsStr("arc_title", p.ArcTitles)
	termsStr("arc_typeid", p.ArcTypeIds)
	termsStr("state", p.States)
	termsStr("audit_status", p.AuditStatuses)
	// Range windows: fans count plus the ctime/ftime time windows, in the
	// same order the original applied them.
	windows := []struct{ field, gte, lte string }{
		{"fans", p.FansFrom, p.FansTo},
		{"ctime", p.CtimeFrom, p.CtimeTo},
		{"ftime", p.FtimeFrom, p.FtimeTo},
	}
	for _, w := range windows {
		if w.gte != "" {
			query = query.Filter(elastic.NewRangeQuery(w.field).Gte(w.gte))
		}
		if w.lte != "" {
			query = query.Filter(elastic.NewRangeQuery(w.field).Lte(w.lte))
		}
	}
	if res, err = d.searchResult(c, "ssd_archive", p.Bsp.AppID, query, p.Bsp); err != nil {
		PromError(fmt.Sprintf("es:%s ", p.Bsp.AppID), "%v", err)
	}
	return
}
// ArchiveCommerce queries commerce archives. When Action is
// "get_ptypeids" it delegates to the ptypeid aggregation instead of a
// normal paged search.
func (d *Dao) ArchiveCommerce(c context.Context, p *model.ArchiveCommerce) (res *model.SearchResult, err error) {
	query := elastic.NewBoolQuery()
	if p.Bsp.KW != "" {
		query = query.Must(elastic.NewMultiMatchQuery(p.Bsp.KW, p.Bsp.KwFields...).Type("best_fields").TieBreaker(0.3))
	}
	// filterTerms appends a terms filter for a non-empty string slice.
	filterTerms := func(field string, vals []string) {
		if len(vals) == 0 {
			return
		}
		args := make([]interface{}, 0, len(vals))
		for _, v := range vals {
			args = append(args, v)
		}
		query = query.Filter(elastic.NewTermsQuery(field, args...))
	}
	filterTerms("id", p.Ids)
	filterTerms("mid", p.Mids)
	filterTerms("ptypeid", p.PTypeIds)
	filterTerms("typeid", p.TypeIds)
	filterTerms("state", p.States)
	filterTerms("copyright", p.Copyrights)
	filterTerms("order_id", p.OrderIds)
	// Tri-state flags: 1 forces the condition, 0 negates it, any other
	// value leaves it unfiltered.
	switch p.IsOrder {
	case 1:
		query = query.Filter(elastic.NewRangeQuery("order_id").Gt(0))
	case 0:
		query = query.MustNot(elastic.NewRangeQuery("order_id").Gt(0))
	}
	switch p.IsOriginal {
	case 1:
		query = query.Filter(elastic.NewTermsQuery("copyright", 1))
	case 0:
		query = query.MustNot(elastic.NewTermsQuery("copyright", 1))
	}
	if p.Action == "get_ptypeids" {
		if res, err = d.ArchiveCommercePTypeIds(c, query); err != nil {
			PromError(fmt.Sprintf("es:%s ", p.Bsp.AppID), "%v", err)
		}
		return
	}
	if res, err = d.searchResult(c, "ssd_archive", "archive_commerce_v", query, p.Bsp); err != nil {
		PromError(fmt.Sprintf("es:%s ", p.Bsp.AppID), "%v", err)
	}
	return
}
// ArchiveCommercePTypeIds aggregates the distinct ptypeid values (up to
// 1000 buckets) matching query on the archive_commerce_v index and
// returns them as the result list, with a fixed single page.
func (d *Dao) ArchiveCommercePTypeIds(c context.Context, query *elastic.BoolQuery) (res *model.SearchResult, err error) {
	res = &model.SearchResult{
		Result: []json.RawMessage{},
		Page:   &model.Page{},
	}
	aggs := elastic.NewTermsAggregation().Field("ptypeid").Size(1000)
	if _, ok := d.esPool["ssd_archive"]; !ok {
		PromError(fmt.Sprintf("es:集群不存在%s", "ssd_archive"), "s.dao.searchResult indexName:%s", "ssd_archive")
		res = &model.SearchResult{Debug: fmt.Sprintf("es:集群不存在%s, %s", "ssd_archive", res.Debug)}
		return
	}
	// BUG fix: use the caller's context instead of context.Background() so
	// cancellation and deadlines propagate to the ES request.
	searchResult, err := d.esPool["ssd_archive"].Search().Index("archive_commerce_v").Query(query).Aggregation("group_by_ptypeid", aggs).Size(0).Do(c)
	if err != nil {
		PromError(fmt.Sprintf("es:执行查询失败%s ", "ArchiveCommercePTypeIds"), "dao.log.ArchiveCommercePTypeIds(%v)", err)
		return
	}
	result, ok := searchResult.Aggregations.Terms("group_by_ptypeid")
	if !ok {
		// BUG fix: this branch previously formatted the (nil) err; report
		// the actual condition — the aggregation is missing/unparseable.
		PromError(fmt.Sprintf("es:Unmarshal%s ", "log"), "es:Unmarshal terms aggregation group_by_ptypeid missing")
		return
	}
	for _, v := range result.Buckets {
		res.Result = append(res.Result, []byte(v.Key.(string)))
	}
	res.Page.Pn = 1
	res.Page.Ps = 1000
	res.Page.Total = int64(len(res.Result))
	return
}

View File

@@ -0,0 +1,151 @@
package dao
import (
"context"
"go-common/app/admin/main/search/model"
"testing"
"github.com/smartystreets/goconvey/convey"
"gopkg.in/olivere/elastic.v5"
)
// TestDaoArchiveCheck drives Dao.ArchiveCheck with every filter field
// populated; it only asserts the call returns no error (requires the
// ES cluster from the test config to be reachable).
func TestDaoArchiveCheck(t *testing.T) {
convey.Convey("ArchiveCheck", t, func(ctx convey.C) {
var (
c = context.Background()
p = &model.ArchiveCheckParams{
Bsp: &model.BasicSearchParams{},
Aids: []int64{0},
TypeIds: []int64{0},
Attrs: []int64{0},
States: []int64{0},
Mids: []int64{0},
MidFrom: 1,
MidTo: 1,
DurationFrom: 1,
DurationTo: 1,
TimeFrom: "0001-01-01 00:00:00",
TimeTo: "0001-01-01 00:00:00",
Time: "ctime",
}
)
ctx.Convey("When everything goes positive", func(ctx convey.C) {
_, err := d.ArchiveCheck(c, p)
ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
// res assertion intentionally disabled (result depends on live data)
//ctx.So(res, convey.ShouldNotBeNil)
})
})
})
}
// TestDaoVideo drives the deprecated Dao.Video with every filter field
// populated (OrderType=1 also exercises the decay-scoring branch); only
// the error is asserted.
func TestDaoVideo(t *testing.T) {
convey.Convey("Video", t, func(ctx convey.C) {
var (
c = context.Background()
p = &model.VideoParams{
Bsp: &model.BasicSearchParams{},
VIDs: []int64{0},
AIDs: []int64{0},
CIDs: []int64{0},
TIDs: []int64{0},
FileNames: []string{""},
RelationStates: []int64{0},
ArcMids: []int64{0},
TagID: 1,
Status: []int64{0},
XCodeState: []int64{0},
UserType: 0,
DurationFrom: 1,
DurationTo: 1,
OrderType: 1,
}
)
ctx.Convey("When everything goes positive", func(ctx convey.C) {
_, err := d.Video(c, p)
ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
// res assertion intentionally disabled (result depends on live data)
//ctx.So(res, convey.ShouldNotBeNil)
})
})
})
}
// TestDaoTaskQa drives Dao.TaskQa with all filters set and asserts both
// a nil error and a non-nil result (needs a live ES cluster).
func TestDaoTaskQa(t *testing.T) {
convey.Convey("TaskQa", t, func(ctx convey.C) {
var (
c = context.Background()
p = &model.TaskQa{
Bsp: &model.BasicSearchParams{
AppID: "task_qa",
},
Ids: []int64{0},
TaskIds: []string{""},
Uids: []string{""},
ArcTagIds: []string{""},
AuditTagIds: []int64{0},
UpGroups: []string{""},
ArcTitles: []string{""},
ArcTypeIds: []string{""},
States: []string{""},
AuditStatuses: []string{""},
FansFrom: "0",
FansTo: "0",
CtimeFrom: "0001-01-01 00:00:00",
CtimeTo: "0001-01-01 00:00:00",
FtimeFrom: "0001-01-01 00:00:00",
}
)
ctx.Convey("When everything goes positive", func(ctx convey.C) {
res, err := d.TaskQa(c, p)
ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
ctx.So(res, convey.ShouldNotBeNil)
})
})
})
}
// TestDaoArchiveCommerce exercises the "get_ptypeids" action path of
// Dao.ArchiveCommerce (which delegates to ArchiveCommercePTypeIds).
func TestDaoArchiveCommerce(t *testing.T) {
convey.Convey("ArchiveCommerce", t, func(ctx convey.C) {
var (
c = context.Background()
p = &model.ArchiveCommerce{
Bsp: &model.BasicSearchParams{},
Ids: []string{"0"},
Mids: []string{"0"},
PTypeIds: []string{"0"},
TypeIds: []string{"0"},
States: []string{"0"},
Copyrights: []string{"0"},
OrderIds: []string{"0"},
IsOrder: 1,
IsOriginal: 1,
Action: "get_ptypeids",
}
)
ctx.Convey("When everything goes positive", func(ctx convey.C) {
res, err := d.ArchiveCommerce(c, p)
ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
ctx.So(res, convey.ShouldNotBeNil)
})
})
})
}
// TestDaoArchiveCommercePTypeIds runs the ptypeid aggregation with an
// empty bool query and asserts a nil error and non-nil result.
func TestDaoArchiveCommercePTypeIds(t *testing.T) {
convey.Convey("ArchiveCommercePTypeIds", t, func(ctx convey.C) {
var (
c = context.Background()
query = &elastic.BoolQuery{}
)
ctx.Convey("When everything goes positive", func(ctx convey.C) {
res, err := d.ArchiveCommercePTypeIds(c, query)
ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
ctx.So(res, convey.ShouldNotBeNil)
})
})
})
}

View File

@@ -0,0 +1,111 @@
package dao
import (
"context"
"go-common/app/admin/main/search/conf"
"go-common/library/database/sql"
"go-common/library/log"
bm "go-common/library/net/http/blademaster"
"go-common/library/stat/prom"
"go-common/library/sync/errgroup"
"gopkg.in/olivere/elastic.v5"
)
const (
// Manager-platform endpoint paths, resolved against Prop.Manager /
// Prop.API in New to form absolute URLs.
_managerDep = "/x/admin/manager/users/udepts"
_managerUnames = "/x/admin/manager/users/unames"
_managerIP = "/x/location/infos"
)
// Dao bundles every downstream the admin search service talks to: the
// per-cluster ES client pool, the search MySQL DB, an HTTP client for
// the manager platform, and a prepared statement for query config.
type Dao struct {
c *conf.Config
esPool map[string]*elastic.Client
db *sql.DB
client *bm.Client
managerDep string
managerUnames string
managerIP string
queryConfStmt *sql.Stmt
}
// New init dao.
// Wires up DB, HTTP client and manager endpoints, connects the ES pool,
// starts the background log pipeline, and prepares the query-conf
// statement. Initialization order matters: esPool must exist before
// NewLog/NewLogProcess run — presumably they write through it (TODO
// confirm against log.go, not visible here).
func New(c *conf.Config) (d *Dao) {
d = &Dao{
c: c,
db: sql.NewMySQL(c.DB.Search),
client: bm.NewClient(c.HTTPClient),
managerDep: c.Prop.Manager + _managerDep,
managerUnames: c.Prop.Manager + _managerUnames,
managerIP: c.Prop.API + _managerIP,
}
d.esPool = newEsPool(c, d)
d.NewLog()
// long-lived background goroutine; no shutdown hook is provided here
go d.NewLogProcess()
d.queryConfStmt = d.db.Prepared(_queryConfSQL)
return
}
// BulkItem identifies a document for a bulk ES update: target index,
// mapping type, and document id.
type BulkItem interface {
IndexName() string
IndexType() string
IndexID() string
}
// BulkMapItem is a BulkItem whose partial document body is expressed as
// a field map (used for doc-as-upsert updates).
type BulkMapItem interface {
IndexName() string
IndexType() string
IndexID() string
PField() map[string]interface{}
}
// newEsPool dials every ES cluster declared in configuration and returns
// the name -> client map. Clusters that fail to connect are counted,
// logged, and skipped rather than aborting startup. (d is currently
// unused; kept to preserve the call signature.)
func newEsPool(c *conf.Config, d *Dao) (esCluster map[string]*elastic.Client) {
	esCluster = make(map[string]*elastic.Client)
	for esName, e := range c.Es {
		opts := []elastic.ClientOptionFunc{elastic.SetURL(e.Addr...)}
		if esName == "ops_log" {
			// ops_log is addressed via a URL path (see config); sniffing is
			// disabled for it — presumably node sniffing would rewrite the
			// proxied address. TODO confirm.
			opts = append(opts, elastic.SetSniff(false))
		}
		cli, err := elastic.NewClient(opts...)
		if err != nil {
			PromError("es:集群连接失败", "cluster: %s, %v", esName, err)
			continue
		}
		esCluster[esName] = cli
	}
	return
}
// PromError prometheus error count.
func PromError(name, format string, args ...interface{}) {
prom.BusinessErrCount.Incr(name)
log.Error(format, args...)
}
// Ping health-checks the DB and every ES cluster in parallel and returns
// the first failure (nil when all are healthy). Failures are also
// counted/logged via PromError.
func (d *Dao) Ping(c context.Context) (err error) {
	group := errgroup.Group{}
	group.Go(func() (err error) {
		// TODO: consider using the caller's context c instead of Background
		// so health checks honor deadlines.
		if err = d.db.Ping(context.Background()); err != nil {
			PromError("DB:Ping", "DB:Ping error(%v)", err)
		}
		return
	})
	for name, client := range d.esPool {
		// BUG fix: capture per-iteration copies. Under pre-Go-1.22 loop
		// semantics every closure shared the same name/client variables,
		// so all goroutines could ping only the last cluster.
		name, client := name, client
		group.Go(func() (err error) {
			if _, _, err = client.Ping(d.c.Es[name].Addr[0]).Do(context.Background()); err != nil {
				PromError("Es:Ping", "%s:Ping error(%v)", name, err)
			}
			return
		})
	}
	return group.Wait()
}

View File

@@ -0,0 +1,34 @@
package dao
import (
"flag"
"go-common/app/admin/main/search/conf"
"os"
"testing"
)
var (
d *Dao
)
// TestMain wires up configuration before running the dao tests: inside a
// deploy environment it points the config SDK at the UAT config service,
// otherwise it falls back to the local toml file.
func TestMain(m *testing.M) {
	if os.Getenv("DEPLOY_ENV") != "" {
		flag.Set("app_id", "main.search.search-admin")
		flag.Set("conf_token", "TH0EaGzhzup2cfu0S7yE7qUhZg1aYxlR")
		flag.Set("tree_id", "7627")
		flag.Set("conf_version", "docker-1")
		flag.Set("deploy_env", "uat")
		flag.Set("conf_host", "config.bilibili.co")
		flag.Set("conf_path", "/tmp")
		flag.Set("region", "sh")
		flag.Set("zone", "sh001")
	} else {
		flag.Set("conf", "../cmd/search-admin-test.toml")
	}
	flag.Parse()
	if err := conf.Init(); err != nil {
		panic(err)
	}
	// package-level dao shared by all tests in this package
	d = New(conf.Conf)
	os.Exit(m.Run())
}

View File

@@ -0,0 +1,505 @@
package dao
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"strings"
"go-common/app/admin/main/search/model"
"go-common/library/log"
"gopkg.in/olivere/elastic.v5"
)
// UpdateMapBulk bulk-upserts partial documents (PField) into esName.
//
// Deprecated: use UpsertBulk instead.
func (d *Dao) UpdateMapBulk(c context.Context, esName string, bulkData []BulkMapItem) (err error) {
	bulk := d.esPool[esName].Bulk()
	for _, item := range bulkData {
		bulk.Add(elastic.NewBulkUpdateRequest().
			Index(item.IndexName()).
			Type(item.IndexType()).
			Id(item.IndexID()).
			Doc(item.PField()).
			DocAsUpsert(true))
	}
	if _, err = bulk.Do(c); err != nil {
		log.Error("esName(%s) bulk error(%v)", esName, err)
	}
	return
}
// UpdateBulk bulk-upserts whole documents (the item itself is the body).
//
// Deprecated: use UpsertBulk instead.
func (d *Dao) UpdateBulk(c context.Context, esName string, bulkData []BulkItem) (err error) {
	bulk := d.esPool[esName].Bulk()
	for _, item := range bulkData {
		bulk.Add(elastic.NewBulkUpdateRequest().
			Index(item.IndexName()).
			Type(item.IndexType()).
			Id(item.IndexID()).
			Doc(item).
			DocAsUpsert(true))
	}
	if _, err = bulk.Do(c); err != nil {
		log.Error("esName(%s) bulk error(%v)", esName, err)
	}
	return
}
// UpsertBulk replaces UpdateMapBulk and UpdateBulk: it bulk-updates the
// documents in up.UpsertBody on esCluster; when up.Insert is set, missing
// documents are created (doc_as_upsert). An unknown cluster is only logged
// and returns a nil error, matching historical behavior.
func (d *Dao) UpsertBulk(c context.Context, esCluster string, up *model.UpsertParams) (err error) {
	es, ok := d.esPool[esCluster]
	if !ok {
		log.Error("esCluster(%s) not exists", esCluster)
		return
	}
	bulk := es.Bulk()
	for _, item := range up.UpsertBody {
		req := elastic.NewBulkUpdateRequest().
			Index(item.IndexName).
			Type(item.IndexType).
			Id(item.IndexID).
			Doc(item.Doc)
		if up.Insert {
			req.DocAsUpsert(true)
		}
		bulk.Add(req)
	}
	if _, err = bulk.Do(c); err != nil {
		log.Error("esCluster(%s) bulk error(%v)", esCluster, err)
	}
	return
}
// searchResult get result from ES. (Deprecated) — to be deleted after the v3 migration.
// Builds sorters/highlight/paging from bsp, runs query against indexName on
// esClusterName and unpacks each hit into res.Result. When bsp.Debug is set
// the serialized DSL is echoed via res.Debug.
func (d *Dao) searchResult(c context.Context, esClusterName, indexName string, query elastic.Query, bsp *model.BasicSearchParams) (res *model.SearchResult, err error) {
	res = &model.SearchResult{Debug: ""}
	if bsp.Debug {
		// echo the DSL source in the response for debugging
		if src, e := query.Source(); e == nil {
			if data, er := json.Marshal(src); er == nil {
				res = &model.SearchResult{Debug: string(data)}
			} else {
				err = er
				log.Error("searchResult query.Source.json.Marshal error(%v)", err)
				return
			}
		} else {
			err = e
			log.Error("searchResult query.Source error(%v)", err)
			return
		}
	}
	if _, ok := d.esPool[esClusterName]; !ok {
		PromError(fmt.Sprintf("es:集群不存在%s", esClusterName), "s.dao.searchResult indexName:%s", indexName)
		res = &model.SearchResult{Debug: fmt.Sprintf("es:集群不存在%s, %s", esClusterName, res.Debug)}
		return
	}
	// multi sort: optional score sort first, then caller-supplied field sorts
	sorterSlice := []elastic.Sorter{}
	if bsp.KW != "" && bsp.ScoreFirst {
		sorterSlice = append(sorterSlice, elastic.NewScoreSort().Desc())
	}
	// NOTE: the loop variable d shadows the receiver here; it is the order field name.
	for i, d := range bsp.Order {
		// Sort[i] pairs with Order[i]; when Sort is shorter, Sort[0] is the fallback.
		// NOTE(review): if Order is non-empty while Sort is empty this indexes
		// Sort[0] and panics — presumably callers always send a sort; verify.
		if len(bsp.Sort) < i+1 {
			if bsp.Sort[0] == "desc" {
				sorterSlice = append(sorterSlice, elastic.NewFieldSort(d).Desc())
			} else {
				sorterSlice = append(sorterSlice, elastic.NewFieldSort(d).Asc())
			}
		} else {
			if bsp.Sort[i] == "desc" {
				sorterSlice = append(sorterSlice, elastic.NewFieldSort(d).Desc())
			} else {
				sorterSlice = append(sorterSlice, elastic.NewFieldSort(d).Asc())
			}
		}
	}
	if bsp.KW != "" && !bsp.ScoreFirst {
		sorterSlice = append(sorterSlice, elastic.NewScoreSort().Desc())
	}
	// source filtering: only the requested fields are returned
	fsc := elastic.NewFetchSourceContext(true).Include(bsp.Source...)
	// highlight keyword fields with <em class="keyword"> markers
	hl := elastic.NewHighlight()
	if bsp.Highlight && len(bsp.KwFields) > 0 {
		for _, v := range bsp.KwFields {
			hl = hl.Fields(elastic.NewHighlighterField(v))
		}
		hl = hl.PreTags("<em class=\"keyword\">").PostTags("</em>")
	}
	// ES caps from + size at 10,000; clamp the window to the last page.
	// NOTE(review): if size itself exceeds 10,000, from goes negative — verify
	// callers bound Ps.
	from := (bsp.Pn - 1) * bsp.Ps
	size := bsp.Ps
	if (from + size) > 10000 {
		from = 10000 - size
	}
	// execute the search
	searchResult, err := d.esPool[esClusterName].
		Search().Index(indexName).
		Highlight(hl).
		Query(query).
		SortBy(sorterSlice...).
		From(from).
		Size(size).
		Pretty(true).
		FetchSourceContext(fsc).
		Do(context.Background())
	if err != nil {
		PromError(fmt.Sprintf("es:执行查询失败%s ", esClusterName), "%v", err)
		res = &model.SearchResult{Debug: res.Debug + "es:执行查询失败"}
		return
	}
	var data []json.RawMessage
	// b holds "{}" — the placeholder paired with a hit that has no highlight
	b := bytes.Buffer{}
	b.WriteString("{")
	b.WriteString("}")
	for _, hit := range searchResult.Hits.Hits {
		var t json.RawMessage
		e := json.Unmarshal(*hit.Source, &t)
		if e != nil {
			PromError(fmt.Sprintf("es:%s 索引有脏数据", esClusterName), "s.dao.SearchArchiveCheck(%d,%d) error(%v) ", bsp.Pn*bsp.Ps, bsp.Ps, e)
			continue
		}
		data = append(data, t)
		// highlight: hits and their highlight blobs are interleaved in data
		if len(hit.Highlight) > 0 {
			b, _ := json.Marshal(hit.Highlight)
			h := []byte(string(b))
			data = append(data, h)
		} else if bsp.Highlight {
			data = append(data, b.Bytes()) // guarantees a pair per hit when highlighting
		}
	}
	if len(data) == 0 {
		data = []json.RawMessage{}
	}
	res = &model.SearchResult{
		Order:  strings.Join(bsp.Order, ","),
		Sort:   strings.Join(bsp.Sort, ","),
		Result: data,
		Debug:  res.Debug,
		Page: &model.Page{
			Pn:    bsp.Pn,
			Ps:    bsp.Ps,
			Total: searchResult.Hits.TotalHits,
		},
	}
	return
}
// QueryResult query result from ES.
//
// Flow: resolve cluster -> optional mapping debug -> build sorters /
// highlight / paging -> either the scroll path or a regular search ->
// unpack hits and aggregations into res.
// sp.DebugLevel: 1 returns right after fetching the mapping (before the DSL
// runs); 2 additionally attaches the ES profile after the DSL runs.
func (d *Dao) QueryResult(c context.Context, query elastic.Query, sp *model.QueryParams, qbDebug *model.QueryDebugResult) (res *model.QueryResult, qrDebug *model.QueryDebugResult, err error) {
	qrDebug = &model.QueryDebugResult{}
	if qbDebug != nil {
		qrDebug = qbDebug
	}
	esCluster := sp.AppIDConf.ESCluster
	if _, ok := d.esPool[esCluster]; !ok {
		qrDebug.AddErrMsg("es:集群不存在" + esCluster)
		return
	}
	if sp.DebugLevel != 0 {
		qrDebug.Mapping, err = d.esPool[esCluster].GetMapping().Index(sp.QueryBody.From).Do(context.Background())
	}
	// low debug level exits before the DSL is executed
	if sp.DebugLevel == 1 {
		return
	}
	// multi sort
	sorterSlice := []elastic.Sorter{}
	if len(sp.QueryBody.Where.Like) > 0 && sp.QueryBody.OrderScoreFirst { // a Like with len > 0 but empty entries is still a problem
		sorterSlice = append(sorterSlice, elastic.NewScoreSort().Desc())
	}
	for _, i := range sp.QueryBody.Order {
		for k, v := range i {
			if v == "asc" {
				sorterSlice = append(sorterSlice, elastic.NewFieldSort(k).Asc())
			} else {
				sorterSlice = append(sorterSlice, elastic.NewFieldSort(k).Desc())
			}
		}
	}
	// FIX: append the score sort after the field sorts only when score is NOT
	// first. The condition previously lacked the negation, so with
	// OrderScoreFirst the score sort was added twice and without it never;
	// the scroll branch below and searchResult already use the negated form.
	if len(sp.QueryBody.Where.Like) > 0 && !sp.QueryBody.OrderScoreFirst {
		sorterSlice = append(sorterSlice, elastic.NewScoreSort().Desc())
	}
	// source filtering
	fsc := elastic.NewFetchSourceContext(true).Include(sp.QueryBody.Fields...)
	// highlight
	hl := elastic.NewHighlight()
	if sp.QueryBody.Highlight && len(sp.QueryBody.Where.Like) > 0 {
		for _, v := range sp.QueryBody.Where.Like {
			for _, field := range v.KWFields {
				hl = hl.Fields(elastic.NewHighlighterField(field))
			}
		}
		hl = hl.PreTags("<em class=\"keyword\">").PostTags("</em>")
	}
	// from + size capped at 10,000 (100,000 for businesses with "oht" permission)
	maxRows := 10000
	if b, ok := model.PermConf["oht"][sp.Business]; ok && b == "true" {
		maxRows = 100000
	}
	from := (sp.QueryBody.Pn - 1) * sp.QueryBody.Ps
	size := sp.QueryBody.Ps
	if (from + size) > maxRows {
		from = maxRows - size
	}
	// scroll path: page through with the scroll API until Pn*Ps docs collected
	if sp.QueryBody.Scroll {
		var (
			tList    []json.RawMessage
			tLen     int
			ScrollID = ""
		)
		res = &model.QueryResult{}
		esCluster := sp.AppIDConf.ESCluster
		eSearch, ok := d.esPool[esCluster]
		if !ok {
			PromError(fmt.Sprintf("es:集群不存在%s", esCluster), "s.dao.searchResult indexName:%s", esCluster)
			return
		}
		fsc := elastic.NewFetchSourceContext(true).Include(sp.QueryBody.Fields...)
		// multi sort (same rules as above)
		sorterSlice := []elastic.Sorter{}
		if len(sp.QueryBody.Where.Like) > 0 && sp.QueryBody.OrderScoreFirst { // a Like with len > 0 but empty entries is still a problem
			sorterSlice = append(sorterSlice, elastic.NewScoreSort().Desc())
		}
		for _, i := range sp.QueryBody.Order {
			for k, v := range i {
				if v == "asc" {
					sorterSlice = append(sorterSlice, elastic.NewFieldSort(k).Asc())
				} else {
					sorterSlice = append(sorterSlice, elastic.NewFieldSort(k).Desc())
				}
			}
		}
		if len(sp.QueryBody.Where.Like) > 0 && !sp.QueryBody.OrderScoreFirst {
			sorterSlice = append(sorterSlice, elastic.NewScoreSort().Desc())
		}
		for {
			searchResult, err := eSearch.Scroll().Index(sp.QueryBody.From).
				Query(query).FetchSourceContext(fsc).Size(sp.QueryBody.Ps).Scroll("1m").ScrollId(ScrollID).SortBy(sorterSlice...).Do(c)
			if err == io.EOF {
				// scroll exhausted
				break
			} else if err != nil {
				PromError(fmt.Sprintf("es:执行查询失败%s ", "Scroll"), "es:执行查询失败%v", err)
				break
			}
			ScrollID = searchResult.ScrollId
			for _, hit := range searchResult.Hits.Hits {
				var t json.RawMessage
				if err = json.Unmarshal(*hit.Source, &t); err != nil {
					PromError(fmt.Sprintf("es:Unmarshal%s ", "Scroll"), "es:Unmarshal%v", err)
					break
				}
				tList = append(tList, t)
				tLen++
				if tLen >= sp.QueryBody.Pn*sp.QueryBody.Ps {
					goto ClearScroll
				}
			}
		}
	ClearScroll:
		// release the scroll context asynchronously; best-effort
		go eSearch.ClearScroll().ScrollId(ScrollID).Do(context.Background())
		if res.Result, err = json.Marshal(tList); err != nil {
			PromError(fmt.Sprintf("es:Unmarshal%s ", "Scroll"), "es:Unmarshal%v", err)
			return
		}
		return
	}
	// regular search
	searchPrepare := d.esPool[esCluster].
		Search().Index(sp.QueryBody.From).
		Highlight(hl).
		Query(query).
		SortBy(sorterSlice...).
		From(from).
		Size(size).
		FetchSourceContext(fsc).IgnoreUnavailable(true).AllowNoIndices(true)
	if ec, ok := model.PermConf["es_cache"][sp.Business]; ok && ec == "true" {
		searchPrepare.RequestCache(true)
	}
	// routing: the configured routing field must appear in EQ or In
	if rt, ok := model.PermConf["routing"][sp.Business]; ok {
		routing := make([]string, 0, 1)
		if sp.QueryBody.Where.EQ != nil {
			if eq, ok := sp.QueryBody.Where.EQ[rt]; ok {
				routing = append(routing, fmt.Sprintf("%v", eq))
			}
		}
		if sp.QueryBody.Where.In != nil {
			if in, ok := sp.QueryBody.Where.In[rt]; ok {
				for _, v := range in {
					routing = append(routing, fmt.Sprintf("%v", v))
				}
			}
		}
		if len(routing) == 0 {
			qrDebug.AddErrMsg("es:路由不存在" + rt)
			return
		}
		searchPrepare.Routing(routing...)
	}
	if sp.DebugLevel == 2 {
		searchPrepare.Profile(true)
	}
	// Enhanced: attach aggregations / collapse derived from the where clause
	for _, v := range sp.QueryBody.Where.Enhanced {
		aggKey := v.Mode + "_" + v.Field
		switch v.Mode {
		case model.EnhancedModeGroupBy:
			aggs := elastic.NewTermsAggregation()
			aggs = aggs.Field(v.Field).Size(1000) // bucket cap; to be agreed with each business
			searchPrepare.Aggregation(aggKey, aggs)
		case model.EnhancedModeCollapse, model.EnhancedModeDistinct:
			collapse := elastic.NewCollapseBuilder(v.Field).MaxConcurrentGroupRequests(1)
			innerHit := elastic.NewInnerHit().Name("last_one").Size(1)
			for _, v := range v.Order {
				for field, sort := range v {
					if sort == "desc" {
						innerHit.Sort(field, false)
					} else {
						innerHit.Sort(field, true)
					}
				}
			}
			if len(v.Order) > 0 {
				collapse.InnerHit(innerHit)
			}
			searchPrepare.Collapse(collapse)
		case model.EnhancedModeSum:
			aggs := elastic.NewSumAggregation()
			aggs = aggs.Field(v.Field)
			searchPrepare.Aggregation(aggKey, aggs)
		case model.EnhancedModeDistinctCount:
			aggs := elastic.NewCardinalityAggregation()
			aggs = aggs.Field(v.Field)
			searchPrepare.Aggregation(aggKey, aggs)
		}
	}
	searchResult, err := searchPrepare.Do(context.Background())
	if err != nil {
		qrDebug.AddErrMsg(fmt.Sprintf("es:执行查询失败%s. %v", esCluster, err))
		PromError(fmt.Sprintf("es:执行查询失败%s ", esCluster), "%v", err)
		return
	}
	// unpack hits; b holds "{}" — the placeholder paired with an un-highlighted hit
	data := json.RawMessage{}
	docHits := []json.RawMessage{}
	docBuckets := map[string][]map[string]*json.RawMessage{}
	b := bytes.Buffer{}
	b.WriteString("{")
	b.WriteString("}")
	for _, hit := range searchResult.Hits.Hits {
		var t json.RawMessage
		e := json.Unmarshal(*hit.Source, &t)
		if e != nil {
			PromError(fmt.Sprintf("es:%s 索引有脏数据", esCluster), "s.dao.SearchArchiveCheck(%d,%d) error(%v) ", sp.QueryBody.Pn*sp.QueryBody.Ps, sp.QueryBody.Ps, e)
			continue
		}
		docHits = append(docHits, t)
		// highlight blobs are interleaved with their hits
		if len(hit.Highlight) > 0 {
			b, _ := json.Marshal(hit.Highlight)
			docHits = append(docHits, b)
		} else if sp.QueryBody.Highlight {
			docHits = append(docHits, b.Bytes()) // guarantees a pair per hit when highlighting
		}
	}
	if len(docHits) > 0 {
		if doc, er := json.Marshal(docHits); er != nil {
			qrDebug.AddErrMsg(fmt.Sprintf("es:Unmarshal docHits es:Unmarshal%v ", er))
			PromError(fmt.Sprintf("es:Unmarshal%s ", "docHits"), "es:Unmarshal%v", er)
		} else {
			data = doc
		}
	} else {
		h := bytes.Buffer{}
		h.WriteString("[")
		h.WriteString("]")
		data = h.Bytes()
	}
	// aggregation results overwrite the hit list when Enhanced modes are present
	for _, v := range sp.QueryBody.Where.Enhanced {
		key := v.Mode + "_" + v.Field
		switch v.Mode {
		case model.EnhancedModeGroupBy:
			result, ok := searchResult.Aggregations.Terms(key)
			if !ok {
				PromError(fmt.Sprintf("es:Unmarshal%s ", key), "es:Unmarshal%v", err)
				continue
			}
			for _, b := range result.Buckets {
				docBuckets[key] = append(docBuckets[key], b.Aggregations)
			}
			data = b.Bytes() // guarantees a sane payload when there is no data
		case model.EnhancedModeSum:
			result, ok := searchResult.Aggregations.Sum(key)
			if !ok {
				PromError(fmt.Sprintf("es:Unmarshal%s ", key), "es:Unmarshal%v", err)
				continue
			}
			docBuckets[key] = append(docBuckets[key], result.Aggregations)
			data = b.Bytes() // guarantees a sane payload when there is no data
		case model.EnhancedModeDistinctCount:
			result, ok := searchResult.Aggregations.Cardinality(key)
			if !ok {
				PromError(fmt.Sprintf("es:Unmarshal%s ", key), "es:Unmarshal%v", err)
				continue
			}
			docBuckets[key] = append(docBuckets[key], result.Aggregations)
			data = b.Bytes() // guarantees a sane payload when there is no data
		default:
			// other modes (e.g. collapse) keep the hit list as-is
		}
	}
	if len(docBuckets) > 0 {
		if doc, er := json.Marshal(docBuckets); er != nil {
			qrDebug.AddErrMsg(fmt.Sprintf("es:Unmarshal docBuckets es:Unmarshal%v", er))
			PromError(fmt.Sprintf("es:Unmarshal%s ", "docBuckets"), "es:Unmarshal%v", er)
		} else {
			data = doc
		}
	}
	order := []string{}
	sort := []string{}
	for _, i := range sp.QueryBody.Order {
		for k, v := range i {
			order = append(order, k)
			sort = append(sort, v)
		}
	}
	res = &model.QueryResult{
		Order:  strings.Join(order, ","),
		Sort:   strings.Join(sort, ","),
		Result: data,
		Page: &model.Page{
			Pn:    sp.QueryBody.Pn,
			Ps:    sp.QueryBody.Ps,
			Total: searchResult.Hits.TotalHits,
		},
	}
	// high debug level exits after the DSL ran, carrying the ES profile
	if sp.DebugLevel == 2 {
		qrDebug.Profile = searchResult.Profile
		return
	}
	return
}
// BulkIndex bulk-indexes whole documents into esName, replacing any
// existing document with the same id.
func (d *Dao) BulkIndex(c context.Context, esName string, bulkData []BulkItem) (err error) {
	bulk := d.esPool[esName].Bulk()
	for _, item := range bulkData {
		bulk.Add(elastic.NewBulkIndexRequest().
			Index(item.IndexName()).
			Type(item.IndexType()).
			Id(item.IndexID()).
			Doc(item))
	}
	if _, err = bulk.Do(c); err != nil {
		log.Error("esName(%s) bulk error(%v)", esName, err)
	}
	return
}
// ExistIndex reports whether indexName exists on cluster esClusterName;
// an unknown cluster yields an error.
func (d *Dao) ExistIndex(c context.Context, esClusterName, indexName string) (exist bool, err error) {
	es, ok := d.esPool[esClusterName]
	if !ok {
		PromError(fmt.Sprintf("es:集群不存在%s", esClusterName), "s.dao.searchResult indexName:%s", indexName)
		err = fmt.Errorf("集群不存在")
		return
	}
	exist, err = es.IndexExists(indexName).Do(c)
	return
}

View File

@@ -0,0 +1,136 @@
package dao
import (
"context"
"go-common/app/admin/main/search/model"
"testing"
"github.com/smartystreets/goconvey/convey"
"gopkg.in/olivere/elastic.v5"
)
// TestDaoUpdateMapBulk smoke-tests UpdateMapBulk with an empty payload;
// the error assertion is intentionally disabled (empty esName is invalid).
func TestDaoUpdateMapBulk(t *testing.T) {
	convey.Convey("UpdateMapBulk", t, func(ctx convey.C) {
		var (
			c        = context.Background()
			esName   = ""
			bulkData = []BulkMapItem{}
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			//err :=
			d.UpdateMapBulk(c, esName, bulkData)
			ctx.Convey("Then err should be nil.", func(ctx convey.C) {
				//ctx.So(err, convey.ShouldBeNil)
			})
		})
	})
}
// TestDaoUpdateBulk smoke-tests UpdateBulk with an empty payload;
// the error assertion is intentionally disabled (empty esName is invalid).
func TestDaoUpdateBulk(t *testing.T) {
	convey.Convey("UpdateBulk", t, func(ctx convey.C) {
		var (
			c        = context.Background()
			esName   = ""
			bulkData = []BulkItem{}
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			//err :=
			d.UpdateBulk(c, esName, bulkData)
			ctx.Convey("Then err should be nil.", func(ctx convey.C) {
				//ctx.So(err, convey.ShouldBeNil)
			})
		})
	})
}
// TestDaoUpsertBulk checks that UpsertBulk returns nil for an unknown
// cluster (the unknown cluster is only logged, by design).
func TestDaoUpsertBulk(t *testing.T) {
	convey.Convey("UpsertBulk", t, func(ctx convey.C) {
		var (
			c         = context.Background()
			esCluster = ""
			up        = &model.UpsertParams{}
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			err := d.UpsertBulk(c, esCluster, up)
			ctx.Convey("Then err should be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
			})
		})
	})
}
// TestDaosearchResult exercises searchResult with a nil query against an
// unknown cluster; that path still returns a non-nil res and nil err.
func TestDaosearchResult(t *testing.T) {
	convey.Convey("searchResult", t, func(ctx convey.C) {
		var (
			c             = context.Background()
			esClusterName = ""
			indexName     = ""
			query         elastic.Query
			bsp           = &model.BasicSearchParams{}
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			res, err := d.searchResult(c, esClusterName, indexName, query, bsp)
			ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
				ctx.So(res, convey.ShouldNotBeNil)
			})
		})
	})
}
// TestDaoQueryResult exercises QueryResult with an unknown cluster; that
// path records the failure in qrDebug and returns a nil res, so the res
// assertion is disabled.
func TestDaoQueryResult(t *testing.T) {
	convey.Convey("QueryResult", t, func(ctx convey.C) {
		var (
			c     = context.Background()
			query elastic.Query
			sp    = &model.QueryParams{
				AppIDConf: &model.QueryConfDetail{
					ESCluster: "",
				},
			}
			qbDebug = &model.QueryDebugResult{}
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			_, qrDebug, err := d.QueryResult(c, query, sp, qbDebug)
			ctx.Convey("Then err should be nil.res,qrDebug should not be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
				ctx.So(qrDebug, convey.ShouldNotBeNil)
				//ctx.So(res, convey.ShouldNotBeNil)
			})
		})
	})
}
// TestDaoBulkIndex smoke-tests BulkIndex with an empty payload;
// the error assertion is intentionally disabled (empty esName is invalid).
func TestDaoBulkIndex(t *testing.T) {
	convey.Convey("BulkIndex", t, func(ctx convey.C) {
		var (
			c        = context.Background()
			esName   = ""
			bulkData = []BulkItem{}
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			//err :=
			d.BulkIndex(c, esName, bulkData)
			ctx.Convey("Then err should be nil.", func(ctx convey.C) {
				//ctx.So(err, convey.ShouldBeNil)
			})
		})
	})
}
// TestDaoExistIndex exercises ExistIndex with an unknown cluster; the error
// is ignored and only the boolean result is asserted non-nil.
func TestDaoExistIndex(t *testing.T) {
	convey.Convey("ExistIndex", t, func(ctx convey.C) {
		var (
			c             = context.Background()
			esClusterName = ""
			indexName     = ""
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			exist, _ := d.ExistIndex(c, esClusterName, indexName)
			ctx.Convey("Then err should be nil.exist should not be nil.", func(ctx convey.C) {
				//ctx.So(err, convey.ShouldBeNil)
				ctx.So(exist, convey.ShouldNotBeNil)
			})
		})
	})
}

View File

@@ -0,0 +1,474 @@
package dao
import (
"context"
"encoding/json"
"fmt"
"net/url"
"strconv"
"strings"
"time"
"go-common/app/admin/main/search/model"
"go-common/library/log"
"github.com/pkg/errors"
"gopkg.in/olivere/elastic.v5"
)
const (
	// _sql is a prefix: initMapping appends the business table suffix
	// (digger_log_audit / digger_log_user_action).
	_sql = "SELECT id, name, index_format, index_cluster, additional_mapping, permission_point FROM digger_"
	// daily counters; the upsert increments count on key conflict
	_count   = "INSERT INTO digger_count (`business`,`type`,`time`,`count`) values (?, 'inc', ?, 1) ON DUPLICATE KEY UPDATE count=count+1"
	_percent = "INSERT INTO digger_count (`business`,`type`,`time`,`name`,`count`) values (?, 'inc', ?, ?, 1) ON DUPLICATE KEY UPDATE count=count+1"
)

// Business-id -> config caches, refreshed every minute by NewLogProcess.
// NOTE(review): these maps are re-assigned by a background goroutine while
// request handlers read them via GetLogInfo — looks like a data race; confirm
// whether a mutex or atomic swap is needed.
var (
	logAuditBusiness      map[int]*model.Business
	logUserActionBusiness map[int]*model.Business
)
// NewLogProcess refreshes the log business caches forever: every minute on
// success, retrying after one second on failure. Intended to run in its own
// goroutine (see New); it never returns.
func (d *Dao) NewLogProcess() {
	for {
		delay := time.Minute
		if err := d.NewLog(); err != nil {
			delay = time.Second
		}
		time.Sleep(delay)
	}
}
// NewLog reloads the log_audit and log_user_action business caches from the
// database and drops any business whose ES cluster is not in the pool.
// NOTE(review): the package-level maps are replaced while GetLogInfo may be
// reading them from request goroutines — confirm this race is acceptable.
func (d *Dao) NewLog() (err error) {
	if logAuditBusiness, err = d.initMapping("log_audit"); err != nil {
		return
	}
	for k, v := range logAuditBusiness {
		// a business pointing at an unknown cluster would panic later; drop it
		if _, ok := d.esPool[v.IndexCluster]; !ok {
			log.Error("logAudit esPool no exist(%v)", k)
			delete(logAuditBusiness, k)
		}
	}
	if logUserActionBusiness, err = d.initMapping("log_user_action"); err != nil {
		return
	}
	for k, v := range logUserActionBusiness {
		if _, ok := d.esPool[v.IndexCluster]; !ok {
			log.Error("logUserAction esPool no exist(%v)", k)
			delete(logUserActionBusiness, k)
		}
	}
	return
}
// GetLogInfo looks up the cached business config for the given appID and
// business id; ok reports whether it was found. Unknown appIDs yield an
// empty Business and false.
func (d *Dao) GetLogInfo(appID string, id int) (business *model.Business, ok bool) {
	switch appID {
	case "log_audit":
		business, ok = logAuditBusiness[id]
	case "log_user_action":
		business, ok = logUserActionBusiness[id]
	default:
		business, ok = &model.Business{}, false
	}
	return
}
// initMapping loads the "digger_<appID>" business table and builds each
// business' field mapping: the appID's default mapping merged with the
// row's additional_mapping JSON (additional keys win).
func (d *Dao) initMapping(appID string) (business map[int]*model.Business, err error) {
	defaultMapping := map[string]string{}
	switch appID {
	case "log_audit":
		defaultMapping = model.LogAuditDefaultMapping
	case "log_user_action":
		defaultMapping = model.LogUserActionDefaultMapping
	}
	business = map[int]*model.Business{}
	rows, err := d.db.Query(context.Background(), _sql+appID)
	if err != nil {
		return
	}
	defer rows.Close()
	for rows.Next() {
		var value = &model.Business{
			AppID:   appID,
			Mapping: map[string]string{},
		}
		// malformed rows are skipped, not fatal
		if err = rows.Scan(&value.ID, &value.Name, &value.IndexFormat, &value.IndexCluster, &value.AdditionalMapping, &value.PermissionPoint); err != nil {
			log.Error("Log New DB (%v)(%v)", appID, err)
			continue
		}
		// audit logs are pinned to the "log" cluster regardless of the DB value
		if appID == "log_audit" {
			value.IndexCluster = "log"
		}
		for k, v := range defaultMapping {
			value.Mapping[k] = v
		}
		if value.AdditionalMapping != "" {
			var additionalMappingDict map[string]string
			if err = json.Unmarshal([]byte(value.AdditionalMapping), &additionalMappingDict); err != nil {
				log.Error("Log New Json (%v)(%v)", value.ID, err)
				continue
			}
			for k, v := range additionalMappingDict {
				value.Mapping[k] = v
			}
		}
		business[value.ID] = value
	}
	// NOTE(review): rows.Err() overwrites any scan/JSON error left in err by a
	// skipped row — confirm partial failures are meant to be silent.
	err = rows.Err()
	return
}
/**
Resolve the ES index name(s) to query, comma separated.
Intended retention windows: daily indices at most 7 days, weekly at most
2 months, monthly at most 6 months, yearly at most 3 years.
*/
// logIndexName walks backwards from CTimeTo (default: now) to CTimeFrom
// (default: only the current bucket), emitting one candidate index per
// time bucket of business.IndexFormat, and keeps only indices that exist.
func (d *Dao) logIndexName(c context.Context, p *model.LogParams, business *model.Business) (res string, err error) {
	var (
		sTime  = time.Now()
		eTime  = time.Now()
		resArr []string
	)
	if p.CTimeFrom != "" {
		sTime, err = time.Parse("2006-01-02 15:04:05", p.CTimeFrom)
		if err != nil {
			log.Error("d.LogAuditIndexName(%v)", p.CTimeFrom)
			return
		}
	}
	if p.CTimeTo != "" {
		eTime, err = time.Parse("2006-01-02 15:04:05", p.CTimeTo)
		if err != nil {
			log.Error("d.LogAuditIndexName(p.CTimeTo)(%v)", p.CTimeTo)
			return
		}
	}
	resDict := map[string]bool{}
	if strings.Contains(business.IndexFormat, "02") {
		// daily indices: step back a day at a time (hard cap 60 steps)
		for a := 0; a <= 60; a++ {
			resDict[getLogAuditIndexName(p.Business, business.AppID, business.IndexFormat, eTime)] = true
			eTime = eTime.AddDate(0, 0, -1)
			if (p.CTimeFrom == "" && a >= 1) || (p.CTimeFrom != "" && sTime.After(eTime)) {
				break
			}
		}
	} else if strings.Contains(business.IndexFormat, "week") {
		// weekly buckets: still step daily so no bucket is skipped (cap 366)
		for a := 0; a <= 366; a++ {
			resDict[getLogAuditIndexName(p.Business, business.AppID, business.IndexFormat, eTime)] = true
			eTime = eTime.AddDate(0, 0, -1)
			if (p.CTimeFrom == "" && a >= 1) || (p.CTimeFrom != "" && sTime.After(eTime)) {
				resDict[getLogAuditIndexName(p.Business, business.AppID, business.IndexFormat, sTime)] = true
				break
			}
		}
	} else if strings.Contains(business.IndexFormat, "01") {
		// monthly: normalize to the 1st first — on Jan 31 AddDate(0, -1, 0)
		// would misbehave (month-length normalization)
		year, month, _ := eTime.Date()
		hour, min, sec := eTime.Clock()
		eTime = time.Date(year, month, 1, hour, min, sec, 0, eTime.Location())
		for a := 0; a <= 360; a++ {
			resDict[getLogAuditIndexName(p.Business, business.AppID, business.IndexFormat, eTime)] = true
			eTime = eTime.AddDate(0, -1, 0)
			if (p.CTimeFrom == "" && a >= 1) || p.CTimeFrom != "" && sTime.After(eTime) {
				break
			}
		}
	} else if strings.Contains(business.IndexFormat, "2006") {
		// yearly: normalize to Jan 1 first — on Feb 29 AddDate(-1, 0, 0)
		// would misbehave (leap-day normalization)
		year, _, _ := eTime.Date()
		hour, min, sec := eTime.Clock()
		eTime = time.Date(year, 1, 1, hour, min, sec, 0, eTime.Location())
		for a := 0; a <= 100; a++ {
			resDict[getLogAuditIndexName(p.Business, business.AppID, business.IndexFormat, eTime)] = true
			eTime = eTime.AddDate(-1, 0, 0)
			if (p.CTimeFrom == "" && a >= 1) || (p.CTimeFrom != "" && sTime.After(eTime)) {
				break
			}
		}
	} else if business.IndexFormat == "all" {
		// single unpartitioned index
		resDict[getLogAuditIndexName(p.Business, business.AppID, business.IndexFormat, eTime)] = true
	}
	// keep only indices that actually exist on the cluster
	for k := range resDict {
		if exist, e := d.ExistIndex(c, business.IndexCluster, k); exist && e == nil {
			resArr = append(resArr, k)
		}
	}
	res = strings.Join(resArr, ",")
	return
}
func getLogAuditIndexName(business int, indexName string, format string, time time.Time) (index string) {
var (
week = map[int]string{
0: "0107",
1: "0815",
2: "1623",
3: "2431",
}
)
format = strings.Replace(time.Format(format), "week", week[time.Day()/8], -1)
index = indexName + "_" + strconv.Itoa(business) + "_" + format
return
}
// getQuery translates flat request parameters (pr) into a bool filter query
// using the business field mapping to decide how each field is matched:
// exact terms, ranges (_from/_to), like sub-queries, bitmask terms and
// array and/or semantics.
func (d *Dao) getQuery(pr map[string][]interface{}, indexMapping map[string]string) (query *elastic.BoolQuery) {
	query = elastic.NewBoolQuery()
	for k, t := range indexMapping {
		switch t {
		case "int", "int64":
			if v, ok := pr[k]; ok {
				query = query.Filter(elastic.NewTermsQuery(k, v...))
			}
			if v, ok := pr[k+"_from"]; ok {
				query = query.Filter(elastic.NewRangeQuery(k).Gte(v[0]))
			}
			if v, ok := pr[k+"_to"]; ok {
				query = query.Filter(elastic.NewRangeQuery(k).Lte(v[0]))
			}
		case "string":
			if v, ok := pr[k]; ok {
				query = query.Filter(elastic.NewTermsQuery(k, v...))
			}
			if v, ok := pr[k+"_like"]; ok {
				// NOTE(review): Sprintf("%v", v) formats the whole slice,
				// producing e.g. "[foo]" with brackets — presumably v[0] was
				// intended; verify against callers before changing.
				likeMap := []model.QueryBodyWhereLike{
					{
						KWFields: []string{k},
						KW:       []string{fmt.Sprintf("%v", v)},
						Level:    model.LikeLevelHigh,
					},
				}
				if o, e := d.queryBasicLike(likeMap, ""); e == nil {
					query = query.Must(o...)
				}
			}
		case "time":
			if v, ok := pr[k+"_from"]; ok {
				query = query.Filter(elastic.NewRangeQuery(k).Gte(v[0]))
			}
			if v, ok := pr[k+"_to"]; ok {
				query = query.Filter(elastic.NewRangeQuery(k).Lte(v[0]))
			}
		case "int_to_bin":
			// each value n selects documents whose field contains bit 1<<(n-1);
			// multiple values are AND-ed (all bits must match)
			if v, ok := pr[k]; ok {
				var arr []elastic.Query
				for _, i := range v {
					item, err := strconv.ParseUint(i.(string), 10, 64)
					if err != nil {
						break
					}
					arr = append(arr, elastic.NewTermsQuery(k, 1<<(item-1)))
				}
				query = query.Filter(arr...)
			}
		case "array":
			// _and requires every value present; _or requires any
			if v, ok := pr[k+"_and"]; ok {
				for _, n := range v {
					query = query.Filter(elastic.NewTermsQuery(k, n))
				}
			}
			if v, ok := pr[k+"_or"]; ok {
				query = query.Filter(elastic.NewTermsQuery(k, v...))
			}
		}
	}
	return query
}
// LogAudit searches audit logs: the filter query is derived from pr via the
// business mapping, the index list from the ctime window in sp, and the
// search itself is delegated to searchResult. An empty index list returns
// an empty result without querying ES.
func (d *Dao) LogAudit(c context.Context, pr map[string][]interface{}, sp *model.LogParams, business *model.Business) (res *model.SearchResult, err error) {
	res = &model.SearchResult{
		Result: []json.RawMessage{},
		Page:   &model.Page{},
	}
	query := d.getQuery(pr, business.Mapping)
	var indexName string
	if indexName, err = d.logIndexName(c, sp, business); err != nil {
		log.Error("d.LogAudit.logIndexName(%v)(%v)", err, indexName)
		return
	}
	if indexName == "" {
		return
	}
	if res, err = d.searchResult(c, business.IndexCluster, indexName, query, sp.Bsp); err != nil {
		PromError(fmt.Sprintf("es:%s ", sp.Bsp.AppID), "%v", err)
	}
	return
}
// LogAuditGroupBy returns up to 1000 audit-log documents, collapsed so that
// only the newest document (by ctime desc) per distinct value of the
// "group" parameter is kept.
// NOTE(review): pr["group"][0] is accessed unchecked — presumably the HTTP
// layer validates the parameter; verify.
func (d *Dao) LogAuditGroupBy(c context.Context, pr map[string][]interface{}, sp *model.LogParams, business *model.Business) (res *model.SearchResult, err error) {
	res = &model.SearchResult{
		Result: []json.RawMessage{},
		Page:   &model.Page{},
	}
	var (
		indexName    = ""
		searchResult *elastic.SearchResult
	)
	group := pr["group"][0].(string)
	if _, ok := d.esPool[business.IndexCluster]; !ok {
		PromError(fmt.Sprintf("es:集群不存在%s", "LogAuditGroupBy"), "s.dao.LogAuditGroupBy indexName:%s", "LogAuditGroupBy")
		return
	}
	query := d.getQuery(pr, business.Mapping)
	indexName, err = d.logIndexName(c, sp, business)
	if err != nil {
		log.Error("d.LogAuditGroupBy.logIndexName(%v)(%v)", err, indexName)
		return
	}
	if indexName == "" {
		return
	}
	// collapse on the group field: one (newest) hit per distinct value
	collapse := elastic.NewCollapseBuilder(group).MaxConcurrentGroupRequests(1)
	searchResult, err = d.esPool[business.IndexCluster].Search().Index(indexName).Type("base").Query(query).
		Sort("ctime", false).Collapse(collapse).Size(1000).Do(c)
	if err != nil {
		log.Error("d.LogAuditGroupBy(%v)", err)
		return
	}
	for _, hit := range searchResult.Hits.Hits {
		var t json.RawMessage
		err = json.Unmarshal(*hit.Source, &t)
		if err != nil {
			log.Error("es:%s 返回不是json!!!", business.IndexCluster)
			return
		}
		res.Result = append(res.Result, t)
	}
	// paging is cosmetic here: Total reflects the collapsed result count
	res.Page.Ps = sp.Bsp.Ps
	res.Page.Pn = sp.Bsp.Pn
	res.Page.Total = int64(len(res.Result))
	return
}
// LogAuditDelete deletes audit-log documents matching pr within the ctime
// window of sp, using delete-by-query; the number of deleted documents is
// reported in res.Page.Total. NewLog guarantees business.IndexCluster is
// present in the ES pool.
func (d *Dao) LogAuditDelete(c context.Context, pr map[string][]interface{}, sp *model.LogParams, business *model.Business) (res *model.SearchResult, err error) {
	res = &model.SearchResult{
		Result: []json.RawMessage{},
		Page:   &model.Page{},
	}
	query := d.getQuery(pr, business.Mapping)
	var indexName string
	if indexName, err = d.logIndexName(c, sp, business); err != nil {
		log.Error("d.LogAuditDelete.logIndexName(%v)(%v)", err, indexName)
		return
	}
	if indexName == "" {
		return
	}
	var delResult *elastic.BulkIndexByScrollResponse
	delResult, err = d.esPool[business.IndexCluster].DeleteByQuery().
		Index(indexName).Type("base").Query(query).Size(10000).Do(c)
	if err != nil {
		log.Error("d.LogAuditDelete.DeleteByQuery(%v)(%v)", err, indexName)
		return
	}
	res.Page.Total = delResult.Total
	return
}
// LogUserAction searches user-action logs; mirrors LogAudit: pr is turned
// into a filter query, the index list comes from the ctime window, and an
// empty index list short-circuits with an empty result.
func (d *Dao) LogUserAction(c context.Context, pr map[string][]interface{}, sp *model.LogParams, business *model.Business) (res *model.SearchResult, err error) {
	res = &model.SearchResult{
		Result: []json.RawMessage{},
		Page:   &model.Page{},
	}
	query := d.getQuery(pr, business.Mapping)
	var indexName string
	if indexName, err = d.logIndexName(c, sp, business); err != nil {
		log.Error("d.LogUserAction.logIndexName(%v)(%v)", err, indexName)
		return
	}
	if indexName == "" {
		return
	}
	if res, err = d.searchResult(c, business.IndexCluster, indexName, query, sp.Bsp); err != nil {
		PromError(fmt.Sprintf("es:%s ", sp.Bsp.AppID), "%v", err)
	}
	return
}
// LogUserActionDelete deletes user-action log documents matching pr within
// the ctime window of sp via delete-by-query; the deleted count is returned
// in res.Page.Total.
func (d *Dao) LogUserActionDelete(c context.Context, pr map[string][]interface{}, sp *model.LogParams, business *model.Business) (res *model.SearchResult, err error) {
	res = &model.SearchResult{
		Result: []json.RawMessage{},
		Page:   &model.Page{},
	}
	query := d.getQuery(pr, business.Mapping)
	var indexName string
	if indexName, err = d.logIndexName(c, sp, business); err != nil {
		log.Error("d.LogUserActionDelete.logIndexName(%v)(%v)", err, indexName)
		return
	}
	if indexName == "" {
		return
	}
	var delResult *elastic.BulkIndexByScrollResponse
	delResult, err = d.esPool[business.IndexCluster].DeleteByQuery().
		Index(indexName).Type("base").Query(query).Size(10000).Do(c)
	if err != nil {
		log.Error("d.LogUserActionDelete.DeleteByQuery(%v)(%v)", err, indexName)
		return
	}
	res.Page.Total = delResult.Total
	return
}
// UDepTs resolves the departments of the given uids via the manager API.
// A non-zero business code in the response is returned as an error.
func (d *Dao) UDepTs(c context.Context, uids []string) (res *model.UDepTsData, err error) {
	params := url.Values{}
	params.Set("uids", strings.Join(uids, ","))
	if err = d.client.Get(c, d.managerDep, "", params, &res); err != nil {
		err = errors.Wrapf(err, "d.httpSearch url(%s)", d.managerDep+"?"+params.Encode())
		log.Error("d.httpSearch url(%s)", d.managerDep+"?"+params.Encode())
		return
	}
	if res.Code != 0 {
		// FIX: err is nil here, and errors.Wrapf(nil, ...) returns nil — the
		// old code reported success on an error response. Build a new error.
		err = errors.Errorf("response url(%s) code(%d)", d.managerDep+"?"+params.Encode(), res.Code)
		log.Error("response url(%s) code(%d)", d.managerDep+"?"+params.Encode(), res.Code)
		return
	}
	return
}
// IP resolves location info for the given ip addresses via the location API.
// A non-zero business code in the response is returned as an error.
func (d *Dao) IP(c context.Context, ip []string) (res *model.IPData, err error) {
	params := url.Values{}
	params.Set("ips", strings.Join(ip, ","))
	if err = d.client.Get(c, d.managerIP, "", params, &res); err != nil {
		err = errors.Wrapf(err, "d.httpSearch url(%s)", d.managerIP+"?"+params.Encode())
		log.Error("d.httpSearch url(%s)", d.managerIP+"?"+params.Encode())
		return
	}
	if res.Code != 0 {
		// FIX: err is nil here, so errors.Wrapf(nil, ...) returned nil and the
		// failure was swallowed; also the message previously pointed at
		// d.managerDep (copy-paste) instead of the IP endpoint.
		err = errors.Errorf("response url(%s) code(%d)", d.managerIP+"?"+params.Encode(), res.Code)
		log.Error("response url(%s) code(%d)", d.managerIP+"?"+params.Encode(), res.Code)
		return
	}
	return
}
// LogCount records today's usage stats for a log endpoint: one access
// counter plus per-uid and per-business breakdowns. It stops at the first
// failed insert (errors are only logged).
func (d *Dao) LogCount(c context.Context, name string, business int, uid interface{}) {
	date := time.Now().Format("2006-01-02")
	inserts := []struct {
		query string
		args  []interface{}
	}{
		{_count, []interface{}{name + "_access", date}},
		{_percent, []interface{}{name + "_uid", date, uid}},
		{_percent, []interface{}{name + "_business", date, business}},
	}
	for _, ins := range inserts {
		if _, err := d.db.Exec(c, ins.query, ins.args...); err != nil {
			log.Error("d.db.Exec err(%v)", err)
			return
		}
	}
}

View File

@@ -0,0 +1,282 @@
package dao
import (
"context"
"testing"
"time"
"github.com/smartystreets/goconvey/convey"
"go-common/app/admin/main/search/model"
)
// Unit tests for the log dao. They use goconvey and exercise the package-level
// dao instance `d` built by the test setup, so several cases only assert
// "did not panic" (commented-out assertions) rather than concrete values.
func TestDaoNewLog(t *testing.T) {
convey.Convey("NewLog", t, func(ctx convey.C) {
ctx.Convey("When everything goes positive", func(ctx convey.C) {
d.NewLog()
ctx.Convey("No return values", func(ctx convey.C) {
})
})
})
}
// GetLogInfo with an empty appID still returns non-nil (business, ok) values.
func TestDaoGetLogInfo(t *testing.T) {
convey.Convey("GetLogInfo", t, func(ctx convey.C) {
var (
appID = ""
id = int(0)
)
ctx.Convey("When everything goes positive", func(ctx convey.C) {
business, ok := d.GetLogInfo(appID, id)
ctx.Convey("Then business,ok should not be nil.", func(ctx convey.C) {
ctx.So(ok, convey.ShouldNotBeNil)
ctx.So(business, convey.ShouldNotBeNil)
})
})
})
}
// initMapping with an empty appID is expected to fail (err not nil).
func TestDaoinitMapping(t *testing.T) {
convey.Convey("initMapping", t, func(ctx convey.C) {
var (
appID = ""
)
ctx.Convey("When everything goes positive", func(ctx convey.C) {
business, err := d.initMapping(appID)
ctx.Convey("Then business should not be nil.", func(ctx convey.C) {
ctx.So(business, convey.ShouldNotBeNil)
ctx.So(err, convey.ShouldNotBeNil)
})
})
})
}
// logIndexName is checked for every supported IndexFormat granularity:
// yearly, monthly, weekly, daily and the single "all" index.
func TestDaologIndexName(t *testing.T) {
convey.Convey("logIndexName", t, func(ctx convey.C) {
var (
c = context.Background()
p = &model.LogParams{
CTimeFrom: "2010-01-01 00:00:00",
CTimeTo: "2020-01-01 00:00:00",
}
business = &model.Business{
ID: 0,
AppID: "log_audit",
}
)
ctx.Convey("2006", func(ctx convey.C) {
business.IndexFormat = "2006"
res, err := d.logIndexName(c, p, business)
ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
ctx.So(res, convey.ShouldNotBeNil)
})
})
ctx.Convey("2006-01", func(ctx convey.C) {
business.IndexFormat = "2006-01"
res, err := d.logIndexName(c, p, business)
ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
ctx.So(res, convey.ShouldNotBeNil)
})
})
ctx.Convey("2006-01-week", func(ctx convey.C) {
business.IndexFormat = "2006-01-week"
res, err := d.logIndexName(c, p, business)
ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
ctx.So(res, convey.ShouldNotBeNil)
})
})
ctx.Convey("2006-01-02", func(ctx convey.C) {
business.IndexFormat = "2006-01-02"
res, err := d.logIndexName(c, p, business)
ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
ctx.So(res, convey.ShouldNotBeNil)
})
})
ctx.Convey("all", func(ctx convey.C) {
business.IndexFormat = "all"
res, err := d.logIndexName(c, p, business)
ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
ctx.So(res, convey.ShouldNotBeNil)
})
})
})
}
func TestDaogetLogAuditIndexName(t *testing.T) {
convey.Convey("getLogAuditIndexName", t, func(ctx convey.C) {
var (
business = int(0)
indexName = ""
format = ""
// NOTE: this local variable shadows the `time` package for the rest of
// the function; the initializer itself still refers to the package.
time = time.Now()
)
ctx.Convey("When everything goes positive", func(ctx convey.C) {
index := getLogAuditIndexName(business, indexName, format, time)
ctx.Convey("Then index should not be nil.", func(ctx convey.C) {
ctx.So(index, convey.ShouldNotBeNil)
})
})
})
}
// getQuery must tolerate nil parameter and mapping maps.
func TestDaogetQuery(t *testing.T) {
convey.Convey("getQuery", t, func(ctx convey.C) {
var (
pr map[string][]interface{}
indexMapping map[string]string
)
ctx.Convey("When everything goes positive", func(ctx convey.C) {
query := d.getQuery(pr, indexMapping)
ctx.Convey("Then query should not be nil.", func(ctx convey.C) {
ctx.So(query, convey.ShouldNotBeNil)
})
})
})
}
func TestDaoLogAudit(t *testing.T) {
convey.Convey("LogAudit", t, func(ctx convey.C) {
var (
c = context.Background()
pr map[string][]interface{}
sp = &model.LogParams{}
business = &model.Business{}
)
ctx.Convey("When everything goes positive", func(ctx convey.C) {
res, err := d.LogAudit(c, pr, sp, business)
ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
ctx.So(res, convey.ShouldNotBeNil)
})
})
})
}
// LogAuditGroupBy requires a "group" key in the parameter map.
func TestDaoLogAuditGroupBy(t *testing.T) {
convey.Convey("LogAuditGroupBy", t, func(ctx convey.C) {
var (
c = context.Background()
pr = map[string][]interface{}{
"group": {"group"},
}
sp = &model.LogParams{}
business = &model.Business{}
)
ctx.Convey("When everything goes positive", func(ctx convey.C) {
res, err := d.LogAuditGroupBy(c, pr, sp, business)
ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
ctx.So(res, convey.ShouldNotBeNil)
})
})
})
}
func TestDaoLogAuditDelete(t *testing.T) {
convey.Convey("LogAuditDelete", t, func(ctx convey.C) {
var (
c = context.Background()
pr map[string][]interface{}
sp = &model.LogParams{}
business = &model.Business{}
)
ctx.Convey("When everything goes positive", func(ctx convey.C) {
res, err := d.LogAuditDelete(c, pr, sp, business)
ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
ctx.So(res, convey.ShouldNotBeNil)
})
})
})
}
func TestDaoLogUserAction(t *testing.T) {
convey.Convey("LogUserAction", t, func(ctx convey.C) {
var (
c = context.Background()
pr map[string][]interface{}
sp = &model.LogParams{}
business = &model.Business{}
)
ctx.Convey("When everything goes positive", func(ctx convey.C) {
res, err := d.LogUserAction(c, pr, sp, business)
ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
ctx.So(res, convey.ShouldNotBeNil)
})
})
})
}
func TestDaoLogUserActionDelete(t *testing.T) {
convey.Convey("LogUserActionDelete", t, func(ctx convey.C) {
var (
c = context.Background()
pr map[string][]interface{}
sp = &model.LogParams{}
business = &model.Business{}
)
ctx.Convey("When everything goes positive", func(ctx convey.C) {
res, err := d.LogUserActionDelete(c, pr, sp, business)
ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
ctx.So(res, convey.ShouldNotBeNil)
})
})
})
}
func TestDaoUDepTs(t *testing.T) {
convey.Convey("UDepTs", t, func(ctx convey.C) {
var (
c = context.Background()
uids = []string{}
)
ctx.Convey("When everything goes positive", func(ctx convey.C) {
res, err := d.UDepTs(c, uids)
ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
ctx.So(res, convey.ShouldNotBeNil)
})
})
})
}
// IP only checks that the call completes; assertions are intentionally
// disabled because the manager service may be unavailable in CI.
func TestDaoIP(t *testing.T) {
convey.Convey("IP", t, func(ctx convey.C) {
var (
c = context.Background()
)
ip := []string{
"127.0.0.1",
}
ctx.Convey("When everything goes positive", func(ctx convey.C) {
//res, err :=
d.IP(c, ip)
ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) {
//ctx.So(err, convey.ShouldBeNil)
//ctx.So(res, convey.ShouldNotBeNil)
})
})
})
}
// LogCount returns nothing; the test only verifies it does not panic.
func TestDaoLogCount(t *testing.T) {
convey.Convey("LogCount", t, func(ctx convey.C) {
var (
c = context.Background()
name = ""
business = int(0)
uid = interface{}(0)
)
ctx.Convey("When everything goes positive", func(ctx convey.C) {
d.LogCount(c, name, business, uid)
ctx.Convey("No return values", func(ctx convey.C) {
})
})
})
}

View File

@@ -0,0 +1,394 @@
package dao
import (
"context"
"encoding/json"
"fmt"
"net/url"
"strings"
"time"
"go-common/app/admin/main/search/model"
sqlx "go-common/library/database/sql"
)
// SQL statements for the digger management tables (business / asset / app /
// count). Most use `?` placeholders bound at Exec/Query time.
//
// NOTE(review): _mngBusinessListSQL, _mngBusinessListTotalSQL and the
// _mngAsset* list/total statements build their LIKE pattern and WHERE clause
// via fmt.Sprintf in the callers, so any value spliced into them must be
// escaped by the caller — it is NOT a bound parameter.
const (
_mngBusinessListSQL = `select id,business,description,app_ids from digger_business where business like '%%%s%%' limit ?,?`
_mngBusinessListTotalSQL = `select count(*) from digger_business where business like '%%%s%%'`
_mngBusinessAllSQL = `select id,business,description,app_ids from digger_business`
_mngAddBusinessSQL = `insert into digger_business (business,description,app_ids) values (?,?,?)`
_mngUpdateBusinessSQL = `update digger_business set business=?,description=?,app_ids=? where id=?`
_mngBusinessInfoSQL = `select id,business,description,app_ids from digger_business where id=?`
_mngBusinessInfoByNameSQL = `select id,business,description,app_ids from digger_business where business=?`
// %s below receives a caller-built WHERE clause (see AssetList/AssetTotal).
_mngAssetListSQL = `select id,name,type,src,description from digger_asset %s limit ?,?`
_mngAssetTotalSQL = `select count(*) from digger_asset %s`
_mngAssetAllSQL = `select id,name,type,src,description from digger_asset`
_mngAssetInfoSQL = `select id,name,type,src,description from digger_asset where id=?`
_mngAssetInfoByNameSQL = `select id,name,type,src,description from digger_asset where name=?`
_mngAddAssetSQL = `insert into digger_asset (name,type,src,description) values (?,?,?,?)`
_mngUpdateAssetSQL = `update digger_asset set name=?,type=?,src=?,description=? where id=?`
// App statements project the full 28-column digger_app row; keep the column
// order in sync with the Scan calls in AppList/AppInfo/AppInfoByAppid.
_mngApplistSQL = `select id,business,appid,description,db_name,es_name,table_name,databus_name,table_prefix,table_format,index_prefix,
index_version,index_format,index_type,index_id,data_index_suffix,index_mapping,data_fields,data_extra,review_num,review_time,
sleep,size,sql_by_id,sql_by_mtime,sql_by_idmtime,databus_info,databus_index_id,query_max_indexes from digger_app where business=?`
_mngAppInfoSQL = `select id,business,appid,description,db_name,es_name,table_name,databus_name,table_prefix,table_format,index_prefix,
index_version,index_format,index_type,index_id,data_index_suffix,index_mapping,data_fields,data_extra,review_num,review_time,
sleep,size,sql_by_id,sql_by_mtime,sql_by_idmtime,databus_info,databus_index_id,query_max_indexes from digger_app where id=?`
_mngAppInfoByAppidSQL = `select id,business,appid,description,db_name,es_name,table_name,databus_name,table_prefix,table_format,index_prefix,
index_version,index_format,index_type,index_id,data_index_suffix,index_mapping,data_fields,data_extra,review_num,review_time,
sleep,size,sql_by_id,sql_by_mtime,sql_by_idmtime,databus_info,databus_index_id,query_max_indexes from digger_app where appid=?`
_mngAddAppSQL = `insert into digger_app (business,appid,description) values (?,?,?)`
_mngUpdateAppSQL = `update digger_app set business=?,appid=?,description=?,db_name=?,es_name=?,table_name=?,databus_name=?,table_prefix=?,table_format=?,index_prefix=?,
index_version=?,index_format=?,index_type=?,index_id=?,data_index_suffix=?,index_mapping=?,data_fields=?,data_extra=?,review_num=?,review_time=?,
sleep=?,size=?,sql_by_id=?,sql_by_mtime=?,sql_by_idmtime=?,databus_info=?,databus_index_id=?,query_max_indexes=? where id=?`
_mngUpdateAppAssetTableSQL = `update digger_app set table_prefix=?,table_format=? where table_name=?`
_mngUpdateAppAssetDatabusSQL = `update digger_app set databus_info=?,databus_index_id=? where databus_name=?`
_mngCountSQL = `select time,count from digger_count where business=? and type=? and time >= ?`
_mngPercentSQL = `select name,count from digger_count where business=? and type=? and time = ?`
)
// BusinessList pages through digger_business rows whose name contains the
// given substring, decoding the app_ids JSON into each row's Apps slice.
// The name is spliced into the LIKE pattern with Sprintf (not bound as a
// parameter), so quote/backslash characters are escaped first to keep user
// input from breaking out of the SQL string literal.
func (d *Dao) BusinessList(ctx context.Context, name string, offset, limit int) (list []*model.MngBusiness, err error) {
	// Escape for interpolation into a single-quoted MySQL string literal.
	name = strings.Replace(name, `\`, `\\`, -1)
	name = strings.Replace(name, "'", `\'`, -1)
	sqlStr := fmt.Sprintf(_mngBusinessListSQL, name)
	rows, err := d.db.Query(ctx, sqlStr, offset, limit)
	if err != nil {
		return
	}
	defer rows.Close()
	for rows.Next() {
		b := &model.MngBusiness{}
		if err = rows.Scan(&b.ID, &b.Name, &b.Desc, &b.AppsJSON); err != nil {
			return
		}
		b.Apps = make([]*model.MngBusinessApp, 0)
		if b.AppsJSON != "" {
			if err = json.Unmarshal([]byte(b.AppsJSON), &b.Apps); err != nil {
				return
			}
		}
		list = append(list, b)
	}
	err = rows.Err()
	return
}
// BusinessTotal counts digger_business rows whose name contains the given
// substring. As in BusinessList, the name is interpolated into the SQL via
// Sprintf, so it is escaped to prevent breaking out of the string literal.
func (d *Dao) BusinessTotal(ctx context.Context, name string) (total int64, err error) {
	// Escape for interpolation into a single-quoted MySQL string literal.
	name = strings.Replace(name, `\`, `\\`, -1)
	name = strings.Replace(name, "'", `\'`, -1)
	sqlStr := fmt.Sprintf(_mngBusinessListTotalSQL, name)
	err = d.db.QueryRow(ctx, sqlStr).Scan(&total)
	return
}
// BusinessAll returns every digger_business row, with each row's app_ids
// JSON decoded into its Apps slice.
func (d *Dao) BusinessAll(ctx context.Context) (list []*model.MngBusiness, err error) {
	rs, err := d.db.Query(ctx, _mngBusinessAllSQL)
	if err != nil {
		return
	}
	defer rs.Close()
	for rs.Next() {
		item := new(model.MngBusiness)
		if err = rs.Scan(&item.ID, &item.Name, &item.Desc, &item.AppsJSON); err != nil {
			return
		}
		item.Apps = []*model.MngBusinessApp{}
		if len(item.AppsJSON) > 0 {
			if err = json.Unmarshal([]byte(item.AppsJSON), &item.Apps); err != nil {
				return
			}
		}
		list = append(list, item)
	}
	err = rs.Err()
	return
}
// AddBusiness inserts a new digger_business row and returns its
// auto-increment id.
func (d *Dao) AddBusiness(ctx context.Context, b *model.MngBusiness) (id int64, err error) {
	result, e := d.db.Exec(ctx, _mngAddBusinessSQL, b.Name, b.Desc, b.AppsJSON)
	if e != nil {
		err = e
		return
	}
	return result.LastInsertId()
}
// UpdateBusiness overwrites the name, description and app_ids JSON of the
// digger_business row identified by b.ID. A missing row is not an error
// (zero rows affected is not checked).
func (d *Dao) UpdateBusiness(ctx context.Context, b *model.MngBusiness) (err error) {
_, err = d.db.Exec(ctx, _mngUpdateBusinessSQL, b.Name, b.Desc, b.AppsJSON, b.ID)
return
}
// BusinessInfo loads one digger_business row by id and decodes its app_ids
// JSON. When the row does not exist it returns (nil, nil).
func (d *Dao) BusinessInfo(ctx context.Context, id int64) (info *model.MngBusiness, err error) {
	info = &model.MngBusiness{}
	row := d.db.QueryRow(ctx, _mngBusinessInfoSQL, id)
	if err = row.Scan(&info.ID, &info.Name, &info.Desc, &info.AppsJSON); err != nil {
		if err == sqlx.ErrNoRows {
			// Absence is reported as (nil, nil), not as an error.
			info, err = nil, nil
		}
		return
	}
	info.Apps = []*model.MngBusinessApp{}
	if len(info.AppsJSON) > 0 {
		err = json.Unmarshal([]byte(info.AppsJSON), &info.Apps)
	}
	return
}
// BusinessInfoByName loads one digger_business row by its business name and
// decodes its app_ids JSON. When the row does not exist it returns (nil, nil).
func (d *Dao) BusinessInfoByName(ctx context.Context, name string) (info *model.MngBusiness, err error) {
	info = &model.MngBusiness{}
	row := d.db.QueryRow(ctx, _mngBusinessInfoByNameSQL, name)
	if err = row.Scan(&info.ID, &info.Name, &info.Desc, &info.AppsJSON); err != nil {
		if err == sqlx.ErrNoRows {
			// Absence is reported as (nil, nil), not as an error.
			info, err = nil, nil
		}
		return
	}
	info.Apps = []*model.MngBusinessApp{}
	if len(info.AppsJSON) > 0 {
		err = json.Unmarshal([]byte(info.AppsJSON), &info.Apps)
	}
	return
}
// AssetList pages through digger_asset rows filtered by an optional type and
// an optional name substring. typ is an int and safe to format directly, but
// name is spliced into the LIKE pattern via Sprintf, so quote/backslash
// characters are escaped first to keep user input inside the string literal.
func (d *Dao) AssetList(ctx context.Context, typ int, name string, offset, limit int) (list []*model.MngAsset, err error) {
	where := " where 1 "
	if typ > 0 {
		where += fmt.Sprintf(" and type=%d ", typ)
	}
	if name != "" {
		// Escape for interpolation into a single-quoted MySQL string literal.
		name = strings.Replace(name, `\`, `\\`, -1)
		name = strings.Replace(name, "'", `\'`, -1)
		where += fmt.Sprintf(" and name like '%%%s%%'", name)
	}
	sqlStr := fmt.Sprintf(_mngAssetListSQL, where)
	rows, err := d.db.Query(ctx, sqlStr, offset, limit)
	if err != nil {
		return
	}
	defer rows.Close()
	for rows.Next() {
		a := &model.MngAsset{}
		if err = rows.Scan(&a.ID, &a.Name, &a.Type, &a.Config, &a.Desc); err != nil {
			return
		}
		list = append(list, a)
	}
	err = rows.Err()
	return
}
// AssetTotal counts digger_asset rows matching the optional type and name
// filters used by AssetList. The name substring is escaped before being
// interpolated into the LIKE pattern, mirroring AssetList.
func (d *Dao) AssetTotal(ctx context.Context, typ int, name string) (total int64, err error) {
	where := " where 1 "
	if typ > 0 {
		where += fmt.Sprintf(" and type=%d ", typ)
	}
	if name != "" {
		// Escape for interpolation into a single-quoted MySQL string literal.
		name = strings.Replace(name, `\`, `\\`, -1)
		name = strings.Replace(name, "'", `\'`, -1)
		where += fmt.Sprintf(" and name like '%%%s%%'", name)
	}
	sqlStr := fmt.Sprintf(_mngAssetTotalSQL, where)
	err = d.db.QueryRow(ctx, sqlStr).Scan(&total)
	return
}
// AssetAll returns every digger_asset row.
func (d *Dao) AssetAll(ctx context.Context) (list []*model.MngAsset, err error) {
	rs, err := d.db.Query(ctx, _mngAssetAllSQL)
	if err != nil {
		return
	}
	defer rs.Close()
	for rs.Next() {
		item := new(model.MngAsset)
		if err = rs.Scan(&item.ID, &item.Name, &item.Type, &item.Config, &item.Desc); err != nil {
			return
		}
		list = append(list, item)
	}
	err = rs.Err()
	return
}
// AssetInfo loads one digger_asset row by id; a missing row is reported as
// (nil, nil) rather than an error.
func (d *Dao) AssetInfo(ctx context.Context, id int64) (info *model.MngAsset, err error) {
	info = &model.MngAsset{}
	err = d.db.QueryRow(ctx, _mngAssetInfoSQL, id).Scan(&info.ID, &info.Name, &info.Type, &info.Config, &info.Desc)
	if err == sqlx.ErrNoRows {
		info, err = nil, nil
	}
	return
}
// AssetInfoByName loads one digger_asset row by name; a missing row is
// reported as (nil, nil) rather than an error.
func (d *Dao) AssetInfoByName(ctx context.Context, name string) (info *model.MngAsset, err error) {
	info = &model.MngAsset{}
	err = d.db.QueryRow(ctx, _mngAssetInfoByNameSQL, name).Scan(&info.ID, &info.Name, &info.Type, &info.Config, &info.Desc)
	if err == sqlx.ErrNoRows {
		info, err = nil, nil
	}
	return
}
// AddAsset inserts a new digger_asset row and returns its auto-increment id.
func (d *Dao) AddAsset(ctx context.Context, b *model.MngAsset) (id int64, err error) {
	result, e := d.db.Exec(ctx, _mngAddAssetSQL, b.Name, b.Type, b.Config, b.Desc)
	if e != nil {
		err = e
		return
	}
	return result.LastInsertId()
}
// UpdateAsset overwrites name, type, config (src column) and description of
// the digger_asset row identified by b.ID. Zero rows affected is not treated
// as an error.
func (d *Dao) UpdateAsset(ctx context.Context, b *model.MngAsset) (err error) {
_, err = d.db.Exec(ctx, _mngUpdateAssetSQL, b.Name, b.Type, b.Config, b.Desc, b.ID)
return
}
// AppList returns every digger_app row belonging to the given business.
// The 28-field Scan below must stay in exactly the same order as the column
// list in _mngApplistSQL — a swapped pair would silently mis-assign fields.
func (d *Dao) AppList(ctx context.Context, business string) (list []*model.MngApp, err error) {
rows, err := d.db.Query(ctx, _mngApplistSQL, business)
if err != nil {
return
}
defer rows.Close()
for rows.Next() {
a := &model.MngApp{}
if err = rows.Scan(&a.ID, &a.Business, &a.AppID, &a.Desc, &a.DBName, &a.ESName, &a.TableName, &a.DatabusName, &a.TablePrefix, &a.TableFormat,
&a.IndexPrefix, &a.IndexVersion, &a.IndexFormat, &a.IndexType, &a.IndexID, &a.DataIndexSuffix, &a.IndexMapping,
&a.DataFields, &a.DataExtra, &a.ReviewNum, &a.ReviewTime, &a.Sleep, &a.Size, &a.SQLByID, &a.SQLByMtime,
&a.SQLByIDMtime, &a.DatabusInfo, &a.DatabusIndexID, &a.QueryMaxIndexes); err != nil {
return
}
list = append(list, a)
}
err = rows.Err()
return
}
// AppInfo loads one digger_app row by primary key; a missing row is reported
// as (nil, nil) rather than an error. The Scan order must match
// _mngAppInfoSQL column-for-column.
func (d *Dao) AppInfo(ctx context.Context, id int64) (a *model.MngApp, err error) {
a = new(model.MngApp)
if err = d.db.QueryRow(ctx, _mngAppInfoSQL, id).Scan(&a.ID, &a.Business, &a.AppID, &a.Desc, &a.DBName, &a.ESName, &a.TableName, &a.DatabusName,
&a.TablePrefix, &a.TableFormat, &a.IndexPrefix, &a.IndexVersion, &a.IndexFormat, &a.IndexType, &a.IndexID, &a.DataIndexSuffix, &a.IndexMapping,
&a.DataFields, &a.DataExtra, &a.ReviewNum, &a.ReviewTime, &a.Sleep, &a.Size, &a.SQLByID, &a.SQLByMtime,
&a.SQLByIDMtime, &a.DatabusInfo, &a.DatabusIndexID, &a.QueryMaxIndexes); err != nil {
if err == sqlx.ErrNoRows {
a = nil
err = nil
}
return
}
return
}
// AppInfoByAppid loads one digger_app row by its appid; a missing row is
// reported as (nil, nil) rather than an error. The Scan order must match
// _mngAppInfoByAppidSQL column-for-column.
func (d *Dao) AppInfoByAppid(ctx context.Context, appid string) (a *model.MngApp, err error) {
a = new(model.MngApp)
if err = d.db.QueryRow(ctx, _mngAppInfoByAppidSQL, appid).Scan(&a.ID, &a.Business, &a.AppID, &a.Desc, &a.DBName, &a.ESName, &a.TableName, &a.DatabusName,
&a.TablePrefix, &a.TableFormat, &a.IndexPrefix, &a.IndexVersion, &a.IndexFormat, &a.IndexType, &a.IndexID, &a.DataIndexSuffix, &a.IndexMapping,
&a.DataFields, &a.DataExtra, &a.ReviewNum, &a.ReviewTime, &a.Sleep, &a.Size, &a.SQLByID, &a.SQLByMtime,
&a.SQLByIDMtime, &a.DatabusInfo, &a.DatabusIndexID, &a.QueryMaxIndexes); err != nil {
if err == sqlx.ErrNoRows {
a = nil
err = nil
}
return
}
return
}
// AddApp inserts a minimal digger_app row (business, appid, description) and
// returns its auto-increment id; the remaining columns are filled later via
// UpdateApp.
func (d *Dao) AddApp(ctx context.Context, a *model.MngApp) (id int64, err error) {
	result, e := d.db.Exec(ctx, _mngAddAppSQL, a.Business, a.AppID, a.Desc)
	if e != nil {
		err = e
		return
	}
	return result.LastInsertId()
}
// UpdateApp overwrites all configurable columns of the digger_app row
// identified by a.ID. The 28 bound arguments must stay in exactly the same
// order as the SET clause of _mngUpdateAppSQL.
func (d *Dao) UpdateApp(ctx context.Context, a *model.MngApp) (err error) {
_, err = d.db.Exec(ctx, _mngUpdateAppSQL, a.Business, a.AppID, a.Desc, a.DBName, a.ESName, a.TableName, a.DatabusName, a.TablePrefix, a.TableFormat,
a.IndexPrefix, a.IndexVersion, a.IndexFormat, a.IndexType, a.IndexID, a.DataIndexSuffix, a.IndexMapping,
a.DataFields, a.DataExtra, a.ReviewNum, a.ReviewTime, a.Sleep, a.Size, a.SQLByID, a.SQLByMtime,
a.SQLByIDMtime, a.DatabusInfo, a.DatabusIndexID, a.QueryMaxIndexes, a.ID)
return
}
// UpdateAppAssetTable propagates a table asset's prefix/format to every
// digger_app row that references the table by name.
func (d *Dao) UpdateAppAssetTable(ctx context.Context, name string, t *model.MngAssetTable) (err error) {
_, err = d.db.Exec(ctx, _mngUpdateAppAssetTableSQL, t.TablePrefix, t.TableFormat, name)
return
}
// UpdateAppAssetDatabus propagates a databus asset's info/index-id to every
// digger_app row that references the databus by name.
func (d *Dao) UpdateAppAssetDatabus(ctx context.Context, name string, v *model.MngAssetDatabus) (err error) {
_, err = d.db.Exec(ctx, _mngUpdateAppAssetDatabusSQL, v.DatabusInfo, v.DatabusIndexID, name)
return
}
// MngCount returns the last 365 days of counter rows for a business/type.
// Each row's time is truncated to its date portion (YYYY-MM-DD).
func (d *Dao) MngCount(ctx context.Context, c *model.MngCount) (list []*model.MngCountRes, err error) {
	list = []*model.MngCountRes{}
	sTime := time.Now().AddDate(0, 0, -365).Format("2006-01-02")
	rows, err := d.db.Query(ctx, _mngCountSQL, c.Business, c.Type, sTime)
	if err != nil {
		return
	}
	defer rows.Close()
	for rows.Next() {
		a := &model.MngCountRes{}
		if err = rows.Scan(&a.Time, &a.Count); err != nil {
			return
		}
		// Keep only the date part. Guard the slice: the previous unconditional
		// a.Time[:10] panicked on any time string shorter than 10 characters
		// (e.g. an empty/NULL-mapped value).
		if len(a.Time) > 10 {
			a.Time = a.Time[:10]
		}
		list = append(list, a)
	}
	err = rows.Err()
	return
}
// MngPercent returns yesterday's per-name counter breakdown for a
// business/type pair.
func (d *Dao) MngPercent(ctx context.Context, c *model.MngCount) (list []*model.MngPercentRes, err error) {
	list = []*model.MngPercentRes{}
	day := time.Now().AddDate(0, 0, -1).Format("2006-01-02")
	rs, err := d.db.Query(ctx, _mngPercentSQL, c.Business, c.Type, day)
	if err != nil {
		return
	}
	defer rs.Close()
	for rs.Next() {
		item := new(model.MngPercentRes)
		if err = rs.Scan(&item.Name, &item.Count); err != nil {
			return
		}
		list = append(list, item)
	}
	err = rs.Err()
	return
}
// Unames resolves uids to user names via the manager HTTP service.
// It returns a non-nil error both on transport failure and on a non-zero
// business code in the response.
func (d *Dao) Unames(c context.Context, uids []string) (res *model.UnamesData, err error) {
	params := url.Values{}
	params.Set("uids", strings.Join(uids, ","))
	if err = d.client.Get(c, d.managerUnames, "", params, &res); err != nil {
		return
	}
	if res.Code != 0 {
		// Previously this branch returned a nil error, handing the caller a
		// failed response as if it had succeeded; surface the code instead.
		err = fmt.Errorf("unames response url(%s) code(%d)", d.managerUnames+"?"+params.Encode(), res.Code)
		return
	}
	return
}

View File

@@ -0,0 +1,396 @@
package dao
import (
"context"
"go-common/app/admin/main/search/model"
"testing"
"github.com/smartystreets/goconvey/convey"
)
// Unit tests for the management dao. They run against the package-level dao
// instance `d` from the test setup; write-path tests (Add/Update*) are
// commented out to avoid mutating shared state, and several read tests skip
// result assertions because the fixture data may be empty.
func TestDaoBusinessList(t *testing.T) {
convey.Convey("BusinessList", t, func(ctx convey.C) {
var (
c = context.Background()
name = ""
offset = int(0)
limit = int(0)
)
ctx.Convey("When everything goes positive", func(ctx convey.C) {
_, err := d.BusinessList(c, name, offset, limit)
ctx.Convey("Then err should be nil.list should not be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
//ctx.So(list, convey.ShouldNotBeNil)
})
})
})
}
func TestDaoBusinessTotal(t *testing.T) {
convey.Convey("BusinessTotal", t, func(ctx convey.C) {
var (
c = context.Background()
name = ""
)
ctx.Convey("When everything goes positive", func(ctx convey.C) {
total, err := d.BusinessTotal(c, name)
ctx.Convey("Then err should be nil.total should not be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
ctx.So(total, convey.ShouldNotBeNil)
})
})
})
}
func TestDaoBusinessAll(t *testing.T) {
convey.Convey("BusinessAll", t, func(ctx convey.C) {
var (
c = context.Background()
)
ctx.Convey("When everything goes positive", func(ctx convey.C) {
list, err := d.BusinessAll(c)
ctx.Convey("Then err should be nil.list should not be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
ctx.So(list, convey.ShouldNotBeNil)
})
})
})
}
// AddBusiness is disabled: it would insert a real row into digger_business.
func TestDaoAddBusiness(t *testing.T) {
convey.Convey("AddBusiness", t, func(ctx convey.C) {
//var (
//	c = context.Background()
//	b = &model.MngBusiness{}
//)
//ctx.Convey("When everything goes positive", func(ctx convey.C) {
//	id, _ := d.AddBusiness(c, b)
//	ctx.Convey("Then err should be nil.id should not be nil.", func(ctx convey.C) {
//		//ctx.So(err, convey.ShouldBeNil)
//		ctx.So(id, convey.ShouldNotBeNil)
//	})
//})
})
}
// UpdateBusiness is disabled: it would mutate shared fixture data.
func TestDaoUpdateBusiness(t *testing.T) {
convey.Convey("UpdateBusiness", t, func(ctx convey.C) {
//var (
//	c = context.Background()
//	b = &model.MngBusiness{}
//)
//ctx.Convey("When everything goes positive", func(ctx convey.C) {
//	err := d.UpdateBusiness(c, b)
//	ctx.Convey("Then err should be nil.", func(ctx convey.C) {
//		ctx.So(err, convey.ShouldBeNil)
//	})
//})
})
}
// BusinessInfo with id 0 returns (nil, nil); only err is asserted.
func TestDaoBusinessInfo(t *testing.T) {
convey.Convey("BusinessInfo", t, func(ctx convey.C) {
var (
c = context.Background()
id = int64(0)
)
ctx.Convey("When everything goes positive", func(ctx convey.C) {
_, err := d.BusinessInfo(c, id)
ctx.Convey("Then err should be nil.info should not be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
//ctx.So(info, convey.ShouldNotBeNil)
})
})
})
}
func TestDaoBusinessInfoByName(t *testing.T) {
convey.Convey("BusinessInfoByName", t, func(ctx convey.C) {
var (
c = context.Background()
name = "log"
)
ctx.Convey("When everything goes positive", func(ctx convey.C) {
info, err := d.BusinessInfoByName(c, name)
ctx.Convey("Then err should be nil.info should not be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
ctx.So(info, convey.ShouldNotBeNil)
})
})
})
}
func TestDaoAssetList(t *testing.T) {
convey.Convey("AssetList", t, func(ctx convey.C) {
var (
c = context.Background()
typ = int(0)
name = ""
offset = int(0)
limit = int(0)
)
ctx.Convey("When everything goes positive", func(ctx convey.C) {
_, err := d.AssetList(c, typ, name, offset, limit)
ctx.Convey("Then err should be nil.list should not be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
//ctx.So(list, convey.ShouldNotBeNil)
})
})
})
}
func TestDaoAssetTotal(t *testing.T) {
convey.Convey("AssetTotal", t, func(ctx convey.C) {
var (
c = context.Background()
typ = int(0)
name = ""
)
ctx.Convey("When everything goes positive", func(ctx convey.C) {
total, err := d.AssetTotal(c, typ, name)
ctx.Convey("Then err should be nil.total should not be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
ctx.So(total, convey.ShouldNotBeNil)
})
})
})
}
func TestDaoAssetAll(t *testing.T) {
convey.Convey("AssetAll", t, func(ctx convey.C) {
var (
c = context.Background()
)
ctx.Convey("When everything goes positive", func(ctx convey.C) {
list, err := d.AssetAll(c)
ctx.Convey("Then err should be nil.list should not be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
ctx.So(list, convey.ShouldNotBeNil)
})
})
})
}
func TestDaoAssetInfo(t *testing.T) {
convey.Convey("AssetInfo", t, func(ctx convey.C) {
var (
c = context.Background()
id = int64(0)
)
ctx.Convey("When everything goes positive", func(ctx convey.C) {
_, err := d.AssetInfo(c, id)
ctx.Convey("Then err should be nil.info should not be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
//ctx.So(info, convey.ShouldNotBeNil)
})
})
})
}
func TestDaoAssetInfoByName(t *testing.T) {
convey.Convey("AssetInfoByName", t, func(ctx convey.C) {
var (
c = context.Background()
name = ""
)
ctx.Convey("When everything goes positive", func(ctx convey.C) {
info, err := d.AssetInfoByName(c, name)
ctx.Convey("Then err should be nil.info should not be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
ctx.So(info, convey.ShouldNotBeNil)
})
})
})
}
// AddAsset is disabled: it would insert a real row into digger_asset.
func TestDaoAddAsset(t *testing.T) {
convey.Convey("AddAsset", t, func(ctx convey.C) {
//var (
//	c = context.Background()
//	b = &model.MngAsset{}
//)
//ctx.Convey("When everything goes positive", func(ctx convey.C) {
//	//id, err :=
//	d.AddAsset(c, b)
//	ctx.Convey("Then err should be nil.id should not be nil.", func(ctx convey.C) {
//		//ctx.So(err, convey.ShouldBeNil)
//		//ctx.So(id, convey.ShouldNotBeNil)
//	})
//})
})
}
// UpdateAsset is disabled: it would mutate shared fixture data.
func TestDaoUpdateAsset(t *testing.T) {
convey.Convey("UpdateAsset", t, func(ctx convey.C) {
//var (
//	c = context.Background()
//	b = &model.MngAsset{}
//)
//ctx.Convey("When everything goes positive", func(ctx convey.C) {
//	err := d.UpdateAsset(c, b)
//	ctx.Convey("Then err should be nil.", func(ctx convey.C) {
//		ctx.So(err, convey.ShouldBeNil)
//	})
//})
})
}
func TestDaoAppList(t *testing.T) {
convey.Convey("AppList", t, func(ctx convey.C) {
var (
c = context.Background()
business = ""
)
ctx.Convey("When everything goes positive", func(ctx convey.C) {
list, err := d.AppList(c, business)
ctx.Convey("Then err should be nil.list should not be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
ctx.So(list, convey.ShouldNotBeNil)
})
})
})
}
func TestDaoAppInfo(t *testing.T) {
convey.Convey("AppInfo", t, func(ctx convey.C) {
var (
c = context.Background()
id = int64(0)
)
ctx.Convey("When everything goes positive", func(ctx convey.C) {
//a, err :=
d.AppInfo(c, id)
ctx.Convey("Then err should be nil.a should not be nil.", func(ctx convey.C) {
//ctx.So(err, convey.ShouldBeNil)
//ctx.So(a, convey.ShouldNotBeNil)
})
})
})
}
func TestDaoAppInfoByAppid(t *testing.T) {
convey.Convey("AppInfoByAppid", t, func(ctx convey.C) {
var (
c = context.Background()
appid = ""
)
ctx.Convey("When everything goes positive", func(ctx convey.C) {
_, err := d.AppInfoByAppid(c, appid)
ctx.Convey("Then err should be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
})
})
})
}
// AddApp is disabled: it would insert a real row into digger_app.
func TestDaoAddApp(t *testing.T) {
convey.Convey("AddApp", t, func(ctx convey.C) {
//var (
//	c = context.Background()
//	a = &model.MngApp{}
//)
//ctx.Convey("When everything goes positive", func(ctx convey.C) {
//	id, err :=
//	d.AddApp(c, a)
//	ctx.Convey("Then err should be nil.id should not be nil.", func(ctx convey.C) {
//		ctx.So(err, convey.ShouldBeNil)
//		ctx.So(id, convey.ShouldNotBeNil)
//	})
//})
})
}
// UpdateApp is disabled: it would mutate shared fixture data.
func TestDaoUpdateApp(t *testing.T) {
convey.Convey("UpdateApp", t, func(ctx convey.C) {
//var (
//	c = context.Background()
//	a = &model.MngApp{}
//)
//ctx.Convey("When everything goes positive", func(ctx convey.C) {
//	err := d.UpdateApp(c, a)
//	ctx.Convey("Then err should be nil.", func(ctx convey.C) {
//		ctx.So(err, convey.ShouldBeNil)
//	})
//})
})
}
// UpdateAppAssetTable is disabled: it would mutate shared fixture data.
func TestDaoUpdateAppAssetTable(t *testing.T) {
convey.Convey("UpdateAppAssetTable", t, func(ctx convey.C) {
//var (
//	c = context.Background()
//	name = ""
//	no = &model.MngAssetTable{}
//)
//ctx.Convey("When everything goes positive", func(ctx convey.C) {
//	err := d.UpdateAppAssetTable(c, name, no)
//	ctx.Convey("Then err should be nil.", func(ctx convey.C) {
//		ctx.So(err, convey.ShouldBeNil)
//	})
//})
})
}
// UpdateAppAssetDatabus is disabled: it would mutate shared fixture data.
func TestDaoUpdateAppAssetDatabus(t *testing.T) {
convey.Convey("UpdateAppAssetDatabus", t, func(ctx convey.C) {
//var (
//	c = context.Background()
//	name = ""
//	v = &model.MngAssetDatabus{}
//)
//ctx.Convey("When everything goes positive", func(ctx convey.C) {
//	err := d.UpdateAppAssetDatabus(c, name, v)
//	ctx.Convey("Then err should be nil.", func(ctx convey.C) {
//		ctx.So(err, convey.ShouldBeNil)
//	})
//})
})
}
func TestDaoMngCount(t *testing.T) {
convey.Convey("MngCount", t, func(ctx convey.C) {
var (
c = context.Background()
v = &model.MngCount{}
)
ctx.Convey("When everything goes positive", func(ctx convey.C) {
list, err := d.MngCount(c, v)
ctx.Convey("Then err should be nil.list should not be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
ctx.So(list, convey.ShouldNotBeNil)
})
})
})
}
func TestDaoMngPercent(t *testing.T) {
convey.Convey("MngPercent", t, func(ctx convey.C) {
var (
c = context.Background()
v = &model.MngCount{}
)
ctx.Convey("When everything goes positive", func(ctx convey.C) {
list, err := d.MngPercent(c, v)
ctx.Convey("Then err should be nil.list should not be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
ctx.So(list, convey.ShouldNotBeNil)
})
})
})
}
func TestDaoUnames(t *testing.T) {
convey.Convey("Unames", t, func(ctx convey.C) {
var (
c = context.Background()
uids = []string{}
)
ctx.Convey("When everything goes positive", func(ctx convey.C) {
res, err := d.Unames(c, uids)
ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
ctx.So(res, convey.ShouldNotBeNil)
})
})
})
}

View File

@@ -0,0 +1,147 @@
package dao
import (
"context"
"database/sql"
"fmt"
"time"
"go-common/app/admin/main/search/model"
"go-common/library/log"
)
// Asset type discriminators and SQL statements for the v2 ("gf_") schema.
//
// NOTE(review): _bussinessUpdateSQL takes the target column via fmt.Sprintf
// (see BusinessUpdate), so the field name must never come from untrusted
// input without validation — it is spliced into the statement, not bound.
const (
_typeDatabus = `databus`
_typeDB = `db`
_typeTable = `table`
_businessAllV2SQL = `SELECT id,pid,name,description,state FROM gf_business`
_businessInfoV2SQL = `SELECT id,pid,name,description,data_conf,index_conf,business_conf,state,mtime FROM gf_business WHERE name=?`
_bussinessInsSQL = `INSERT INTO gf_business (pid,name,description) VALUES(?,?,?)`
_bussinessUpdateSQL = `UPDATE gf_business SET %s=? WHERE name=?`
_assetDBTablesV2SQL = `SELECT id,type,db,name,regex,fields,description,state FROM gf_asset WHERE type=? OR type=?`
_assetDBInsSQL = `INSERT INTO gf_asset (type,name,description,dsn) VALUES(?,?,?,?)`
_assetTableInsSQL = `INSERT INTO gf_asset (type,name,db,regex,fields,description) VALUES(?,?,?,?,?,?)`
_assetTableUpdateSQL = `UPDATE gf_asset set fields=? WHERE name=?`
_assetSQL = `SELECT id,type,name,dsn,db,regex,fields,description,state FROM gf_asset WHERE name=?`
)
// BusinessAllV2 lists every gf_business row (v2 schema).
func (d *Dao) BusinessAllV2(c context.Context) (list []*model.GFBusiness, err error) {
	rs, err := d.db.Query(c, _businessAllV2SQL)
	if err != nil {
		return
	}
	defer rs.Close()
	for rs.Next() {
		item := new(model.GFBusiness)
		if err = rs.Scan(&item.ID, &item.PID, &item.Name, &item.Description, &item.State); err != nil {
			return
		}
		list = append(list, item)
	}
	err = rs.Err()
	return
}
// BusinessInfoV2 loads one gf_business row by name; a missing row is reported
// as (nil, nil). On success the driver's RFC3339 mtime is reformatted into
// "2006-01-02 15:04:05" for display.
func (d *Dao) BusinessInfoV2(c context.Context, name string) (b *model.GFBusiness, err error) {
	b = new(model.GFBusiness)
	row := d.db.QueryRow(c, _businessInfoV2SQL, name)
	if err = row.Scan(&b.ID, &b.PID, &b.Name, &b.Description, &b.DataConf, &b.IndexConf, &b.BusinessConf, &b.State, &b.Mtime); err != nil {
		if err == sql.ErrNoRows {
			err = nil
			b = nil
		}
		// Return on every scan failure. Previously non-ErrNoRows errors fell
		// through and the half-populated row had its Mtime reformatted, and a
		// dead `if err != nil` check sat right after QueryRow (which does not
		// return an error itself).
		return
	}
	// Only rewrite Mtime when it parses; on a malformed value keep the raw
	// string instead of emitting the zero time "0001-01-01 00:00:00".
	if tm, e := time.Parse(time.RFC3339, b.Mtime); e == nil {
		b.Mtime = tm.Format("2006-01-02 15:04:05")
	}
	return
}
// BusinessIns inserts a business row; the named result carries the
// auto-increment id from LastInsertId, matching existing callers.
func (d *Dao) BusinessIns(c context.Context, pid int64, name, description string) (rows int64, err error) {
	result, err := d.db.Exec(c, _bussinessInsSQL, pid, name, description)
	if err != nil {
		log.Error("d.db.Exec error(%v)", err)
		return
	}
	return result.LastInsertId()
}
// BusinessUpdate sets one column (field) of gf_business to value for the row
// matching name and returns the affected-row count.
// NOTE(review): field is spliced into the SQL via Sprintf, so callers must
// pass a trusted/whitelisted column name — confirm no user input reaches it.
func (d *Dao) BusinessUpdate(c context.Context, name, field, value string) (rows int64, err error) {
	result, err := d.db.Exec(c, fmt.Sprintf(_bussinessUpdateSQL, field), value, name)
	if err != nil {
		log.Error("d.db.Exec error(%v)", err)
		return
	}
	return result.RowsAffected()
}
// AssetDBTables lists every gf_asset row whose type is "db" or "table".
func (d *Dao) AssetDBTables(c context.Context) (list []*model.GFAsset, err error) {
	rows, err := d.db.Query(c, _assetDBTablesV2SQL, _typeDB, _typeTable)
	if err != nil {
		return
	}
	defer rows.Close()
	for rows.Next() {
		asset := &model.GFAsset{}
		if err = rows.Scan(&asset.ID, &asset.Type, &asset.DB, &asset.Name, &asset.Regex, &asset.Fields, &asset.Description, &asset.State); err != nil {
			return
		}
		list = append(list, asset)
	}
	err = rows.Err()
	return
}
// AssetDBIns inserts a db-type asset row; the named result carries the
// auto-increment id from LastInsertId.
func (d *Dao) AssetDBIns(c context.Context, name, description, dsn string) (rows int64, err error) {
	result, err := d.db.Exec(c, _assetDBInsSQL, _typeDB, name, description, dsn)
	if err != nil {
		log.Error("d.db.Exec error(%v)", err)
		return
	}
	return result.LastInsertId()
}
// AssetTableIns inserts a table-type asset row; the named result carries the
// auto-increment id from LastInsertId.
func (d *Dao) AssetTableIns(c context.Context, name, db, regex, fields, description string) (rows int64, err error) {
	result, err := d.db.Exec(c, _assetTableInsSQL, _typeTable, name, db, regex, fields, description)
	if err != nil {
		log.Error("d.db.Exec error(%v)", err)
		return
	}
	return result.LastInsertId()
}
// UpdateAssetTable replaces the fields column of the asset row matching name
// and returns the affected-row count.
func (d *Dao) UpdateAssetTable(c context.Context, name, fields string) (rows int64, err error) {
	result, err := d.db.Exec(c, _assetTableUpdateSQL, fields, name)
	if err != nil {
		log.Error("d.db.Exec error(%v)", err)
		return
	}
	return result.RowsAffected()
}
// Asset fetches a single gf_asset row by name.
// Returns (nil, nil) when no row matches; on any other scan error the error
// is returned and r is nil.
// Fixes: previously a non-ErrNoRows scan error returned a non-nil,
// partially-populated r alongside err.
func (d *Dao) Asset(c context.Context, name string) (r *model.GFAsset, err error) {
	row := d.db.QueryRow(c, _assetSQL, name)
	r = new(model.GFAsset)
	if err = row.Scan(&r.ID, &r.Type, &r.Name, &r.DSN, &r.DB, &r.Regex, &r.Fields, &r.Description, &r.State); err != nil {
		r = nil
		if err == sql.ErrNoRows {
			err = nil
		}
	}
	return
}

View File

@@ -0,0 +1,145 @@
package dao
import (
"context"
"testing"
"github.com/smartystreets/goconvey/convey"
)
// TestDaoBusinessAllV2 smoke-tests listing all gf_business rows.
func TestDaoBusinessAllV2(t *testing.T) {
	convey.Convey("BusinessAllV2", t, func(ctx convey.C) {
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			list, err := d.BusinessAllV2(context.Background())
			ctx.Convey("Then err should be nil.list should not be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
				ctx.So(list, convey.ShouldNotBeNil)
			})
		})
	})
}
// TestDaoBusinessInfoV2 smoke-tests fetching the "dm" business row.
func TestDaoBusinessInfoV2(t *testing.T) {
	convey.Convey("BusinessInfoV2", t, func(ctx convey.C) {
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			b, err := d.BusinessInfoV2(context.Background(), "dm")
			ctx.Convey("Then err should be nil.b should not be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
				ctx.So(b, convey.ShouldNotBeNil)
			})
		})
	})
}
//func TestDaoBusinessIns(t *testing.T) {
// convey.Convey("BusinessIns", t, func(convCtx convey.C) {
// var (
// c = context.Background()
// pid = int64(0)
// name = ""
// description = ""
// )
// convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
// rows, err := d.BusinessIns(c, pid, name, description)
// convCtx.Convey("Then err should be nil.rows should not be nil.", func(convCtx convey.C) {
// convCtx.So(err, convey.ShouldBeNil)
// convCtx.So(rows, convey.ShouldNotBeNil)
// })
// })
// })
//}
//func TestDaoBusinessUpdate(t *testing.T) {
// convey.Convey("BusinessUpdate", t, func(convCtx convey.C) {
// var (
// c = context.Background()
// name = ""
// field = ""
// value = ""
// )
// convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
// rows, err := d.BusinessUpdate(c, name, field, value)
// convCtx.Convey("Then err should be nil.rows should not be nil.", func(convCtx convey.C) {
// convCtx.So(err, convey.ShouldBeNil)
// convCtx.So(rows, convey.ShouldNotBeNil)
// })
// })
// })
//}
// TestDaoAssetDBTables smoke-tests listing db/table assets.
func TestDaoAssetDBTables(t *testing.T) {
	convey.Convey("AssetDBTables", t, func(ctx convey.C) {
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			list, err := d.AssetDBTables(context.Background())
			ctx.Convey("Then err should be nil.list should not be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
				ctx.So(list, convey.ShouldNotBeNil)
			})
		})
	})
}
//
//func TestDaoAssetDBIns(t *testing.T) {
// convey.Convey("AssetDBIns", t, func(convCtx convey.C) {
// var (
// c = context.Background()
// name = ""
// description = ""
// dsn = ""
// )
// convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
// rows, err := d.AssetDBIns(c, name, description, dsn)
// convCtx.Convey("Then err should be nil.rows should not be nil.", func(convCtx convey.C) {
// convCtx.So(err, convey.ShouldBeNil)
// convCtx.So(rows, convey.ShouldNotBeNil)
// })
// })
// })
//}
//func TestDaoAssetTableIns(t *testing.T) {
// convey.Convey("AssetTableIns", t, func(convCtx convey.C) {
// var (
// c = context.Background()
// name = ""
// db = ""
// regex = ""
// fields = ""
// description = ""
// )
// convCtx.Convey("When everything goes positive", func(convCtx convey.C) {
// rows, err := d.AssetTableIns(c, name, db, regex, fields, description)
// convCtx.Convey("Then err should be nil.rows should not be nil.", func(convCtx convey.C) {
// convCtx.So(err, convey.ShouldBeNil)
// convCtx.So(rows, convey.ShouldNotBeNil)
// })
// })
// })
//}
// TestDaoAsset smoke-tests fetching the "bilibili_article" asset row.
func TestDaoAsset(t *testing.T) {
	convey.Convey("Asset", t, func(ctx convey.C) {
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			r, err := d.Asset(context.Background(), "bilibili_article")
			ctx.Convey("Then err should be nil.r should not be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
				ctx.So(r, convey.ShouldNotBeNil)
			})
		})
	})
}

View File

@@ -0,0 +1,443 @@
package dao
import (
"context"
"encoding/json"
"fmt"
"io"
"strings"
"go-common/app/admin/main/search/model"
"go-common/library/log"
"gopkg.in/olivere/elastic.v5"
)
const (
	// _queryConfSQL loads the per-appid search configuration rows from digger_app.
	_queryConfSQL = `select appid,es_name,index_prefix,index_type,index_id,index_mapping,query_max_indexes from digger_app`
)
// QueryConf loads the per-appid query configuration from digger_app and
// returns the rows keyed by appid.
func (d *Dao) QueryConf(ctx context.Context) (res map[string]*model.QueryConfDetail, err error) {
	rows, err := d.queryConfStmt.Query(ctx)
	if err != nil {
		log.Error("d.queryConfStmt.Query() error(%v)", err)
		return
	}
	defer rows.Close()
	res = map[string]*model.QueryConfDetail{}
	for rows.Next() {
		var appid string
		detail := new(model.QueryConfDetail)
		if err = rows.Scan(&appid, &detail.ESCluster, &detail.IndexPrefix, &detail.IndexType, &detail.IndexID, &detail.IndexMapping, &detail.MaxIndicesNum); err != nil {
			log.Error("d.QueryConf() rows.Scan() error(%v)", err)
			return
		}
		res[appid] = detail
	}
	err = rows.Err()
	return
}
// querysModel pairs one built where-clause with the field it targets and the
// clause kind so QueryBasic can decide how to attach it to the bool query.
type querysModel struct {
	field     string        // document field name; empty for like clauses
	whereKind string        // clause kind: "eq" / "or" / "in" / "range" / "like"
	esQuery   elastic.Query // the elastic query built for this clause
}
// QueryBasic builds the shared bool query from sp.QueryBody.Where so that
// customized businesses can extend it. It translates eq/or/in/range/combo/
// like clauses, honors Where.Not and nested "path.field" fields, applies the
// optional seeded random ordering, and returns the query plus a debug result
// that collects non-fatal build errors (and the DSL when sp.DebugLevel != 0).
// Fix: the random-seed function_score used to be appended inside the clause
// loop, adding one copy per plain filter clause (and none at all when there
// was no such clause); it is now applied exactly once after the loop.
func (d *Dao) QueryBasic(c context.Context, sp *model.QueryParams) (mixedQuery *elastic.BoolQuery, qbDebug *model.QueryDebugResult) {
	mixedQuery = elastic.NewBoolQuery()
	qbDebug = &model.QueryDebugResult{}
	querys := []*querysModel{}
	nestedQuerys := map[string]*elastic.BoolQuery{} // key: nested path, value: its bool query
	// fields
	if len(sp.QueryBody.Fields) == 0 {
		sp.QueryBody.Fields = []string{}
	}
	// from: nothing to do here
	// where: must be non-nil before the dereferences below
	if sp.QueryBody.Where == nil {
		sp.QueryBody.Where = &model.QueryBodyWhere{}
	}
	// where - eq
	for k, v := range sp.QueryBody.Where.EQ {
		querys = append(querys, &querysModel{
			field:     k,
			whereKind: "eq",
			esQuery:   elastic.NewTermQuery(k, v),
		})
	}
	// where - or
	for k, v := range sp.QueryBody.Where.Or {
		querys = append(querys, &querysModel{
			field:     k,
			whereKind: "or",
			esQuery:   elastic.NewTermQuery(k, v),
		})
	}
	// where - in (capped at 1024 values per field)
	for k, v := range sp.QueryBody.Where.In {
		if len(v) > 1024 {
			e := fmt.Sprintf("where in 超过1024 business(%s) error(%v)", sp.Business, v)
			log.Error(e)
			qbDebug.AddErrMsg(e)
			continue
		}
		querys = append(querys, &querysModel{
			field:     k,
			whereKind: "in",
			esQuery:   elastic.NewTermsQuery(k, v...),
		})
	}
	// where - range
	ranges, err := d.queryBasicRange(sp.QueryBody.Where.Range)
	if err != nil {
		qbDebug.AddErrMsg(err.Error())
	}
	for k, v := range ranges {
		querys = append(querys, &querysModel{
			field:     k,
			whereKind: "range",
			esQuery:   v,
		})
	}
	// where - combo: each combo is an outer bool+should+minimum wrapping one
	// inner bool+should+minimum per sub-kind
	for _, v := range sp.QueryBody.Where.Combo {
		combo := elastic.NewBoolQuery()
		cmbEQ := elastic.NewBoolQuery()
		cmbIn := elastic.NewBoolQuery()
		cmbRange := elastic.NewBoolQuery()
		cmbNotEQ := elastic.NewBoolQuery()
		cmbNotIn := elastic.NewBoolQuery()
		cmbNotRange := elastic.NewBoolQuery()
		// default every minimum_should_match to 1
		if v.Min.Min == 0 {
			v.Min.Min = 1
		}
		if v.Min.EQ == 0 {
			v.Min.EQ = 1
		}
		if v.Min.In == 0 {
			v.Min.In = 1
		}
		if v.Min.Range == 0 {
			v.Min.Range = 1
		}
		if v.Min.NotEQ == 0 {
			v.Min.NotEQ = 1
		}
		if v.Min.NotIn == 0 {
			v.Min.NotIn = 1
		}
		if v.Min.NotRange == 0 {
			v.Min.NotRange = 1
		}
		// per-kind should clauses
		for _, vEQ := range v.EQ {
			for eqK, eqV := range vEQ {
				cmbEQ.Should(elastic.NewTermQuery(eqK, eqV))
			}
		}
		for _, vIn := range v.In {
			for inK, inV := range vIn {
				cmbIn.Should(elastic.NewTermsQuery(inK, inV...))
			}
		}
		for _, vRange := range v.Range {
			ranges, _ := d.queryBasicRange(vRange)
			for _, rangeV := range ranges {
				cmbRange.Should(rangeV)
			}
		}
		for _, notEQ := range v.NotEQ {
			for k, v := range notEQ {
				cmbNotEQ.Should(elastic.NewTermQuery(k, v))
			}
		}
		for _, notIn := range v.NotIn {
			for k, v := range notIn {
				cmbNotIn.Should(elastic.NewTermsQuery(k, v...))
			}
		}
		for _, notRange := range v.NotRange {
			ranges, _ := d.queryBasicRange(notRange)
			for _, v := range ranges {
				cmbNotRange.Should(v)
			}
		}
		// attach each non-empty sub-kind with its minimum
		if len(v.EQ) > 0 {
			combo.Should(cmbEQ.MinimumNumberShouldMatch(v.Min.EQ))
		}
		if len(v.In) > 0 {
			combo.Should(cmbIn.MinimumNumberShouldMatch(v.Min.In))
		}
		if len(v.Range) > 0 {
			combo.Should(cmbRange.MinimumNumberShouldMatch(v.Min.Range))
		}
		if len(v.NotEQ) > 0 {
			combo.MustNot(elastic.NewBoolQuery().Should(cmbNotEQ.MinimumNumberShouldMatch(v.Min.NotEQ)))
		}
		if len(v.NotIn) > 0 {
			combo.MustNot(elastic.NewBoolQuery().Should(cmbNotIn.MinimumNumberShouldMatch(v.Min.NotIn)))
		}
		if len(v.NotRange) > 0 {
			combo.MustNot(elastic.NewBoolQuery().Should(cmbNotRange.MinimumNumberShouldMatch(v.Min.NotRange)))
		}
		// merge the combo into the main query
		mixedQuery.Filter(combo.MinimumNumberShouldMatch(v.Min.Min))
	}
	// where - like
	like, err := d.queryBasicLike(sp.QueryBody.Where.Like, sp.Business)
	if err != nil {
		qbDebug.AddErrMsg(err.Error())
	}
	for _, v := range like {
		querys = append(querys, &querysModel{
			whereKind: "like",
			esQuery:   v,
		})
	}
	// assemble mixedQuery
	for _, q := range querys {
		// like TODO: like on map-type fields should also support must_not and nested
		if q.field == "" && q.whereKind == "like" {
			mixedQuery.Must(q.esQuery)
			continue
		}
		if q.field == "" {
			continue
		}
		// nested "path.field" clauses: one DSL may only contain one nested
		// query per path, so collect them and attach once below
		if mapField := strings.Split(q.field, "."); len(mapField) > 1 && mapField[0] != "" {
			if _, ok := nestedQuerys[mapField[0]]; !ok {
				nestedQuerys[mapField[0]] = elastic.NewBoolQuery()
			}
			if bl, ok := sp.QueryBody.Where.Not[q.whereKind][q.field]; ok && bl {
				nestedQuerys[mapField[0]].MustNot(q.esQuery)
				continue
			}
			nestedQuerys[mapField[0]].Must(q.esQuery)
			continue
		}
		// must not
		if bl, ok := sp.QueryBody.Where.Not[q.whereKind][q.field]; ok && bl {
			mixedQuery.MustNot(q.esQuery)
			continue
		}
		// should
		if q.whereKind == "or" {
			mixedQuery.Should(q.esQuery)
			mixedQuery.MinimumShouldMatch("1") // fixed at 1 for now
			continue
		}
		// default
		mixedQuery.Filter(q.esQuery)
	}
	// random order with seed — applied exactly once (bug fix: previously
	// inside the loop above)
	if sp.QueryBody.OrderRandomSeed != "" {
		random := elastic.NewRandomFunction().Seed(sp.QueryBody.OrderRandomSeed)
		score := elastic.NewFunctionScoreQuery().Add(elastic.NewBoolQuery(), random)
		mixedQuery.Must(score)
	}
	// insert collected nested queries
	for k, n := range nestedQuerys {
		mixedQuery.Must(elastic.NewNestedQuery(k, n))
	}
	// DSL for debugging
	if sp.DebugLevel != 0 {
		if src, e := mixedQuery.Source(); e == nil {
			if data, er := json.Marshal(src); er == nil {
				qbDebug.DSL = string(data)
			}
		}
	}
	return
}
// queryBasicRange parses interval strings such as "[a,b)" or "(a,b]" into
// elastic range queries keyed by field. Malformed entries are skipped; err
// reports the last malformed value encountered.
func (d *Dao) queryBasicRange(rangeMap map[string]string) (rangeQuery map[string]*elastic.RangeQuery, err error) {
	rangeQuery = make(map[string]*elastic.RangeQuery)
	for field, raw := range rangeMap {
		trimmed := strings.Trim(raw, " ")
		if trimmed == "" {
			continue
		}
		runes := []rune(trimmed)
		if len(runes) <= 3 {
			// too short to be "[x,y]"-shaped
			err = fmt.Errorf("sp.QueryBody.Where.Range range format err. error(%v)", raw)
			continue
		}
		opener := string(runes[0:1])
		closer := string(runes[len(runes)-1:])
		bounds := strings.Split(strings.Trim(raw, "[]() "), ",")
		if len(bounds) != 2 {
			err = fmt.Errorf("sp.QueryBody.Where.Range Fromto err")
			continue
		}
		lower := strings.Trim(bounds[0], " ")
		upper := strings.Trim(bounds[1], " ")
		rQuery := elastic.NewRangeQuery(field)
		applied := 0
		if lower != "" {
			switch opener {
			case "(":
				rQuery.Gt(lower)
				applied++
			case "[":
				rQuery.Gte(lower)
				applied++
			}
		}
		if upper != "" {
			switch closer {
			case ")":
				rQuery.Lt(upper)
				applied++
			case "]":
				rQuery.Lte(upper)
				applied++
			}
		}
		// keep the query only when at least one bound was applied
		if applied > 0 {
			rangeQuery[field] = rQuery
		}
	}
	return
}
// queryBasicLike builds fuzzy/keyword queries for the three like levels:
//   - high:   wildcard query_string over sanitized single-character tokens
//   - middle: explicit 2-gram term queries (a single-character keyword falls
//     back to a wildcard query_string)
//   - low (default): multi_match best_fields, with per-business tuning
func (d *Dao) queryBasicLike(likeMap []model.QueryBodyWhereLike, business string) (likeQuery []elastic.Query, err error) {
	for _, v := range likeMap {
		if len(v.KW) == 0 {
			continue
		}
		switch v.Level {
		case model.LikeLevelHigh:
			kw := []string{}
			r := []rune(v.KW[0])
			for i := 0; i < len(r); i++ {
				if k := string(r[i : i+1]); !strings.ContainsAny(k, "~[](){}^?:\"\\/!+-=&* ") { // strip special characters
					kw = append(kw, k)
				} else if len(kw) > 1 && kw[len(kw)-1:][0] != "*" { // collapse a run of specials into a single "* *" separator
					kw = append(kw, "*", " ", "*")
				}
			}
			if len(kw) == 0 || strings.Join(kw, "") == "* *" {
				continue
			}
			qs := elastic.NewQueryStringQuery("*" + strings.Trim(strings.Join(kw, ""), "* ") + "*").AllowLeadingWildcard(true) // default operator is OR
			if !v.Or {
				qs.DefaultOperator("AND")
			}
			for _, v := range v.KWFields {
				qs.Field(v)
			}
			likeQuery = append(likeQuery, qs)
		case model.LikeLevelMiddel:
			// a single one-character keyword needs special handling
			if r := []rune(v.KW[0]); len(r) == 1 && len(v.KW) == 1 {
				qs := elastic.NewQueryStringQuery("*" + string(r[:]) + "*").AllowLeadingWildcard(true) // default operator is OR
				if !v.Or {
					qs.DefaultOperator("AND")
				}
				for _, v := range v.KWFields {
					qs.Field(v)
				}
				likeQuery = append(likeQuery, qs)
				continue
			}
			// with a custom analyzer multi_match cannot use minimum_should_match
			// (defaults to "at least one matches"), which keeps the result set
			// too large, so emit explicit ngram(2,2) term queries instead
			for _, kw := range v.KW {
				rn := []rune(kw)
				for i := 0; i+1 < len(rn); i++ {
					kwStr := string(rn[i : i+2])
					for _, kwField := range v.KWFields {
						likeQuery = append(likeQuery, elastic.NewTermQuery(kwField, kwStr))
					}
				}
			}
		case "", model.LikeLevelLow:
			qs := elastic.NewMultiMatchQuery(strings.Join(v.KW, " "), v.KWFields...).Type("best_fields").TieBreaker(0.6).MinimumShouldMatch("90%") // default operator is AND
			// TODO: per-business custom match configuration
			if business == "copyright" {
				qs.MinimumShouldMatch("10%")
			}
			if business == "academy_archive" {
				qs.MinimumShouldMatch("50%")
			}
			if v.Or {
				qs.Operator("OR")
			}
			likeQuery = append(likeQuery, qs)
		}
	}
	return
}
// Scroll pages through all documents matching the basic query via the ES
// scroll API until pn*ps documents have been collected (or the scroll is
// exhausted), clears the scroll cursor asynchronously, and returns the hit
// sources as one JSON array in res.Result.
func (d *Dao) Scroll(c context.Context, sp *model.QueryParams) (res *model.QueryResult, debug *model.QueryDebugResult, err error) {
	var (
		tList    []json.RawMessage // raw hit sources collected so far
		tLen     int               // len(tList), tracked to stop at pn*ps
		ScrollID = ""              // scroll cursor; empty on the first request
	)
	res = &model.QueryResult{}
	esCluster := sp.AppIDConf.ESCluster
	query, _ := d.QueryBasic(c, sp)
	eSearch, ok := d.esPool[esCluster]
	if !ok {
		PromError(fmt.Sprintf("es:集群不存在%s", esCluster), "s.dao.searchResult indexName:%s", esCluster)
		return
	}
	fsc := elastic.NewFetchSourceContext(true).Include(sp.QueryBody.Fields...)
	// multi sort: score goes first or last depending on OrderScoreFirst
	sorterSlice := []elastic.Sorter{}
	if len(sp.QueryBody.Where.Like) > 0 && sp.QueryBody.OrderScoreFirst { // NOTE: len(Like) > 0 with only empty entries inside is still an issue
		sorterSlice = append(sorterSlice, elastic.NewScoreSort().Desc())
	}
	for _, i := range sp.QueryBody.Order {
		for k, v := range i {
			if v == "asc" {
				sorterSlice = append(sorterSlice, elastic.NewFieldSort(k).Asc())
			} else {
				sorterSlice = append(sorterSlice, elastic.NewFieldSort(k).Desc())
			}
		}
	}
	if len(sp.QueryBody.Where.Like) > 0 && !sp.QueryBody.OrderScoreFirst {
		sorterSlice = append(sorterSlice, elastic.NewScoreSort().Desc())
	}
	for {
		// NOTE(review): err here shadows the named return, so scroll/unmarshal
		// failures below break the loop but the function still returns err == nil
		// — confirm this best-effort behavior is intended.
		searchResult, err := eSearch.Scroll().Index(sp.QueryBody.From).Type("base").
			Query(query).FetchSourceContext(fsc).Size(5000).Scroll("1m").ScrollId(ScrollID).SortBy(sorterSlice...).Do(c)
		if err == io.EOF { // scroll exhausted
			break
		} else if err != nil {
			PromError(fmt.Sprintf("es:执行查询失败%s ", "Scroll"), "es:执行查询失败%v", err)
			break
		}
		ScrollID = searchResult.ScrollId
		for _, hit := range searchResult.Hits.Hits {
			var t json.RawMessage
			if err = json.Unmarshal(*hit.Source, &t); err != nil {
				PromError(fmt.Sprintf("es:Unmarshal%s ", "Scroll"), "es:Unmarshal%v", err)
				break
			}
			tList = append(tList, t)
			tLen++
			// stop once enough documents for pn*ps pages have been gathered
			if tLen >= sp.QueryBody.Pn*sp.QueryBody.Ps {
				goto ClearScroll
			}
		}
	}
ClearScroll:
	// release the server-side scroll context; fire-and-forget
	go eSearch.ClearScroll().ScrollId(ScrollID).Do(context.Background())
	if res.Result, err = json.Marshal(tList); err != nil {
		PromError(fmt.Sprintf("es:Unmarshal%s ", "Scroll"), "es:Unmarshal%v", err)
		return
	}
	return
}

View File

@@ -0,0 +1,413 @@
package dao
import (
"context"
"encoding/json"
"fmt"
"strings"
"time"
"go-common/app/admin/main/search/model"
"go-common/library/ecode"
"go-common/library/log"
"gopkg.in/olivere/elastic.v5"
)
// ArchiveVideoScore 稿件一审打分排序: first-round archive review ranking.
// It layers an exponential time-decay score on arc_senddate (origin
// 2015-01-01, scale = days elapsed since then) on top of the basic query;
// documents with user_type == 1 are weighted 10000x, everything else 1x.
// The explicit sort order is cleared so the function score decides ordering.
func (d *Dao) ArchiveVideoScore(c context.Context, sp *model.QueryParams) (res *model.QueryResult, debug *model.QueryDebugResult, err error) {
	query, qbDebug := d.QueryBasic(c, sp)
	// decay scale in days since 2015-01-01 00:00:00 +0800 (epoch 1420041600)
	days := fmt.Sprintf("%dd", (time.Now().Unix()-1420041600)/(3600*24))
	decay := func(weight float64) *elastic.ExponentialDecayFunction {
		return elastic.NewExponentialDecayFunction().FieldName("arc_senddate").Origin("2015-01-01 00:00:00").Scale(days).Offset("1d").Decay(0.8).Weight(weight)
	}
	score := elastic.NewFunctionScoreQuery().
		Add(elastic.NewTermQuery("user_type", 1), decay(10000)).
		Add(nil, decay(1))
	query = query.Must(score)
	sp.QueryBody.Order = []map[string]string{}
	if res, debug, err = d.QueryResult(c, query, sp, qbDebug); err != nil {
		PromError(fmt.Sprintf("es:%s ", sp.Business), "%v", err)
	}
	return
}
// ArchiveScore 稿件二审打分排序: second-round archive review ranking.
// Same shape as ArchiveVideoScore but decays on ctime instead of
// arc_senddate; user_type == 1 documents are weighted 10000x, others 1x,
// and the explicit sort order is cleared so the score decides ordering.
func (d *Dao) ArchiveScore(c context.Context, sp *model.QueryParams) (res *model.QueryResult, debug *model.QueryDebugResult, err error) {
	query, qbDebug := d.QueryBasic(c, sp)
	// decay scale in days since 2015-01-01 00:00:00 +0800 (epoch 1420041600)
	days := fmt.Sprintf("%dd", (time.Now().Unix()-1420041600)/(3600*24))
	decay := func(weight float64) *elastic.ExponentialDecayFunction {
		return elastic.NewExponentialDecayFunction().FieldName("ctime").Origin("2015-01-01 00:00:00").Scale(days).Offset("1d").Decay(0.8).Weight(weight)
	}
	score := elastic.NewFunctionScoreQuery().
		Add(elastic.NewTermQuery("user_type", 1), decay(10000)).
		Add(nil, decay(1))
	query = query.Must(score)
	sp.QueryBody.Order = []map[string]string{}
	if res, debug, err = d.QueryResult(c, query, sp, qbDebug); err != nil {
		PromError(fmt.Sprintf("es:%s ", sp.Business), "%v", err)
	}
	return
}
// TaskQaRandom runs the basic query with an (optionally seeded) random score
// so QA task results come back in random order.
// The virtual EQ field "seed" is consumed for the random function and removed
// so it is not treated as a real document field.
// Fix: removed the dead `if err != nil` check after QueryBasic — QueryBasic
// returns no error, so err was always the untouched nil named return there.
func (d *Dao) TaskQaRandom(c context.Context, sp *model.QueryParams) (res *model.QueryResult, debug *model.QueryDebugResult, err error) {
	random := elastic.NewRandomFunction()
	if sp != nil && sp.QueryBody != nil && sp.QueryBody.Where != nil && sp.QueryBody.Where.EQ != nil {
		if seed, ok := sp.QueryBody.Where.EQ["seed"]; ok {
			random = elastic.NewRandomFunction().Seed(seed)
			delete(sp.QueryBody.Where.EQ, "seed")
		}
	}
	query, qbDebug := d.QueryBasic(c, sp)
	score := elastic.NewFunctionScoreQuery().Add(elastic.NewBoolQuery(), random)
	qy := elastic.NewBoolQuery().Must(query, score)
	if res, debug, err = d.QueryResult(c, qy, sp, qbDebug); err != nil {
		PromError(fmt.Sprintf("es:%s ", sp.Business), "%v", err)
	}
	return
}
// EsportsContestsDate 电竞右侧日历联动: e-sports calendar linkage.
// Aggregates matching contests by stime (terms agg + top_hits keeping the
// "ids" field) and returns, as JSON, a map from each stime bucket to the
// count of distinct contest ids in it.
func (d *Dao) EsportsContestsDate(c context.Context, sp *model.QueryParams) (res *model.QueryResult, debug *model.QueryDebugResult, err error) {
	res = &model.QueryResult{}
	// query basic
	query, qbDebug := d.QueryBasic(c, sp)
	debug = qbDebug
	esCluster := sp.AppIDConf.ESCluster
	if _, ok := d.esPool[esCluster]; !ok {
		debug.AddErrMsg("es:集群不存在" + esCluster)
		return
	}
	// group by stime, keeping up to 1000 buckets and 1000 hits per bucket
	aggs := elastic.NewTermsAggregation()
	fsc := elastic.NewFetchSourceContext(true).Include("ids")
	aggs = aggs.Field("stime").Size(1000).SubAggregation("top_ids_hits", elastic.NewTopHitsAggregation().FetchSourceContext(fsc).Size(1000))
	searchPrepare := d.esPool[esCluster].Search().Index(sp.QueryBody.From).Query(query).Aggregation("group_by_stime", aggs).Size(0)
	if sp.DebugLevel == 2 {
		searchPrepare.Profile(true)
	}
	// NOTE(review): uses context.Background() rather than c — confirm intentional
	searchResult, err := searchPrepare.Do(context.Background())
	if err != nil {
		debug.AddErrMsg(fmt.Sprintf("es:执行查询失败%s. %v", esCluster, err))
		PromError(fmt.Sprintf("es:执行查询失败%s ", esCluster), "%v", err)
		return
	}
	result, ok := searchResult.Aggregations.Terms("group_by_stime")
	if !ok {
		return
	}
	// hitDoc mirrors the top_hits sub-aggregation payload shape
	type hitDoc struct {
		Hits []struct {
			Source struct {
				IDs []string `json:"ids"`
			} `json:"_source"`
		} `json:"hits"`
	}
	// idsRes pairs one bucket date with the ids carried by one hit
	type idsRes struct {
		Date string
		IDs  []string
	}
	ids := []idsRes{}
	for _, b := range result.Buckets {
		var hit hitDoc
		// b.KeyAsString holds the bucket's stime value
		if list, ok := b.Terms("top_ids_hits"); ok {
			a, _ := list.Aggregations["hits"].MarshalJSON()
			if err = json.Unmarshal(a, &hit); err != nil {
				return
			}
			for _, h := range hit.Hits {
				ids = append(ids, idsRes{
					Date: *b.KeyAsString, // NOTE(review): a nil KeyAsString would panic — confirm terms buckets always set it
					IDs:  h.Source.IDs,
				})
			}
		}
	}
	// deduplicate ids per date, then count distinct ids for each date
	resDoc := map[string]int{}
	resDocTmp := map[string]map[string]bool{}
	for _, v := range ids {
		if _, ok := resDocTmp[v.Date]; !ok {
			resDocTmp[v.Date] = map[string]bool{}
		}
		for _, id := range v.IDs {
			resDocTmp[v.Date][id] = true
		}
	}
	for date, idList := range resDocTmp {
		resDoc[date] = len(idList)
	}
	if doc, er := json.Marshal(resDoc); er != nil {
		debug.AddErrMsg(fmt.Sprintf("es:Unmarshal docBuckets es:Unmarshal%v", er))
	} else {
		res.Result = doc
	}
	return
}
var (
	// archive state buckets used by the creative-center queries below;
	// the grouping follows the "pubed"/"not_pubed"/"is_pubing" filter names.
	_pubed    = []interface{}{-40, 0, 10000, 1, 1001, 15000, 20000, 30000}
	_notpubed = []interface{}{-2, -4, -5, -11, -12, -16}
	_ispubing = []interface{}{-1, -6, -7, -8, -9, -10, -13, -15, -30}
	// _all is the union of the three buckets above
	_all = append(append(_pubed, _notpubed...), _ispubing...)
)
// CreativeArchiveSearch 创作中心: returns a member's archive list ("vlist")
// plus per-type ("tlist") and per-state ("plist") aggregation buckets.
// Fixes: (1) the nil guard used && instead of ||, so it never triggered and
// would nil-deref when sp == nil; (2) the state-aggregation like loop
// assigned typeFilter.Filter(...) to stateFilter, clobbering the state
// filter with the type filter.
func (d *Dao) CreativeArchiveSearch(c context.Context, sp *model.QueryParams) (res *model.QueryResult, debug *model.QueryDebugResult, err error) {
	var (
		mid interface{}
		ok  bool
	)
	docBuckets := map[string]interface{}{}
	if sp == nil || sp.QueryBody == nil || sp.QueryBody.Where == nil || sp.QueryBody.Where.EQ == nil {
		return res, debug, ecode.RequestErr
	}
	if mid, ok = sp.QueryBody.Where.EQ["mid"]; !ok {
		return res, debug, ecode.RequestErr
	}
	// 列表: translate the symbolic state filter into the concrete state sets
	if state, ok := sp.QueryBody.Where.EQ["state"]; ok {
		if sp.QueryBody.Where.In == nil {
			sp.QueryBody.Where.In = map[string][]interface{}{}
		}
		switch state {
		case "pubed":
			sp.QueryBody.Where.In["state"] = _pubed
		case "not_pubed":
			sp.QueryBody.Where.In["state"] = _notpubed
		case "is_pubing":
			sp.QueryBody.Where.In["state"] = _ispubing
		default:
			sp.QueryBody.Where.In["state"] = _all
		}
		delete(sp.QueryBody.Where.EQ, "state")
	} else {
		if sp.QueryBody.Where.In == nil {
			sp.QueryBody.Where.In = map[string][]interface{}{}
		}
		sp.QueryBody.Where.In["state"] = _all
	}
	query, qbDebug := d.QueryBasic(c, sp)
	if res, debug, err = d.QueryResult(c, query, sp, qbDebug); err != nil {
		PromError(fmt.Sprintf("es:%s ", sp.Business), "%v", err)
		return
	}
	docBuckets["vlist"] = res.Result
	// 类型统计: per-pid aggregation over all states of this mid
	typeFilter := elastic.NewBoolQuery().Must(elastic.NewTermsQuery("mid", mid))
	typeFilter = typeFilter.Filter(elastic.NewTermsQuery("state", _all...))
	for _, v := range sp.QueryBody.Where.Like {
		typeFilter = typeFilter.Filter(elastic.NewMultiMatchQuery(strings.Join(v.KW, " "), v.KWFields...).Type("best_fields").TieBreaker(0.6).MinimumShouldMatch("100%"))
	}
	typeAgg := elastic.NewTermsAggregation().Field("pid")
	request1 := elastic.NewSearchRequest().Index(sp.QueryBody.From).Type("base").Source(elastic.NewSearchSource().Query(typeFilter).Aggregation("pid", typeAgg))
	// 状态统计: pubed/not_pubed/is_pubing counts for this mid (and pid if set)
	stateFilter := elastic.NewBoolQuery().Filter(elastic.NewTermsQuery("mid", mid))
	if pid, ok := sp.QueryBody.Where.EQ["pid"]; ok {
		stateFilter = stateFilter.Filter(elastic.NewTermsQuery("pid", pid))
	}
	for _, v := range sp.QueryBody.Where.Like {
		// bug fix: used to build on typeFilter instead of stateFilter
		stateFilter = stateFilter.Filter(elastic.NewMultiMatchQuery(strings.Join(v.KW, " "), v.KWFields...).Type("best_fields").TieBreaker(0.6).MinimumShouldMatch("100%"))
	}
	stateAgg := elastic.NewFiltersAggregation().
		FilterWithName("pubed", elastic.NewTermsQuery("state", _pubed...)).
		FilterWithName("not_pubed", elastic.NewTermsQuery("state", _notpubed...)).
		FilterWithName("is_pubing", elastic.NewTermsQuery("state", _ispubing...))
	request2 := elastic.NewSearchRequest().Index(sp.QueryBody.From).Type("base").Source(elastic.NewSearchSource().Query(stateFilter).Aggregation("state", stateAgg))
	MultiRes, err := d.esPool[sp.AppIDConf.ESCluster].MultiSearch().Add(request1, request2).Do(c)
	if err != nil {
		PromError(fmt.Sprintf("es:%s ", sp.Business), "%v", err)
		return
	}
	// extract the two aggregation buckets (unmarshal errors leave nil entries)
	tmp := map[string]interface{}{}
	json.Unmarshal(*MultiRes.Responses[0].Aggregations["pid"], &tmp)
	docBuckets["tlist"] = tmp["buckets"]
	tmp = map[string]interface{}{}
	json.Unmarshal(*MultiRes.Responses[1].Aggregations["state"], &tmp)
	docBuckets["plist"] = tmp["buckets"]
	if resResult, e := json.Marshal(docBuckets); e != nil {
		log.Error("CreativeArchiveSearch.json.error(%v)", e)
	} else {
		res.Result = resResult
	}
	return
}
// CreativeArchiveStaff 创作中心: staff (co-author) archive list ("vlist")
// plus per-type ("tlist") and per-state ("plist") aggregations, scoped by
// exactly one combo filter.
// Fix: the state-aggregation like loop assigned typeFilter.Filter(...) to
// stateFilter, clobbering the state filter with the type filter.
func (d *Dao) CreativeArchiveStaff(c context.Context, sp *model.QueryParams) (res *model.QueryResult, debug *model.QueryDebugResult, err error) {
	docBuckets := map[string]interface{}{}
	if sp == nil || sp.QueryBody == nil || sp.QueryBody.Where == nil || sp.QueryBody.Where.Combo == nil || len(sp.QueryBody.Where.Combo) != 1 {
		return res, debug, ecode.RequestErr
	}
	combo := sp.QueryBody.Where.Combo[0]
	if len(combo.EQ) == 0 {
		return res, debug, ecode.RequestErr
	}
	// base filter shared by both aggregations, built from the combo alone
	queryListParams := &model.QueryParams{
		QueryBody: &model.QueryBody{
			Where: &model.QueryBodyWhere{
				Combo: sp.QueryBody.Where.Combo,
			},
		},
	}
	queryList, _ := d.QueryBasic(c, queryListParams)
	// 列表: translate the symbolic state filter into the concrete state sets
	if state, ok := sp.QueryBody.Where.EQ["state"]; ok {
		if sp.QueryBody.Where.In == nil {
			sp.QueryBody.Where.In = map[string][]interface{}{}
		}
		switch state {
		case "pubed":
			sp.QueryBody.Where.In["state"] = _pubed
		case "not_pubed":
			sp.QueryBody.Where.In["state"] = _notpubed
		case "is_pubing":
			sp.QueryBody.Where.In["state"] = _ispubing
		default:
			sp.QueryBody.Where.In["state"] = _all
		}
		delete(sp.QueryBody.Where.EQ, "state")
	} else {
		if sp.QueryBody.Where.In == nil {
			sp.QueryBody.Where.In = map[string][]interface{}{}
		}
		sp.QueryBody.Where.In["state"] = _all
	}
	query, qbDebug := d.QueryBasic(c, sp)
	if res, debug, err = d.QueryResult(c, query, sp, qbDebug); err != nil {
		PromError(fmt.Sprintf("es:%s ", sp.Business), "%v", err)
		return
	}
	docBuckets["vlist"] = res.Result
	// 类型统计: per-pid aggregation over all states
	typeFilter := elastic.NewBoolQuery().Filter(queryList)
	typeFilter = typeFilter.Filter(elastic.NewTermsQuery("state", _all...))
	for _, v := range sp.QueryBody.Where.Like {
		typeFilter = typeFilter.Filter(elastic.NewMultiMatchQuery(strings.Join(v.KW, " "), v.KWFields...).Type("best_fields").TieBreaker(0.6).MinimumShouldMatch("90%"))
	}
	typeAgg := elastic.NewTermsAggregation().Field("pid")
	request1 := elastic.NewSearchRequest().Index(sp.QueryBody.From).Type("base").Source(elastic.NewSearchSource().Query(typeFilter).Aggregation("pid", typeAgg).Size(0))
	// 状态统计: pubed/not_pubed/is_pubing counts (and pid scope if set)
	stateFilter := elastic.NewBoolQuery().Filter(queryList)
	if pid, ok := sp.QueryBody.Where.EQ["pid"]; ok {
		stateFilter = stateFilter.Filter(elastic.NewTermsQuery("pid", pid))
	}
	for _, v := range sp.QueryBody.Where.Like {
		// bug fix: used to build on typeFilter instead of stateFilter
		stateFilter = stateFilter.Filter(elastic.NewMultiMatchQuery(strings.Join(v.KW, " "), v.KWFields...).Type("best_fields").TieBreaker(0.6).MinimumShouldMatch("90%"))
	}
	stateAgg := elastic.NewFiltersAggregation().
		// 稿件状态 archive state buckets
		FilterWithName("pubed", elastic.NewTermsQuery("state", _pubed...)).
		FilterWithName("not_pubed", elastic.NewTermsQuery("state", _notpubed...)).
		FilterWithName("is_pubing", elastic.NewTermsQuery("state", _ispubing...))
	request2 := elastic.NewSearchRequest().Index(sp.QueryBody.From).Type("base").Source(elastic.NewSearchSource().Query(stateFilter).Aggregation("state", stateAgg).Size(0))
	MultiRes, err := d.esPool[sp.AppIDConf.ESCluster].MultiSearch().Add(request1, request2).Do(c)
	if err != nil {
		PromError(fmt.Sprintf("es:%s ", sp.Business), "%v", err)
		return
	}
	// extract the two aggregation buckets (unmarshal errors leave nil entries)
	tmp := map[string]interface{}{}
	json.Unmarshal(*MultiRes.Responses[0].Aggregations["pid"], &tmp)
	docBuckets["tlist"] = tmp["buckets"]
	tmp = map[string]interface{}{}
	json.Unmarshal(*MultiRes.Responses[1].Aggregations["state"], &tmp)
	docBuckets["plist"] = tmp["buckets"]
	if resResult, e := json.Marshal(docBuckets); e != nil {
		log.Error("CreativeArchiveSearch.json.error(%v)", e)
	} else {
		res.Result = resResult
	}
	return
}
// CreativeArchiveStaff 创作中心
func (d *Dao) CreativeArchiveApply(c context.Context, sp *model.QueryParams) (res *model.QueryResult, debug *model.QueryDebugResult, err error) {
var (
applyStaffMid interface{}
ok bool
)
docBuckets := map[string]interface{}{}
if sp == nil || sp.QueryBody == nil || sp.QueryBody.Where == nil || sp.QueryBody.Where.EQ == nil {
return res, debug, ecode.RequestErr
}
if applyStaffMid, ok = sp.QueryBody.Where.EQ["apply_staff.apply_staff_mid"]; !ok {
return res, debug, ecode.RequestErr
}
// 列表
if state, ok := sp.QueryBody.Where.EQ["apply_staff.deal_state"]; ok {
if sp.QueryBody.Where.In == nil {
sp.QueryBody.Where.In = map[string][]interface{}{}
}
switch state {
case "pending": //待处理
sp.QueryBody.Where.In["apply_staff.deal_state"] = []interface{}{1}
case "processed": //已处理
sp.QueryBody.Where.In["apply_staff.deal_state"] = []interface{}{2}
case "neglected": //已忽略
sp.QueryBody.Where.In["apply_staff.deal_state"] = []interface{}{3}
default:
sp.QueryBody.Where.In["apply_staff.deal_state"] = []interface{}{1, 2, 3}
}
delete(sp.QueryBody.Where.EQ, "apply_staff.deal_state")
} else {
if sp.QueryBody.Where.In == nil {
sp.QueryBody.Where.In = map[string][]interface{}{}
}
sp.QueryBody.Where.In["apply_staff.deal_state"] = []interface{}{1, 2, 3}
}
sp.QueryBody.Where.In["state"] = _all
query, qbDebug := d.QueryBasic(c, sp)
if res, debug, err = d.QueryResult(c, query, sp, qbDebug); err != nil {
PromError(fmt.Sprintf("es:%s ", sp.Business), "%v", err)
return
}
docBuckets["vlist"] = res.Result
// 类型统计
typeFilter := elastic.NewBoolQuery().Filter(
elastic.NewTermsQuery("state", _all...),
elastic.NewNestedQuery("apply_staff", elastic.NewBoolQuery().Must(
elastic.NewTermQuery("apply_staff.apply_staff_mid", applyStaffMid),
elastic.NewTermsQuery("apply_staff.deal_state", []interface{}{1, 2, 3}...),
)),
)
for _, v := range sp.QueryBody.Where.Like {
typeFilter = typeFilter.Filter(elastic.NewMultiMatchQuery(strings.Join(v.KW, " "), v.KWFields...).Type("best_fields").TieBreaker(0.6).MinimumShouldMatch("90%"))
}
typeAgg := elastic.NewTermsAggregation().Field("pid")
request1 := elastic.NewSearchRequest().Index(sp.QueryBody.From).Type("base").Source(elastic.NewSearchSource().Query(typeFilter).Aggregation("pid", typeAgg).Size(0))
// 状态统计
stateFilter := elastic.NewBoolQuery().Filter(
elastic.NewTermsQuery("state", _all...),
elastic.NewNestedQuery("apply_staff", elastic.NewBoolQuery().Must(elastic.NewTermQuery("apply_staff.apply_staff_mid", applyStaffMid))),
)
if pid, ok := sp.QueryBody.Where.EQ["pid"]; ok {
stateFilter = stateFilter.Filter(elastic.NewTermsQuery("pid", pid))
}
for _, v := range sp.QueryBody.Where.Like {
stateFilter = typeFilter.Filter(elastic.NewMultiMatchQuery(strings.Join(v.KW, " "), v.KWFields...).Type("best_fields").TieBreaker(0.6).MinimumShouldMatch("90%"))
}
stateAgg := elastic.NewFiltersAggregation().
FilterWithName("pending", elastic.NewNestedQuery("apply_staff", elastic.NewBoolQuery().Must(elastic.NewTermQuery("apply_staff.apply_staff_mid", applyStaffMid), elastic.NewTermQuery("apply_staff.deal_state", 1)))).
FilterWithName("processed", elastic.NewNestedQuery("apply_staff", elastic.NewBoolQuery().Must(elastic.NewTermQuery("apply_staff.apply_staff_mid", applyStaffMid), elastic.NewTermQuery("apply_staff.deal_state", 2)))).
FilterWithName("neglected", elastic.NewNestedQuery("apply_staff", elastic.NewBoolQuery().Must(elastic.NewTermQuery("apply_staff.apply_staff_mid", applyStaffMid), elastic.NewTermQuery("apply_staff.deal_state", 3))))
request2 := elastic.NewSearchRequest().Index(sp.QueryBody.From).Type("base").Source(elastic.NewSearchSource().Query(stateFilter).Aggregation("state", stateAgg).Size(0))
MultiRes, err := d.esPool[sp.AppIDConf.ESCluster].MultiSearch().Add(request1, request2).Do(c)
if err != nil {
PromError(fmt.Sprintf("es:%s ", sp.Business), "%v", err)
return
}
// 取得数据
tmp := map[string]interface{}{}
json.Unmarshal(*MultiRes.Responses[0].Aggregations["pid"], &tmp)
docBuckets["tlist"] = tmp["buckets"]
tmp = map[string]interface{}{}
json.Unmarshal(*MultiRes.Responses[1].Aggregations["state"], &tmp)
docBuckets["plist"] = tmp["buckets"]
if resResult, e := json.Marshal(docBuckets); e != nil {
log.Error("CreativeArchiveSearch.json.error(%v)", e)
} else {
res.Result = resResult
}
return
}

View File

@@ -0,0 +1,191 @@
package dao
import (
"context"
"go-common/app/admin/main/search/model"
"testing"
"github.com/smartystreets/goconvey/convey"
)
// TestDaoArchiveVideoScore exercises d.ArchiveVideoScore with an empty query
// body on the default ES cluster. Only err/debug are asserted; the result
// depends on live index contents, so the res assertion stays commented out.
func TestDaoArchiveVideoScore(t *testing.T) {
	convey.Convey("ArchiveVideoScore", t, func(ctx convey.C) {
		var (
			c  = context.Background()
			sp = &model.QueryParams{
				QueryBody: &model.QueryBody{},
				AppIDConf: &model.QueryConfDetail{
					ESCluster: "",
				},
			}
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			_, debug, err := d.ArchiveVideoScore(c, sp)
			ctx.Convey("Then err should be nil.res,debug should not be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
				ctx.So(debug, convey.ShouldNotBeNil)
				//ctx.So(res, convey.ShouldNotBeNil)
			})
		})
	})
}

// TestDaoArchiveScore mirrors TestDaoArchiveVideoScore for d.ArchiveScore.
func TestDaoArchiveScore(t *testing.T) {
	convey.Convey("ArchiveScore", t, func(ctx convey.C) {
		var (
			c  = context.Background()
			sp = &model.QueryParams{
				QueryBody: &model.QueryBody{},
				AppIDConf: &model.QueryConfDetail{
					ESCluster: "",
				},
			}
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			_, debug, err := d.ArchiveScore(c, sp)
			ctx.Convey("Then err should be nil.res,debug should not be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
				ctx.So(debug, convey.ShouldNotBeNil)
				//ctx.So(res, convey.ShouldNotBeNil)
			})
		})
	})
}

// TestDaoTaskQaRandom smoke-tests d.TaskQaRandom with empty parameters.
func TestDaoTaskQaRandom(t *testing.T) {
	convey.Convey("TaskQaRandom", t, func(ctx convey.C) {
		var (
			c  = context.Background()
			sp = &model.QueryParams{
				QueryBody: &model.QueryBody{},
				AppIDConf: &model.QueryConfDetail{
					ESCluster: "",
				},
			}
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			_, debug, err := d.TaskQaRandom(c, sp)
			ctx.Convey("Then err should be nil.res,debug should not be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
				ctx.So(debug, convey.ShouldNotBeNil)
				//ctx.So(res, convey.ShouldNotBeNil)
			})
		})
	})
}

// TestDaoEsportsContestsDate only checks that d.EsportsContestsDate does not
// panic against the pcie_pub_out01 cluster; all assertions are disabled
// (presumably the cluster is unavailable in CI — TODO confirm).
func TestDaoEsportsContestsDate(t *testing.T) {
	convey.Convey("EsportsContestsDate", t, func(ctx convey.C) {
		var (
			c  = context.Background()
			sp = &model.QueryParams{
				QueryBody: &model.QueryBody{},
				AppIDConf: &model.QueryConfDetail{
					ESCluster: "pcie_pub_out01",
				},
			}
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			//res, debug, err :=
			d.EsportsContestsDate(c, sp)
			ctx.Convey("Then err should be nil.res,debug should not be nil.", func(ctx convey.C) {
				//ctx.So(err, convey.ShouldBeNil)
				//ctx.So(debug, convey.ShouldNotBeNil)
				//ctx.So(res, convey.ShouldNotBeNil)
			})
		})
	})
}

// TestDaoCreativeArchiveSearch smoke-tests d.CreativeArchiveSearch filtering
// by mid=1 on the ssd_pub_in01 cluster; assertions are disabled.
func TestDaoCreativeArchiveSearch(t *testing.T) {
	convey.Convey("CreativeArchiveSearch", t, func(ctx convey.C) {
		var (
			c  = context.Background()
			sp = &model.QueryParams{
				QueryBody: &model.QueryBody{
					Where: &model.QueryBodyWhere{
						EQ: map[string]interface{}{"mid": 1},
					},
				},
				AppIDConf: &model.QueryConfDetail{
					ESCluster: "ssd_pub_in01",
				},
			}
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			//res, debug, err :=
			d.CreativeArchiveSearch(c, sp)
			ctx.Convey("Then err should be nil.res,debug should not be nil.", func(ctx convey.C) {
				//ctx.So(err, convey.ShouldBeNil)
				//ctx.So(debug, convey.ShouldNotBeNil)
				//ctx.So(res, convey.ShouldNotBeNil)
			})
		})
	})
}

// TestDaoCreativeArchiveStaff smoke-tests d.CreativeArchiveStaff with a
// combo EQ filter and a title LIKE clause; assertions are disabled.
func TestDaoCreativeArchiveStaff(t *testing.T) {
	convey.Convey("CreativeArchiveStaff", t, func(ctx convey.C) {
		var (
			c  = context.Background()
			sp = &model.QueryParams{
				QueryBody: &model.QueryBody{
					Where: &model.QueryBodyWhere{
						Combo: []model.QueryBodyWhereCombo{
							{
								EQ: []map[string]interface{}{{"mid": 1}},
							},
						},
						Like: []model.QueryBodyWhereLike{{
							KWFields: []string{"title"},
							KW:       []string{"title"},
						}},
					},
				},
				AppIDConf: &model.QueryConfDetail{
					ESCluster: "ssd_pub_in02",
				},
			}
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			//res, debug, err :=
			d.CreativeArchiveStaff(c, sp)
			ctx.Convey("Then err should be nil.res,debug should not be nil.", func(ctx convey.C) {
				//ctx.So(err, convey.ShouldBeNil)
				//ctx.So(debug, convey.ShouldNotBeNil)
				//ctx.So(res, convey.ShouldNotBeNil)
			})
		})
	})
}

// TestDaoCreativeArchiveApply smoke-tests d.CreativeArchiveApply with the
// mandatory apply_staff.apply_staff_mid filter; assertions are disabled.
func TestDaoCreativeArchiveApply(t *testing.T) {
	convey.Convey("CreativeArchiveApply", t, func(ctx convey.C) {
		var (
			c  = context.Background()
			sp = &model.QueryParams{
				QueryBody: &model.QueryBody{
					Where: &model.QueryBodyWhere{
						EQ: map[string]interface{}{"apply_staff.apply_staff_mid": "1"},
						Like: []model.QueryBodyWhereLike{{
							KWFields: []string{"title"},
							KW:       []string{"title"},
						}},
					},
				},
				AppIDConf: &model.QueryConfDetail{
					ESCluster: "ssd_pub_in02",
				},
			}
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			//res, debug, err :=
			d.CreativeArchiveApply(c, sp)
			ctx.Convey("Then err should be nil.res,debug should not be nil.", func(ctx convey.C) {
				//ctx.So(err, convey.ShouldBeNil)
				//ctx.So(debug, convey.ShouldNotBeNil)
				//ctx.So(res, convey.ShouldNotBeNil)
			})
		})
	})
}

View File

@@ -0,0 +1,96 @@
package dao
import (
"context"
"go-common/app/admin/main/search/model"
"testing"
"github.com/smartystreets/goconvey/convey"
)
// TestDaoQueryConf checks that the query configuration can be loaded.
func TestDaoQueryConf(t *testing.T) {
	convey.Convey("QueryConf", t, func(ctx convey.C) {
		var (
			c = context.Background()
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			res, err := d.QueryConf(c)
			ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
				ctx.So(res, convey.ShouldNotBeNil)
			})
		})
	})
}

// TestDaoQueryBasic checks that an empty query body still yields a
// non-nil mixed query and debug info.
func TestDaoQueryBasic(t *testing.T) {
	convey.Convey("QueryBasic", t, func(ctx convey.C) {
		var (
			c  = context.Background()
			sp = &model.QueryParams{
				QueryBody: &model.QueryBody{},
			}
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			mixedQuery, qbDebug := d.QueryBasic(c, sp)
			ctx.Convey("Then mixedQuery,qbDebug should not be nil.", func(ctx convey.C) {
				ctx.So(qbDebug, convey.ShouldNotBeNil)
				ctx.So(mixedQuery, convey.ShouldNotBeNil)
			})
		})
	})
}

// TestDaoqueryBasicRange verifies queryBasicRange tolerates a nil map.
func TestDaoqueryBasicRange(t *testing.T) {
	convey.Convey("queryBasicRange", t, func(ctx convey.C) {
		var (
			rangeMap map[string]string
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			rangeQuery, err := d.queryBasicRange(rangeMap)
			ctx.Convey("Then err should be nil.rangeQuery should not be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
				ctx.So(rangeQuery, convey.ShouldNotBeNil)
			})
		})
	})
}

// TestDaoqueryBasicLike verifies queryBasicLike tolerates empty input;
// the query assertion is disabled (empty input may yield a nil query).
func TestDaoqueryBasicLike(t *testing.T) {
	convey.Convey("queryBasicLike", t, func(ctx convey.C) {
		var (
			likeMap  = []model.QueryBodyWhereLike{}
			business = ""
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			_, err := d.queryBasicLike(likeMap, business)
			ctx.Convey("Then err should be nil.likeQuery should not be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
				//ctx.So(likeQuery, convey.ShouldNotBeNil)
			})
		})
	})
}

// TestDaoScroll only checks that d.Scroll does not panic with empty
// parameters; assertions are disabled (requires a live cluster).
func TestDaoScroll(t *testing.T) {
	convey.Convey("Scroll", t, func(ctx convey.C) {
		var (
			c  = context.Background()
			sp = &model.QueryParams{
				QueryBody: &model.QueryBody{},
				AppIDConf: &model.QueryConfDetail{
					ESCluster: "",
				},
			}
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			//res, debug, err :=
			d.Scroll(c, sp)
			ctx.Convey("Then err should be nil.res,debug should not be nil.", func(ctx convey.C) {
				//ctx.So(err, convey.ShouldBeNil)
				//ctx.So(debug, convey.ShouldNotBeNil)
				//ctx.So(res, convey.ShouldNotBeNil)
			})
		})
	})
}

View File

@@ -0,0 +1,48 @@
# Bazel build file for the search-admin HTTP layer; tagged "automanaged"
# (presumably regenerated by gazelle-style tooling — do not hand-edit lists).
load(
    "@io_bazel_rules_go//go:def.bzl",
    "go_library",
)

package(default_visibility = ["//visibility:public"])

# This package's own sources; private helper consumed by :all-srcs.
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)

# Recursive source aggregation used by the repository-level source tree.
filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

# The http handler package of the search admin service.
go_library(
    name = "go_default_library",
    srcs = [
        "archive.go",
        "archive_update.go",
        "copyright.go",
        "http.go",
        "log.go",
        "log_delete.go",
        "mng.go",
        "mng_v2.go",
        "query.go",
        "upsert.go",
    ],
    importpath = "go-common/app/admin/main/search/http",
    tags = ["automanaged"],
    deps = [
        "//app/admin/main/search/conf:go_default_library",
        "//app/admin/main/search/dao:go_default_library",
        "//app/admin/main/search/model:go_default_library",
        "//app/admin/main/search/service:go_default_library",
        "//library/ecode:go_default_library",
        "//library/log:go_default_library",
        "//library/net/http/blademaster:go_default_library",
        "//library/net/http/blademaster/middleware/permit:go_default_library",
    ],
)

View File

@@ -0,0 +1,145 @@
package http
import (
"strings"
"go-common/app/admin/main/search/model"
"go-common/library/ecode"
"go-common/library/log"
bm "go-common/library/net/http/blademaster"
)
// archiveSearch dispatches an archive search request to the concrete
// handler selected by the "appid" form parameter; unknown appids are
// rejected with ecode.RequestErr.
func archiveSearch(c *bm.Context) {
	handlers := map[string]func(*bm.Context){
		"archive_check":    archiveCheck,
		"video":            video,
		"task_qa":          taskQa,
		"archive_commerce": archiveCommerce,
	}
	handler, ok := handlers[c.Request.Form.Get("appid")]
	if !ok {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	handler(c)
}
// archiveCheck handles appid=archive_check: it binds the archive-check
// params plus the shared basic search params, normalizes "order" and
// "kw_fields", and runs the service query.
func archiveCheck(c *bm.Context) {
	params := &model.ArchiveCheckParams{Bsp: &model.BasicSearchParams{}}
	if e := c.Bind(params); e != nil {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	if e := c.Bind(params.Bsp); e != nil {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	form := c.Request.Form
	// A single empty order entry means "no explicit order".
	if len(params.Bsp.Order) == 1 && params.Bsp.Order[0] == "" {
		params.Bsp.Order = nil
	}
	if order := form.Get("order"); order != "" {
		params.Bsp.Order = strings.Split(order, ",")
	}
	// Default keyword fields when the caller supplies none.
	if kw := form.Get("kw_fields"); kw == "" {
		params.Bsp.KwFields = []string{"title", "content", "tag", "author"}
	} else {
		params.Bsp.KwFields = strings.Split(kw, ",")
	}
	res, err := svr.ArchiveCheck(c, params)
	if err != nil {
		log.Error("svr.ArchiveCheck(%v) error(%v)", params, err)
		c.JSON(nil, err)
		return
	}
	c.JSON(res, err)
}
// video handles appid=video: binds video search params plus the shared
// basic search params, applies defaults, and runs svr.Video.
func video(c *bm.Context) {
	var (
		err error
		sp  = &model.VideoParams{
			Bsp: &model.BasicSearchParams{},
		}
		form = c.Request.Form
	)
	if err = c.Bind(sp); err != nil {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	if err = c.Bind(sp.Bsp); err != nil {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	// -1 marks "order_type not supplied" for the service layer.
	if form.Get("order_type") == "" {
		sp.OrderType = -1
	}
	if form.Get("kw_fields") == "" {
		sp.Bsp.KwFields = []string{"arc_title", "arc_author"}
	}
	res, err := svr.Video(c, sp)
	if err != nil {
		// Fixed: the log previously said "svr.ArchiveCheck" (copy-paste
		// from the archiveCheck handler), masking the real call site.
		log.Error("svr.Video(%v) error(%v)", sp, err)
		c.JSON(nil, err)
		return
	}
	c.JSON(res, err)
}
// taskQa handles appid=task_qa: binds the QA task params and the shared
// basic search params, then runs the QA task search.
func taskQa(c *bm.Context) {
	params := &model.TaskQa{Bsp: &model.BasicSearchParams{}}
	if e := c.Bind(params); e != nil {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	if e := c.Bind(params.Bsp); e != nil {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	res, err := svr.TaskQa(c, params)
	if err != nil {
		log.Error("svr.TaskQa(%v) error(%v)", params, err)
		c.JSON(nil, err)
		return
	}
	c.JSON(res, err)
}
// archiveCommerce handles appid=archive_commerce: binds the commerce
// params and the shared basic search params, then runs the search.
func archiveCommerce(c *bm.Context) {
	params := &model.ArchiveCommerce{Bsp: &model.BasicSearchParams{}}
	if e := c.Bind(params); e != nil {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	if e := c.Bind(params.Bsp); e != nil {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	res, err := svr.ArchiveCommerce(c, params)
	if err != nil {
		log.Error("svr.ArchiveCommerce(%v) error(%v)", params, err)
		c.JSON(nil, err)
		return
	}
	c.JSON(res, err)
}

View File

@@ -0,0 +1,50 @@
package http
import (
"encoding/json"
"go-common/app/admin/main/search/dao"
"go-common/app/admin/main/search/model"
"go-common/library/ecode"
"go-common/library/log"
bm "go-common/library/net/http/blademaster"
)
// updateArchive routes archive index-update requests by "appid".
// Only task_qa_fans is currently supported; anything else is a bad
// request. (The old comment said "updateBlocked", which was stale.)
func updateArchive(c *bm.Context) {
	if c.Request.Form.Get("appid") != "task_qa_fans" {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	updateTaskQaFans(c)
}
// updateTaskQaFans decodes the JSON "data" form field into task-QA fans
// rows and bulk-updates them in the "ssd_archive" index.
func updateTaskQaFans(c *bm.Context) {
	raw := c.Request.Form.Get("data")
	if raw == "" {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	var rows []*model.TaskQaFansParams
	if err := json.Unmarshal([]byte(raw), &rows); err != nil {
		log.Error("json.Unmarshal error(%v)", err)
		c.JSON(nil, err)
		return
	}
	var items []dao.BulkItem
	for _, row := range rows {
		items = append(items, row)
	}
	err := svr.Update(c, "ssd_archive", items)
	if err != nil {
		log.Error("srv.Update error(%v)", err)
	}
	c.JSON(nil, err)
}

View File

@@ -0,0 +1,36 @@
package http
import (
"encoding/json"
"go-common/app/admin/main/search/dao"
"go-common/app/admin/main/search/model"
"go-common/library/ecode"
"go-common/library/log"
bm "go-common/library/net/http/blademaster"
)
// copyRight decodes the JSON "data" form field into copyright rows and
// indexes them into "internalPublic".
func copyRight(c *bm.Context) {
	raw := c.Request.Form.Get("data")
	if raw == "" {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	var rows []*model.CopyRight
	if err := json.Unmarshal([]byte(raw), &rows); err != nil {
		log.Error("json.Unmarshal error(%v)", err)
		c.JSON(nil, ecode.RequestErr)
		return
	}
	var items []dao.BulkItem
	for _, row := range rows {
		items = append(items, row)
	}
	err := svr.Index(c, "internalPublic", items)
	if err != nil {
		log.Error("srv.Index error(%v)", err)
	}
	c.JSON(nil, err)
}

View File

@@ -0,0 +1,103 @@
package http
import (
"net/http"
"go-common/app/admin/main/search/conf"
"go-common/app/admin/main/search/service"
"go-common/library/log"
bm "go-common/library/net/http/blademaster"
"go-common/library/net/http/blademaster/middleware/permit"
)
var (
authSrv *permit.Permit
svr *service.Service
)
// Init wires the service into the package-level handlers, registers the
// routes and starts the blademaster HTTP engine; it panics if the engine
// fails to start.
func Init(c *conf.Config, s *service.Service) {
	svr = s
	authSrv = permit.New(c.Auth)
	engine := bm.DefaultServer(c.BM)
	route(engine)
	err := engine.Start()
	if err == nil {
		return
	}
	log.Error("engine.Start error(%v)", err)
	panic(err)
}
// route registers every endpoint of the search admin service under
// /x/admin/search. Registration order mirrors the API generations:
// V3 generic query/upsert, V2 legacy per-appid handlers, and the two
// sven management groups.
func route(e *bm.Engine) {
	e.Ping(ping)
	searchG := e.Group("/x/admin/search")
	{
		// V3: new-style generic query and update endpoints.
		searchG.GET("/query", querySearch)
		searchG.GET("/query/debug", queryDebug)
		searchG.POST("/upsert", upsert)
		// V2: legacy per-appid endpoints.
		searchG.GET("/archive", archiveSearch)
		searchG.GET("/log", logSearch)
		searchG.POST("/log/delete", logDelete)
		searchG.GET("/log/audit", bMlogAudit)
		searchG.GET("/log/audit_group", bMlogAuditGroupBy)
		searchG.GET("/log/user_action", bMlogUserAction)
		// update (deprecated)
		searchG.POST("/archive/update", updateArchive)
		// index insert (deprecated)
		searchG.POST("/copyright/index", copyRight)
		// sven management console, v1.
		mng := searchG.Group("/mng")
		{
			mng.GET("/business/list", businessList)
			mng.GET("/business/all", businessAll)
			mng.GET("/business/info", businessInfo)
			mng.POST("/business/add", addBusiness)
			mng.POST("/business/update", updateBusiness)
			mng.POST("/business/update_app", updateBusinessApp)
			mng.GET("/asset/list", assetList)
			mng.GET("/asset/all", assetAll)
			mng.GET("/asset/info", assetInfo)
			mng.POST("/asset/add", addAsset)
			mng.POST("/asset/update", updateAsset)
			mng.GET("/app/list", appList)
			mng.GET("/app/info", appInfo)
			mng.POST("/app/add", addApp)
			mng.POST("/app/update", updateApp)
			mng.GET("/countlist", countlist)
			mng.GET("/count", count)
			mng.GET("/percent", percent)
		}
		// sven management console, v2.
		mng2 := searchG.Group("/mng/v2")
		{
			mng2.GET("/business/all", businessAllV2)
			mng2.GET("/business/info", businessInfoV2)
			mng2.POST("/business/add", businessAdd)
			mng2.POST("/business/update", businessUpdate)
			mng2.GET("/asset/list", assetDBTables)
			mng2.GET("/asset/info", assetInfoV2)
			mng2.GET("/asset/dbconnect", assetDBConnect)
			mng2.POST("/asset/dbadd", assetDBAdd)
			mng2.POST("/asset/tableadd", assetTableAdd)
			mng2.POST("/asset/tableupdate", updateAssetTable)
			mng2.GET("/asset/showtables", assetShowTables)
			mng2.GET("/asset/tablefields", assetTableFields)
			mng2.GET("/cluster/owners", clusterOwners)
		}
	}
}
// ping is the health probe: it answers 503 when the service layer
// reports itself unhealthy, and 200 otherwise.
func ping(ctx *bm.Context) {
	err := svr.Ping(ctx)
	if err == nil {
		return
	}
	ctx.Error = err
	ctx.AbortWithStatus(http.StatusServiceUnavailable)
}

View File

@@ -0,0 +1,215 @@
package http
import (
"context"
"go-common/app/admin/main/search/model"
"go-common/library/ecode"
"go-common/library/log"
bm "go-common/library/net/http/blademaster"
)
// logSearch dispatches a log search request to the concrete handler
// selected by the "appid" form parameter; unknown appids are rejected.
func logSearch(c *bm.Context) {
	handlers := map[string]func(*bm.Context){
		"log_audit":       logAudit,
		"log_audit_group": logAuditGroupBy,
		"log_user_action": logUserAction,
	}
	handler, ok := handlers[c.Request.Form.Get("appid")]
	if !ok {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	handler(c)
}
// bAuth enforces the permission point configured for the appid/business
// pair. It returns true when the caller may proceed; otherwise it writes
// AccessDenied (or lets the permit middleware respond) and aborts.
func bAuth(c *bm.Context, appID string, businessID int) bool {
	business, ok := svr.Check(appID, businessID)
	if !ok || business.PermissionPoint == "" {
		c.JSON(nil, ecode.AccessDenied)
		c.Abort()
		return false
	}
	// Run the permit middleware inline; it aborts the context on failure.
	authSrv.Permit(business.PermissionPoint)(c)
	return !c.IsAborted()
}
// logAudit handles appid=log_audit: validates the business against the
// registry, then queries the audit-log index.
func logAudit(c *bm.Context) {
	params := &model.LogParams{Bsp: &model.BasicSearchParams{}}
	if e := c.Bind(params); e != nil {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	if e := c.Bind(params.Bsp); e != nil {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	business, ok := svr.Check("log_audit", params.Business)
	if !ok {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	res, err := svr.LogAudit(c, c.Request.Form, params, business)
	if err != nil {
		log.Error("srv.LogAudit(%v) error(%v)", params, err)
		c.JSON(nil, err)
		return
	}
	c.JSON(res, err)
}
// logAuditGroupBy handles appid=log_audit_group: aggregated (group-by)
// query over the audit-log index.
func logAuditGroupBy(c *bm.Context) {
	params := &model.LogParams{Bsp: &model.BasicSearchParams{}}
	if e := c.Bind(params); e != nil {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	if e := c.Bind(params.Bsp); e != nil {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	business, ok := svr.Check("log_audit", params.Business)
	if !ok {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	res, err := svr.LogAuditGroupBy(c, c.Request.Form, params, business)
	if err != nil {
		log.Error("srv.LogAuditGroupBy(%v) error(%v)", params, err)
		c.JSON(nil, err)
		return
	}
	c.JSON(res, err)
}
// logUserAction handles appid=log_user_action: queries the user-action
// log index for the given business.
func logUserAction(c *bm.Context) {
	params := &model.LogParams{Bsp: &model.BasicSearchParams{}}
	if e := c.Bind(params); e != nil {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	if e := c.Bind(params.Bsp); e != nil {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	business, ok := svr.Check("log_user_action", params.Business)
	if !ok {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	res, err := svr.LogUserAction(c, c.Request.Form, params, business)
	if err != nil {
		log.Error("srv.LogUserAction(%v) error(%v)", params, err)
		c.JSON(nil, err)
		return
	}
	c.JSON(res, err)
}
// bMlogAudit is the permission-gated audit-log query: it authenticates
// via bAuth, records usage asynchronously, then runs the same search as
// logAudit.
func bMlogAudit(c *bm.Context) {
	var (
		err error
		sp  = &model.LogParams{
			Bsp: &model.BasicSearchParams{},
		}
	)
	// Bare returns on Bind failure follow this file's bM* convention
	// (presumably Bind responds itself — TODO confirm against blademaster).
	if err = c.Bind(sp); err != nil {
		return
	}
	if err = c.Bind(sp.Bsp); err != nil {
		return
	}
	if !bAuth(c, "log_audit", sp.Business) {
		return
	}
	business, ok := svr.Check("log_audit", sp.Business)
	if !ok {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	if uid, ok := c.Get("uid"); ok {
		// Fire-and-forget usage accounting; must not block the request.
		go svr.LogCount(context.Background(), "log_audit", sp.Business, uid)
	}
	res, err := svr.LogAudit(c, c.Request.Form, sp, business)
	if err != nil {
		log.Error("srv.bMlogAudit(%v) error(%v)", sp, err)
		// Fixed: previously returned without writing any response,
		// leaving the client with an empty reply on service errors.
		c.JSON(nil, err)
		return
	}
	c.JSON(res, err)
}
// bMlogAuditGroupBy is the permission-gated aggregated audit-log query.
func bMlogAuditGroupBy(c *bm.Context) {
	var (
		err error
		sp  = &model.LogParams{
			Bsp: &model.BasicSearchParams{},
		}
	)
	// Bare returns on Bind failure follow this file's bM* convention
	// (presumably Bind responds itself — TODO confirm against blademaster).
	if err = c.Bind(sp); err != nil {
		return
	}
	if err = c.Bind(sp.Bsp); err != nil {
		return
	}
	if !bAuth(c, "log_audit", sp.Business) {
		return
	}
	business, ok := svr.Check("log_audit", sp.Business)
	if !ok {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	res, err := svr.LogAuditGroupBy(c, c.Request.Form, sp, business)
	if err != nil {
		log.Error("srv.bMlogAuditGroupBy(%v) error(%v)", sp, err)
		// Fixed: previously returned without writing any response,
		// leaving the client with an empty reply on service errors.
		c.JSON(nil, err)
		return
	}
	c.JSON(res, err)
}
// bMlogUserAction is the permission-gated user-action-log query: it
// authenticates via bAuth, records usage asynchronously, then runs the
// same search as logUserAction.
func bMlogUserAction(c *bm.Context) {
	var (
		err error
		sp  = &model.LogParams{
			Bsp: &model.BasicSearchParams{},
		}
	)
	// Bare returns on Bind failure follow this file's bM* convention
	// (presumably Bind responds itself — TODO confirm against blademaster).
	if err = c.Bind(sp); err != nil {
		return
	}
	if err = c.Bind(sp.Bsp); err != nil {
		return
	}
	if !bAuth(c, "log_user_action", sp.Business) {
		return
	}
	business, ok := svr.Check("log_user_action", sp.Business)
	if !ok {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	if uid, ok := c.Get("uid"); ok {
		// Fire-and-forget usage accounting; must not block the request.
		go svr.LogCount(context.Background(), "log_user_action", sp.Business, uid)
	}
	res, err := svr.LogUserAction(c, c.Request.Form, sp, business)
	if err != nil {
		log.Error("srv.bMlogUserAction(%v) error(%v)", sp, err)
		// Fixed: previously returned without writing any response,
		// leaving the client with an empty reply on service errors.
		c.JSON(nil, err)
		return
	}
	c.JSON(res, err)
}

View File

@@ -0,0 +1,79 @@
package http
import (
"go-common/app/admin/main/search/model"
"go-common/library/ecode"
"go-common/library/log"
bm "go-common/library/net/http/blademaster"
)
// logDelete dispatches a log-deletion request to the concrete handler
// selected by the "appid" form parameter; unknown appids are rejected.
func logDelete(c *bm.Context) {
	handlers := map[string]func(*bm.Context){
		"log_audit":       logAuditDelete,
		"log_user_action": logUserActionDelete,
	}
	handler, ok := handlers[c.Request.Form.Get("appid")]
	if !ok {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	handler(c)
}
// logAuditDelete deletes audit-log entries for a validated business.
func logAuditDelete(c *bm.Context) {
	params := &model.LogParams{Bsp: &model.BasicSearchParams{}}
	if e := c.Bind(params); e != nil {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	if e := c.Bind(params.Bsp); e != nil {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	business, ok := svr.Check("log_audit", params.Business)
	if !ok {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	res, err := svr.LogAuditDelete(c, c.Request.Form, params, business)
	if err != nil {
		log.Error("srv.LogAuditDelete(%v) error(%v)", params, err)
		c.JSON(nil, err)
		return
	}
	c.JSON(res, err)
}
// logUserActionDelete deletes user-action-log entries for a validated
// business.
func logUserActionDelete(c *bm.Context) {
	params := &model.LogParams{Bsp: &model.BasicSearchParams{}}
	if e := c.Bind(params); e != nil {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	if e := c.Bind(params.Bsp); e != nil {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	business, ok := svr.Check("log_user_action", params.Business)
	if !ok {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	res, err := svr.LogUserActionDelete(c, c.Request.Form, params, business)
	if err != nil {
		log.Error("srv.logUserActionDelete(%v) error(%v)", params, err)
		c.JSON(nil, err)
		return
	}
	c.JSON(res, err)
}

View File

@@ -0,0 +1,166 @@
package http
import (
"go-common/app/admin/main/search/model"
bm "go-common/library/net/http/blademaster"
)
// businessList returns a paginated business list filtered by name.
func businessList(ctx *bm.Context) {
	params := &model.ParamMngBusiness{}
	if err := ctx.Bind(params); err != nil {
		return
	}
	list, total, err := svr.BusinessList(ctx, params.Name, params.Pn, params.Ps)
	if err != nil {
		ctx.JSON(nil, err)
		return
	}
	ctx.JSON(map[string]interface{}{
		"list": list,
		"page": &model.Page{
			Pn:    params.Pn,
			Ps:    params.Ps,
			Total: total,
		},
	}, nil)
}
// businessAll returns every configured business.
func businessAll(ctx *bm.Context) {
	ctx.JSON(svr.BusinessAll(ctx))
}

// businessInfo returns the business identified by the "id" parameter.
func businessInfo(ctx *bm.Context) {
	p := &model.ParamMngBusiness{}
	if err := ctx.Bind(p); err != nil {
		return
	}
	ctx.JSON(svr.BusinessInfo(ctx, p.ID))
}

// addBusiness creates a business with an initially empty app list.
func addBusiness(ctx *bm.Context) {
	p := &model.ParamMngBusiness{}
	if err := ctx.Bind(p); err != nil {
		return
	}
	// AppsJSON starts as an empty JSON array; apps are attached later.
	b := &model.MngBusiness{Name: p.Name, Desc: p.Desc, AppsJSON: "[]"}
	ctx.JSON(svr.AddBusiness(ctx, b))
}

// updateBusiness updates name/description/apps of an existing business.
func updateBusiness(ctx *bm.Context) {
	p := &model.ParamMngBusiness{}
	if err := ctx.Bind(p); err != nil {
		return
	}
	b := &model.MngBusiness{ID: p.ID, Name: p.Name, Desc: p.Desc, AppsJSON: p.Apps}
	ctx.JSON(nil, svr.UpdateBusiness(ctx, b))
}

// updateBusinessApp updates one app binding of a business (incr mode,
// job flag, incr switch).
func updateBusinessApp(ctx *bm.Context) {
	p := &model.ParamMngBusinessApp{}
	if err := ctx.Bind(p); err != nil {
		return
	}
	ctx.JSON(nil, svr.UpdateBusinessApp(ctx, p.Business, p.App, p.IncrWay, p.IsJob, p.IncrOpen))
}
// assetList returns a paginated asset list filtered by type and name.
func assetList(ctx *bm.Context) {
	params := &model.ParamMngAsset{}
	if err := ctx.Bind(params); err != nil {
		return
	}
	list, total, err := svr.AssetList(ctx, params.Type, params.Name, params.Pn, params.Ps)
	if err != nil {
		ctx.JSON(nil, err)
		return
	}
	ctx.JSON(map[string]interface{}{
		"list": list,
		"page": &model.Page{
			Pn:    params.Pn,
			Ps:    params.Ps,
			Total: total,
		},
	}, nil)
}
// assetAll returns every configured asset.
func assetAll(ctx *bm.Context) {
	ctx.JSON(svr.AssetAll(ctx))
}

// assetInfo returns the asset identified by the "id" parameter.
func assetInfo(ctx *bm.Context) {
	p := &model.ParamMngAsset{}
	if err := ctx.Bind(p); err != nil {
		return
	}
	ctx.JSON(svr.AssetInfo(ctx, p.ID))
}

// addAsset creates an asset (name, type, config, description).
func addAsset(ctx *bm.Context) {
	p := &model.ParamMngAsset{}
	if err := ctx.Bind(p); err != nil {
		return
	}
	a := &model.MngAsset{Name: p.Name, Type: p.Type, Config: p.Config, Desc: p.Desc}
	ctx.JSON(svr.AddAsset(ctx, a))
}

// updateAsset updates an existing asset identified by ID.
func updateAsset(ctx *bm.Context) {
	p := &model.ParamMngAsset{}
	if err := ctx.Bind(p); err != nil {
		return
	}
	a := &model.MngAsset{ID: p.ID, Name: p.Name, Type: p.Type, Config: p.Config, Desc: p.Desc}
	ctx.JSON(nil, svr.UpdateAsset(ctx, a))
}

// appList returns the apps bound to a business.
func appList(ctx *bm.Context) {
	p := &model.MngApp{}
	if err := ctx.Bind(p); err != nil {
		return
	}
	ctx.JSON(svr.AppList(ctx, p.Business))
}

// appInfo returns the app identified by the "id" parameter.
func appInfo(ctx *bm.Context) {
	p := &model.MngApp{}
	if err := ctx.Bind(p); err != nil {
		return
	}
	ctx.JSON(svr.AppInfo(ctx, p.ID))
}

// addApp creates an app from the bound form parameters.
func addApp(ctx *bm.Context) {
	p := &model.MngApp{}
	if err := ctx.Bind(p); err != nil {
		return
	}
	ctx.JSON(svr.AddApp(ctx, p))
}

// updateApp updates an existing app from the bound form parameters.
func updateApp(ctx *bm.Context) {
	p := &model.MngApp{}
	if err := ctx.Bind(p); err != nil {
		return
	}
	ctx.JSON(nil, svr.UpdateApp(ctx, p))
}

// countlist returns the available usage-count dimensions.
func countlist(ctx *bm.Context) {
	ctx.JSON(svr.MngCountList(ctx))
}

// count returns usage counts for the requested dimension.
func count(ctx *bm.Context) {
	p := &model.MngCount{}
	if err := ctx.Bind(p); err != nil {
		return
	}
	ctx.JSON(svr.MngCount(ctx, p))
}

// percent returns usage percentages for the requested dimension.
func percent(ctx *bm.Context) {
	p := &model.MngCount{}
	if err := ctx.Bind(p); err != nil {
		return
	}
	ctx.JSON(svr.MngPercent(ctx, p))
}

View File

@@ -0,0 +1,143 @@
package http
import (
"go-common/app/admin/main/search/model"
bm "go-common/library/net/http/blademaster"
)
// businessAllV2 returns every business in the v2 management console.
func businessAllV2(c *bm.Context) {
	c.JSON(svr.BusinessAllV2(c))
}

// businessInfoV2 returns a single business looked up by name.
func businessInfoV2(c *bm.Context) {
	p := new(struct {
		Name string `form:"name" validate:"required"`
	})
	if err := c.Bind(p); err != nil {
		return
	}
	c.JSON(svr.BusinessInfoV2(c, p.Name))
}

// businessAdd creates a business under parent pid.
func businessAdd(c *bm.Context) {
	p := new(struct {
		Pid         int64  `form:"pid" validate:"required,min=1"`
		Name        string `form:"name" validate:"required"`
		Description string `form:"description" validate:"required"`
	})
	if err := c.Bind(p); err != nil {
		return
	}
	c.JSON(svr.BusinessAdd(c, p.Pid, p.Name, p.Description))
}

// businessUpdate sets a single field of the named business to value.
func businessUpdate(c *bm.Context) {
	p := new(struct {
		Name  string `form:"name" validate:"required"`
		Field string `form:"field" validate:"required"`
		Value string `form:"value"`
	})
	if err := c.Bind(p); err != nil {
		return
	}
	c.JSON(svr.BusinessUpdate(c, p.Name, p.Field, p.Value))
}

// assetDBTables lists the registered asset databases and tables.
func assetDBTables(c *bm.Context) {
	c.JSON(svr.AssetDBTables(c))
}

// assetDBConnect tests connectivity to a database with the given
// credentials before it is registered.
func assetDBConnect(c *bm.Context) {
	p := new(struct {
		Host     string `form:"host" validate:"required"`
		Port     string `form:"port" validate:"required"`
		User     string `form:"user" validate:"required"`
		Password string `form:"password" validate:"required"`
	})
	if err := c.Bind(p); err != nil {
		return
	}
	c.JSON(svr.AssetDBConnect(c, p.Host, p.Port, p.User, p.Password))
}

// assetDBAdd registers a database asset with its connection details.
func assetDBAdd(c *bm.Context) {
	p := new(struct {
		Name        string `form:"name" validate:"required"`
		Description string `form:"description"`
		Host        string `form:"host" validate:"required"`
		Port        string `form:"port" validate:"required"`
		User        string `form:"user" validate:"required"`
		Password    string `form:"password" validate:"required"`
	})
	if err := c.Bind(p); err != nil {
		return
	}
	c.JSON(svr.AssetDBAdd(c, p.Name, p.Description, p.Host, p.Port, p.User, p.Password))
}
// assetTableAdd registers tables of a database asset by name regex,
// together with the field list to index.
func assetTableAdd(c *bm.Context) {
	p := new(struct {
		DB          string `form:"db" validate:"required"`
		Regex       string `form:"regex" validate:"required"`
		Fields      string `form:"fields" validate:"required"`
		Description string `form:"description"`
	})
	if err := c.Bind(p); err != nil {
		return
	}
	c.JSON(svr.AssetTableAdd(c, p.DB, p.Regex, p.Fields, p.Description))
}

// updateAssetTable replaces the indexed field list of a table asset.
func updateAssetTable(c *bm.Context) {
	p := new(struct {
		Name   string `form:"name" validate:"required"`
		Fields string `form:"fields" validate:"required"`
	})
	if err := c.Bind(p); err != nil {
		return
	}
	c.JSON(svr.UpdateAssetTable(c, p.Name, p.Fields))
}

// assetInfoV2 returns a single asset looked up by name.
func assetInfoV2(c *bm.Context) {
	p := new(struct {
		Name string `form:"name" validate:"required"`
	})
	if err := c.Bind(p); err != nil {
		return
	}
	c.JSON(svr.AssetInfoV2(c, p.Name))
}

// assetShowTables lists the physical tables of a registered database.
func assetShowTables(c *bm.Context) {
	p := new(struct {
		DB string `form:"db" validate:"required"`
	})
	if err := c.Bind(p); err != nil {
		return
	}
	c.JSON(svr.AssetShowTables(c, p.DB))
}

// assetTableFields returns the columns of the tables matched by regex
// in the given database, plus the number of matched tables.
func assetTableFields(c *bm.Context) {
	p := new(struct {
		DB    string `form:"db" validate:"required"`
		Regex string `form:"regex" validate:"required"`
	})
	if err := c.Bind(p); err != nil {
		return
	}
	fs, count, err := svr.AssetTableFields(c, p.DB, p.Regex)
	data := &struct {
		Fields []*model.TableField `json:"fields"`
		Count  int                 `json:"count"`
	}{
		Fields: fs,
		Count:  count,
	}
	c.JSON(data, err)
}

// clusterOwners returns the owner list of each configured ES cluster.
func clusterOwners(c *bm.Context) {
	c.JSON(svr.ClusterOwners(), nil)
}

View File

@@ -0,0 +1,157 @@
package http
import (
"encoding/json"
"errors"
"strings"
"go-common/app/admin/main/search/model"
"go-common/library/ecode"
"go-common/library/log"
bm "go-common/library/net/http/blademaster"
)
// checkParams binds and normalizes the parameters shared by querySearch
// and queryDebug: it decodes the JSON query body (keeping numbers as
// json.Number so large IDs survive), converts numeric EQ/Or values to
// int64, validates the "from" target against the wildcard whitelist and
// the per-appid query config, and applies paging defaults.
// On failure it returns a non-nil error and writes no response; the
// caller is responsible for replying.
func checkParams(c *bm.Context) (sp *model.QueryParams, err error) {
	sp = &model.QueryParams{
		QueryBody: &model.QueryBody{},
	}
	if err = c.Bind(sp); err != nil {
		return
	}
	decoder := json.NewDecoder(strings.NewReader(sp.QueryBodyStr))
	// UseNumber keeps integers exact instead of converting to float64.
	decoder.UseNumber()
	if err = decoder.Decode(&sp.QueryBody); err != nil {
		// Fixed: the old log said "s.http.upsert" with swapped args, and
		// execution continued with a partially decoded body whose decode
		// error could later be masked by a successful Int64 conversion.
		log.Error("checkParams(%s) json error(%v)", sp.QueryBodyStr, err)
		err = ecode.RequestErr
		return
	}
	if sp.QueryBody == nil {
		// Fixed: previously this wrote the response here and returned a
		// nil error, so callers went on dereferencing a nil QueryBody.
		err = ecode.RequestErr
		return
	}
	if sp.QueryBody.Where != nil {
		// eq: convert json.Number values to int64.
		for k, v := range sp.QueryBody.Where.EQ {
			if integer, ok := v.(json.Number); ok {
				if sp.QueryBody.Where.EQ[k], err = integer.Int64(); err != nil {
					log.Error("sp.QueryBody.Where.EQ.numberToInt64(%v)(%v)", integer, err)
				}
			}
		}
		// or: same conversion.
		for k, v := range sp.QueryBody.Where.Or {
			if integer, ok := v.(json.Number); ok {
				if sp.QueryBody.Where.Or[k], err = integer.Int64(); err != nil {
					log.Error("sp.QueryBody.Where.EQ.numberToInt64(%v)(%v)", integer, err)
				}
			}
		}
		// in: rebuild each slice, keeping json.Number values as-is.
		for k, v := range sp.QueryBody.Where.In {
			vs := make([]interface{}, 0)
			for _, v2 := range v {
				if integer, ok := v2.(json.Number); ok {
					vs = append(vs, integer)
				} else {
					vs = append(vs, v2)
				}
			}
			sp.QueryBody.Where.In[k] = vs
		}
	}
	if sp.QueryBody.From == "" {
		err = errors.New("query from is empty")
		return
	}
	// Deny wildcard indices unless this business is whitelisted ("star").
	if b, ok := model.PermConf["star"][sp.Business]; !(ok && b == "true") {
		if strings.Contains(sp.QueryBody.From, "*") {
			err = errors.New("query from * is denied")
			return
		}
	}
	if err = svr.CheckQueryConf(c, sp); err != nil {
		log.Error("svr.QueryConf(%+v) error(%v)", sp, err)
		return
	}
	// Paging defaults.
	if sp.QueryBody.Pn == 0 {
		sp.QueryBody.Pn = 1
	}
	if sp.QueryBody.Ps == 0 {
		sp.QueryBody.Ps = 10
	}
	return
}
// querySearch is the generic V3 query endpoint: it validates the request
// and dispatches to the query mode configured for the appid.
func querySearch(c *bm.Context) {
	res := &model.QueryResult{}
	sp, err := checkParams(c)
	if err != nil {
		log.Error("checkParams(%v) error(%v)", sp, err)
		c.JSON(res, err)
		return
	}
	// Pick the query strategy from the per-appid config.
	switch sp.AppIDConf.QueryMode {
	case model.QueryModeBasic: // fully generic query (supports nested)
		res, _, err = svr.QueryBasic(c, sp)
	case model.QueryModeExtra: // generic body plus extra custom conditions
		res, _, err = svr.QueryExtra(c, sp)
	case 4: // fully custom query — not implemented yet
		// NOTE(review): queryDebug treats mode 3 as "fully custom";
		// confirm which constant is intended.
	default:
		res, _, err = svr.QueryBasic(c, sp)
	}
	if err != nil {
		log.Error("srv.QueryBasic(%v) error(%v)", sp, err)
		c.JSON(nil, err)
		return
	}
	c.JSON(res, err)
}
// queryDebug 1. queryBody is right or not 2. return dsl body 3. return dsl profile
// It runs the same dispatch as querySearch but returns only debug output
// (generated DSL and, at level 2, the ES profile) instead of the result.
func queryDebug(c *bm.Context) {
	var (
		err      error
		sp       *model.QueryParams
		resDebug = &model.QueryDebugResult{}
	)
	if sp, err = checkParams(c); err != nil {
		log.Error("checkParams(%v) error(%v)", c, err)
		resDebug.AddErrMsg(err.Error())
		resDebug.QueryBody = sp.QueryBodyStr
		c.JSON(resDebug, ecode.RequestErr)
		return
	}
	// Default to the verbose debug level.
	if sp.DebugLevel == 0 {
		sp.DebugLevel = 2
	}
	switch sp.AppIDConf.QueryMode {
	case 0, model.QueryModeBasic: // fully generic query
		if _, resDebug, err = svr.QueryBasic(c, sp); err != nil {
			log.Error("QueryDebug(%v) error(%v)", sp, err)
			resDebug.AddErrMsg(err.Error())
			c.JSON(resDebug, ecode.RequestErr)
			return
		}
	case model.QueryModeExtra: // generic body plus extra custom handling
		if _, resDebug, err = svr.QueryExtra(c, sp); err != nil {
			log.Error("QueryDebug(%v) error(%v)", sp, err)
			resDebug.AddErrMsg(err.Error())
			c.JSON(resDebug, ecode.RequestErr)
			return
		}
	case 3: // fully custom query — not implemented yet
		// NOTE(review): querySearch treats mode 4 as "fully custom";
		// confirm which constant is intended.
		//todo
	default:
	}
	resDebug.QueryBody = sp.QueryBodyStr
	c.JSON(resDebug, err)
}

View File

@@ -0,0 +1,36 @@
package http
import (
"encoding/json"
"strings"
"go-common/app/admin/main/search/model"
"go-common/library/ecode"
"go-common/library/log"
bm "go-common/library/net/http/blademaster"
)
// upsert inserts or updates documents for a business. The request carries a
// JSON payload of the form {"<index>": [{...doc}, ...]}; numeric values are
// decoded as json.Number and normalized to int64 before writing.
func upsert(c *bm.Context) {
	up := &model.UpsertParams{}
	if err := c.Bind(up); err != nil {
		return
	}
	dataBody := map[string][]model.MapData{}
	decoder := json.NewDecoder(strings.NewReader(up.DataStr))
	// UseNumber keeps large integers exact (avoids float64 rounding).
	decoder.UseNumber()
	if err := decoder.Decode(&dataBody); err != nil {
		// BUG FIX: the log arguments were swapped relative to the format
		// string, and a partially decoded payload used to fall through to the
		// write path; reject malformed input outright instead.
		log.Error("s.http.upsert(%v) json error(%v)", dataBody, err)
		c.JSON(nil, ecode.RequestErr)
		return
	}
	if len(dataBody) == 0 {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	// Normalize json.Number values to int64 in every document.
	for _, docs := range dataBody {
		for _, doc := range docs {
			if err := doc.NumberToInt64(); err != nil {
				// BUG FIX: arguments reordered to match the format string.
				log.Error("s.http.upsert(%v) to int64 error(%v)", doc, err)
			}
		}
	}
	c.JSON(nil, svr.Upsert(c, up, dataBody))
}

View File

@@ -0,0 +1,44 @@
# Bazel build definitions for the search-admin model package.
# Auto-managed (gazelle); regenerate rather than hand-editing srcs/deps.
load(
    "@io_bazel_rules_go//go:def.bzl",
    "go_library",
)

package(default_visibility = ["//visibility:public"])

# All files of this package, for tooling that needs raw sources.
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)

# Recursive source aggregation target consumed by the repo root.
filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

go_library(
    name = "go_default_library",
    srcs = [
        "archive.go",
        "copyright.go",
        "es.go",
        "log.go",
        "mapdata.go",
        "mng.go",
        "mng_v2.go",
        "param.go",
        "query.go",
        "query_extra.go",
        "uname.go",
        "upsert.go",
    ],
    importpath = "go-common/app/admin/main/search/model",
    tags = ["automanaged"],
    deps = [
        "//library/log:go_default_library",
        "//vendor/gopkg.in/olivere/elastic.v5:go_default_library",
    ],
)

View File

@@ -0,0 +1,101 @@
package model
import "strconv"
// ArchiveCheckParams carries the filters for the archive-check (audit)
// search endpoint. Slice fields bind from comma-separated query values
// (the ",split" form option).
type ArchiveCheckParams struct {
	Bsp          *BasicSearchParams // shared paging/sorting/keyword options
	Aids         []int64 `form:"aids,split" params:"aids"`       // archive ids
	TypeIds      []int64 `form:"typeids,split" params:"typeids"` // category ids
	States       []int64 `form:"states,split" params:"states"`
	Attrs        []int64 `form:"attrs,split" params:"attrs"`
	DurationFrom int64   `form:"duration_from" params:"duration_from"` // duration range bounds
	DurationTo   int64   `form:"duration_to" params:"duration_to"`
	Mids         []int64 `form:"mids,split" params:"mids"` // uploader ids
	MidFrom      int64   `form:"mid_from" params:"mid_from"`
	MidTo        int64   `form:"mid_to" params:"mid_to"`
	AllKW        int     `form:"all_kw" params:"all_kw" default:"0"` // presumably "all keywords must match" — TODO confirm against dao
	TimeFrom     string  `form:"time_from" params:"time_from"`
	TimeTo       string  `form:"time_to" params:"time_to"`
	Time         string  `form:"time" params:"time"`
	FromIP       string  `form:"from_ip" params:"from_ip"`
}
// VideoParams carries the filters for the video relation search.
type VideoParams struct {
	Bsp        *BasicSearchParams // shared paging/sorting/keyword options
	VIDs       []int64  `form:"vids,split" params:"vids"`
	AIDs       []int64  `form:"aids,split" params:"aids"`
	CIDs       []int64  `form:"cids,split" params:"cids"`
	TIDs       []int64  `form:"tids,split" params:"tids"`
	FileNames  []string `form:"filename,split" params:"filename"`
	TagID      int64    `form:"tag_id" params:"tag_id"`
	Status     []int64  `form:"status,split" params:"status"`
	XCodeState []int64  `form:"xcode_state,split" params:"xcode_state"` // transcode states
	UserType   int      `form:"user_type" params:"user_type"`
	// archive-level filters
	RelationStates []int64 `form:"relation_state,split" params:"relation_state"`
	ArcMids        []int64 `form:"arc_mids,split" params:"arc_mids"`
	DurationFrom   int     `form:"duration_from" params:"duration_from"`
	DurationTo     int     `form:"duration_to" params:"duration_to"`
	// other
	OrderType int `form:"order_type" params:"order_type"`
}
// TaskQa carries the filters for the QA task search. Range pairs
// (*From/*To) bound fans count, creation time and finish time.
type TaskQa struct {
	Bsp           *BasicSearchParams // shared paging/sorting/keyword options
	Ids           []int64  `form:"ids,split" params:"ids"`
	TaskIds       []string `form:"task_ids,split" params:"task_ids"`
	Uids          []string `form:"uids,split" params:"uids"`
	ArcTagIds     []string `form:"arc_tagids,split" params:"arc_tagids"`
	AuditTagIds   []int64  `form:"audit_tagids,split" params:"audit_tagids"`
	UpGroups      []string `form:"up_groups,split" params:"up_groups"`
	ArcTitles     []string `form:"arc_titles,split" params:"arc_titles"`
	ArcTypeIds    []string `form:"arc_typeids,split" params:"arc_typeids"`
	States        []string `form:"states,split" params:"states"`
	AuditStatuses []string `form:"audit_statuses,split" params:"audit_statuses"`
	FansFrom      string   `form:"fans_from" params:"fans_from"`
	FansTo        string   `form:"fans_to" params:"fans_to"`
	CtimeFrom     string   `form:"ctime_from" params:"ctime_from"`
	CtimeTo       string   `form:"ctime_to" params:"ctime_to"`
	FtimeFrom     string   `form:"ftime_from" params:"ftime_from"`
	FtimeTo       string   `form:"ftime_to" params:"ftime_to"`
}
// ArchiveCommerce carries the filters for the commercial-archive search.
type ArchiveCommerce struct {
	Bsp        *BasicSearchParams // shared paging/sorting/keyword options
	Ids        []string `form:"ids,split" params:"ids"`
	PTypeIds   []string `form:"ptypeids,split" params:"ptypeids"` // parent (top-level) category ids
	TypeIds    []string `form:"typeids,split" params:"typeids"`
	Mids       []string `form:"mids,split" params:"mids"`
	States     []string `form:"states,split" params:"states"`
	Copyrights []string `form:"copyrights,split" params:"copyrights"`
	OrderIds   []string `form:"order_ids,split" params:"order_ids"`
	// logic switches
	Action     string `form:"action" params:"action"`                         // custom query action, e.g. fetch the top-level category list
	IsOrder    int    `form:"is_order" params:"is_order" default:"-1"`        // commercial order flag; -1 = not filtered
	IsOriginal int    `form:"is_original" params:"is_original" default:"-1"`  // original-content flag; -1 = not filtered
}
// TaskQaFansParams is the partial document used to update an uploader's
// fans count on a task_qa document.
type TaskQaFansParams struct {
	ID   int64 `json:"id"`
	Fans int64 `json:"fans"`
}

// IndexName returns the ES index this document is written to.
func (m *TaskQaFansParams) IndexName() string {
	return "task_qa"
}

// IndexType returns the ES document type.
func (m *TaskQaFansParams) IndexType() string {
	return "base"
}

// IndexID returns the ES document id (the numeric ID as a decimal string).
func (m *TaskQaFansParams) IndexID() string {
	return strconv.FormatInt(m.ID, 10)
}

View File

@@ -0,0 +1,33 @@
package model
import (
"strconv"
)
// CopyRight is an indexed copyright record (rights holder / work metadata).
type CopyRight struct {
	ID          int64  `json:"id"`
	Name        string `json:"name"`
	OName       string `json:"oname"`     // original name
	AkaNames    string `json:"aka_names"` // alternative names
	Level       string `json:"level"`
	AVoid       string `json:"avoid"`
	Plan        string `json:"plan"`
	Description string `json:"description"`
	URL         string `json:"url"`
}

// IndexName returns the ES index this document is written to.
func (c *CopyRight) IndexName() string {
	return "copyright"
}

// IndexType returns the ES document type.
func (c *CopyRight) IndexType() string {
	return "base"
}

// IndexID returns the ES document id (the numeric ID as a decimal string).
func (c *CopyRight) IndexID() string {
	return strconv.FormatInt(c.ID, 10)
}

View File

@@ -0,0 +1,59 @@
package model
import (
"encoding/json"
)
// ES holds the address of an Elasticsearch cluster endpoint.
type ES struct {
	Addr string
}

// Page describes the pagination of a search result.
type Page struct {
	Pn    int   `json:"num"`   // page number (1-based)
	Ps    int   `json:"size"`  // page size
	Total int64 `json:"total"` // total matching documents
}

// SearchResult search result (deprecated).
// Result holds the raw hits; decoding is deferred to the caller.
type SearchResult struct {
	Order  string            `json:"order"`
	Sort   string            `json:"sort"`
	Result []json.RawMessage `json:"result"`
	Debug  string            `json:"debug"`
	Page   *Page             `json:"page"`
}

// BasicSearchParams (deprecated). Shared keyword/paging/sorting options
// bound from the query string.
type BasicSearchParams struct {
	AppID      string   `form:"appid" params:"appid"`
	Pattern    string   `form:"pattern" params:"pattern" default:"equal"` // keyword match mode: exact match "equal" or fuzzy "like"
	KW         string   `form:"kw" params:"kw"`
	KwFields   []string `form:"kw_fields,split" params:"kw_fields"`
	KWs        []string `form:"kws,split" params:"kws"` // keyword group, combined with AND/OR
	Order      []string `form:"order,split" params:"order"`
	Sort       []string `form:"sort,split" params:"sort" default:"desc"`
	Pn         int      `form:"pn" params:"pn;Range(1,5000)" default:"1"`
	Ps         int      `form:"ps" params:"ps;Range(1,1000)" default:"10"`
	Highlight  bool     `form:"highlight" params:"highlight" default:"false"`
	ScoreFirst bool     `form:"score_first" params:"score_first" default:"true"`
	Debug      bool     `form:"debug" params:"debug"`
	Source     []string // _source field whitelist (not bound from the request)
}

// BasicMNGSearchParams is the shared paging/sorting options of the
// management endpoints.
type BasicMNGSearchParams struct {
	Order string `form:"order" params:"order"`
	Sort  string `form:"sort" params:"sort" default:"desc"`
	Pn    int    `form:"pn" params:"pn;Range(1,5000)" default:"1"`
	Ps    int    `form:"ps" params:"ps;Range(1,1000)" default:"10"`
}

// BasicUpdateParams (deprecated).
type BasicUpdateParams struct {
	AppID string
}

// UpdateParams update params (deprecated).
type UpdateParams map[string]interface{}

View File

@@ -0,0 +1,75 @@
package model
// LogParams carries the filters for the audit/user-action log searches.
type LogParams struct {
	Bsp       *BasicSearchParams // shared paging/sorting/keyword options
	Business  int    `form:"business" params:"business"` // numeric business id of the log stream
	CTimeFrom string `form:"ctime_from" params:"ctime_from"`
	CTimeTo   string `form:"ctime_to" params:"ctime_to"`
}

// Business is the per-business log index configuration loaded from config/DB.
type Business struct {
	ID                int
	AppID             string
	Name              string
	AdditionalMapping string            // extra field mapping merged on top of the default
	Mapping           map[string]string // field name -> type ("string"/"int"/"time")
	IndexFormat       string            // index name pattern (e.g. time-sharded)
	IndexCluster      string            // target ES cluster name
	PermissionPoint   string
}

// UDepTsData is the response of the uid -> department lookup service.
type UDepTsData struct {
	Code int `json:"code"`
	Data map[string]string // uid -> department name
}

// IPData is the response of the ip -> geolocation lookup service.
type IPData struct {
	Code int `json:"code"`
	Data map[string]struct {
		Country  string `json:"country"`
		Province string `json:"province"`
		City     string `json:"city"`
		Isp      string `json:"isp"`
	}
}
// LogAuditDefaultMapping is the default field-type mapping for audit-log
// indices, used when a business defines no additional mapping.
var LogAuditDefaultMapping = map[string]string{
	"uname":      "string",
	"uid":        "string",
	"type":       "string",
	"oid":        "string",
	"action":     "string",
	"ctime":      "time",
	"int_0":      "int", // generic numeric extension slots
	"int_1":      "int",
	"int_2":      "int",
	"str_0":      "string", // generic string extension slots
	"str_1":      "string",
	"str_2":      "string",
	"extra_data": "string",
}

// LogUserActionDefaultMapping is the default field-type mapping for
// user-action-log indices.
var LogUserActionDefaultMapping = map[string]string{
	"mid":        "string",
	"platform":   "string",
	"build":      "string",
	"buvid":      "string",
	"type":       "string",
	"oid":        "string",
	"action":     "string",
	"ip":         "string",
	"ctime":      "time",
	"int_0":      "int", // generic numeric extension slots
	"int_1":      "int",
	"int_2":      "int",
	"str_0":      "string", // generic string extension slots
	"str_1":      "string",
	"str_2":      "string",
	"extra_data": "string",
}

View File

@@ -0,0 +1,48 @@
package model
import (
"encoding/json"
"fmt"
"reflect"
"strings"
"go-common/library/log"
)
// MapData is a generic document: a JSON object decoded into a map.
type MapData map[string]interface{}

// StrID builds a document id from an id template. indexID has the form
// "<fmt>,field1,field2,...": the first element is an fmt format string and
// the remaining elements name the map keys whose values fill it in (e.g.
// "%d-%d,oid,mid"). The literal "base" means "no custom id" and yields "".
// float64 values (the default JSON number type) are converted to int64 so
// they format cleanly with integer verbs. Returns "" when no field value
// was found.
func (m MapData) StrID(indexID string) string {
	if indexID == "base" { // config sentinel: no custom id (TODO: move into config)
		return ""
	}
	arr := strings.Split(indexID, ",")
	data := make([]interface{}, 0, len(arr)-1)
	for _, field := range arr[1:] {
		field = strings.TrimSpace(field)
		// BUG FIX: the original asserted m[v].(interface{}), a no-op type
		// assertion that only filtered nil; make the nil/missing check explicit.
		item := m[field]
		if item == nil {
			log.Error("model.MapData.StrID err (%v)", field)
			continue
		}
		if reflect.TypeOf(item).Kind() == reflect.Float64 {
			item = int64(item.(float64))
		}
		data = append(data, item)
	}
	if len(data) == 0 {
		return ""
	}
	return fmt.Sprintf(arr[0], data...)
}
// NumberToInt64 walks the document and replaces every json.Number value
// with its int64 form in place. Conversion failures are logged; the last
// failure (if any) is returned and the failed entry is left as zero.
func (m MapData) NumberToInt64() (err error) {
	for key, val := range m {
		num, isNum := val.(json.Number)
		if !isNum {
			continue
		}
		if m[key], err = num.Int64(); err != nil {
			log.Error("service.log.numberToInt64(%v)(%v)", num, err)
		}
	}
	return
}

View File

@@ -0,0 +1,109 @@
package model
// Asset type discriminators for MngAsset.Type.
const (
	// MngAssetTypeDB is a database asset.
	MngAssetTypeDB = 1
	// MngAssetTypeES is an Elasticsearch cluster asset.
	MngAssetTypeES = 2
	// MngAssetTypeDatabus is a databus (message queue) asset.
	MngAssetTypeDatabus = 3
	// MngAssetTypeTable is a database table asset.
	MngAssetTypeTable = 4
)

// MngBusiness is a managed search business with its registered apps.
type MngBusiness struct {
	ID       int64             `json:"id"`
	Name     string            `json:"name"`
	Desc     string            `json:"desc"`
	Apps     []*MngBusinessApp `json:"apps"`
	AppsJSON string            `json:"-"` // raw JSON form of Apps as stored in the DB
}

// MngBusinessApp is one app entry inside a business.
type MngBusinessApp struct {
	AppID    string `json:"appid"`
	IncrWay  string `json:"incr_way"`  // incremental sync strategy
	IncrOpen bool   `json:"incr_open"` // whether incremental sync is enabled
}

// MngAsset is a managed asset (DB/ES/databus/table); Config is a JSON blob
// whose shape depends on Type.
type MngAsset struct {
	ID     int64  `json:"id"`
	Name   string `json:"name"`
	Type   int    `json:"type"` // one of the MngAssetType* constants
	Config string `json:"config"`
	Desc   string `json:"desc"`
}

// MngAssetTable is the decoded Config of a table asset.
type MngAssetTable struct {
	TablePrefix string `json:"prefix"`
	TableFormat string `json:"format"` // sharding suffix pattern
}

// MngAssetDatabus is the decoded Config of a databus asset.
type MngAssetDatabus struct {
	DatabusInfo    string `json:"info"`
	DatabusIndexID string `json:"index_id"`
}
// MngApp is the full per-app indexing configuration: source DB/table,
// databus stream, target index naming, mapping and sync pacing.
type MngApp struct {
	ID              int64   `json:"id" form:"id"`
	Business        string  `json:"business" form:"business"`
	AppID           string  `json:"appid" form:"appid"`
	Desc            string  `json:"desc" form:"desc"`
	DBName          string  `json:"db_name" form:"db_name"`
	ESName          string  `json:"es_name" form:"es_name"`
	TableName       string  `json:"table_name" form:"table_name"`
	TablePrefix     string  `json:"-"` // resolved from the table asset, not exposed
	TableFormat     string  `json:"-"`
	DatabusName     string  `json:"databus_name" form:"databus_name"`
	DatabusInfo     string  `json:"-"` // resolved from the databus asset, not exposed
	DatabusIndexID  string  `json:"-"`
	IndexPrefix     string  `json:"index_prefix" form:"index_prefix"`
	IndexVersion    string  `json:"index_version" form:"index_version"`
	IndexFormat     string  `json:"index_format" form:"index_format"` // index sharding pattern
	IndexType       string  `json:"index_type" form:"index_type"`
	IndexID         string  `json:"index_id" form:"index_id"` // doc-id template, see MapData.StrID
	DataIndexSuffix string  `json:"data_index_suffix" form:"data_index_suffix"`
	IndexMapping    string  `json:"index_mapping" form:"index_mapping"`
	DataFields      string  `json:"data_fields" form:"data_fields"`
	DataExtra       string  `json:"data_extra" form:"data_extra"`
	ReviewNum       int     `json:"review_num" form:"review_num"`
	ReviewTime      int     `json:"review_time" form:"review_time"`
	Sleep           float64 `json:"sleep" form:"sleep"` // pause between sync batches, seconds (fractional allowed)
	Size            int     `json:"size" form:"size"`   // sync batch size
	SQLByID         string  `json:"sql_by_id" form:"sql_by_id"`
	SQLByMtime      string  `json:"sql_by_mtime" form:"sql_by_mtime"`
	SQLByIDMtime    string  `json:"sql_by_idmtime" form:"sql_by_idmtime"`
	QueryMaxIndexes int     `json:"query_max_indexes" form:"query_max_indexes"` // cap on indices a single query may touch
}
// MngCount is a statistics query request for the management dashboard.
type MngCount struct {
	Business string `json:"business" form:"business"`
	Type     string `json:"type" form:"type"`
	Name     string `json:"name"`
	Chart    string `json:"chart"` // chart kind to render
	Param    string `json:"param"`
}

// MngCountRes is one time-bucketed count.
type MngCountRes struct {
	Time  string `json:"time"`
	Count string `json:"count"`
}

// MngPercentRes is one named share of a percentage breakdown.
type MngPercentRes struct {
	Name  string `json:"name"`
	Count string `json:"count"`
}

// UnamesData is the response of the uid -> username lookup service.
type UnamesData struct {
	Code int `json:"code"`
	Data map[string]string // uid -> username
}

View File

@@ -0,0 +1,42 @@
package model
const (
	// UserSearchDB bili_search.
	UserSearchDB = "bili_search"
	// DBDsnFormat is the MySQL DSN template: user, password, host, port, db.
	DBDsnFormat = "%s:%s@tcp(%s:%s)/%s?timeout=5s&readTimeout=5s&writeTimeout=5s&parseTime=true&loc=Local&charset=utf8mb4,utf8"
)

// GFAsset is a v2 (generic-framework) asset record.
type GFAsset struct {
	ID          int64  `json:"id"`
	Type        string `json:"type"`
	Name        string `json:"name"`
	DSN         string `json:"dsn"`
	DB          string `json:"db"`
	Regex       string `json:"relex"` // NOTE(review): tag "relex" looks like a typo for "regex" — confirm against DB/consumers before changing
	Fields      string `json:"fields"`
	Description string `json:"description"`
	State       int8   `json:"state"`
}

// GFBusiness is a v2 business record with its JSON-encoded configs.
type GFBusiness struct {
	ID           int64  `json:"id"`
	PID          int64  `json:"pid"` // parent business id
	Name         string `json:"name"`
	DataConf     string `json:"data_conf"`
	IndexConf    string `json:"index_conf"`
	BusinessConf string `json:"business_conf"`
	Description  string `json:"description"`
	State        int8   `json:"state"`
	Mtime        string `json:"mtime"`
}

// TableField describes one column of a source table.
type TableField struct {
	Name    string `json:"name"`
	Type    string `json:"type"`
	Count   int    `json:"count"`
	Primary bool   `json:"primary"` // whether the column is (part of) the primary key
}

View File

@@ -0,0 +1,36 @@
package model
// Pager is the shared pagination request fragment.
type Pager struct {
	Pn int `form:"pn" validate:"min=1" default:"1"`  // page number (1-based)
	Ps int `form:"ps" validate:"min=1" default:"10"` // page size
}

// ParamMngBusiness is the request for listing/saving a managed business.
type ParamMngBusiness struct {
	ID    int64  `form:"id"`
	Name  string `form:"name"`
	Desc  string `form:"desc"`
	Apps  string `form:"apps"`   // JSON-encoded []MngBusinessApp
	IsJob bool   `form:"is_job"` // request originates from the sync job, not the admin UI
	Pager
}

// ParamMngBusinessApp is the request for updating one app of a business.
type ParamMngBusinessApp struct {
	Business string `form:"business"`
	App      string `form:"app"`
	IsJob    bool   `form:"is_job"`
	IncrWay  string `form:"incr_way"`
	IncrOpen bool   `form:"incr_open"`
}

// ParamMngAsset is the request for listing/saving a managed asset.
type ParamMngAsset struct {
	ID     int64  `form:"id"`
	Type   int    `form:"type"` // one of the MngAssetType* constants
	Name   string `form:"name"`
	Config string `form:"config"`
	Desc   string `form:"desc"`
	Pager
}

View File

@@ -0,0 +1,169 @@
package model
import (
"encoding/json"
"gopkg.in/olivere/elastic.v5"
)
// QueryParams is the top-level request of the generic query endpoint.
type QueryParams struct {
	Business     string `form:"business" params:"business;Required" validate:"required"`
	QueryBodyStr string `form:"query" params:"query;Required" validate:"required"` // raw JSON query body, decoded into QueryBody
	DebugLevel   int    `form:"debug_level" params:"debug_level" default:"0"`      // 2 (default): full debug incl. post-execution DSL analysis; 1: pre-execution analysis only (avoids 504s hiding the output). Served via /x/admin/search/query/debug (query-body validation + DSL + explain), unsigned request
	QueryBody    *QueryBody
	AppIDConf    *QueryConfDetail
}

// QueryBody is the decoded JSON query.
type QueryBody struct {
	Fields           []string            `json:"fields"` // _source whitelist; default "*" (all)
	From             string              `json:"from"`   // index name(s), comma separated
	Where            *QueryBodyWhere     `json:"where"`
	Order            []map[string]string `json:"order"`
	OrderScoreFirst  bool                `json:"order_score_first"`
	OrderRandomSeed  string              `json:"order_random_seed"` // seed for random ordering
	Scroll           bool                `json:"scroll"`
	Highlight        bool                `json:"highlight"` // default false
	Pn               int                 `json:"pn"`        // Range(1,5000), default 1
	Ps               int                 `json:"ps"`        // Range(1,1000), default 10
}

// QueryBodyWhere groups all filter conditions of a query.
type QueryBodyWhere struct {
	EQ       map[string]interface{}      `json:"eq"`    // numbers or strings, e.g. [12,333,67] ["asd", "hello"]
	Or       map[string]interface{}      `json:"or"`    // minimum_should is not supported yet
	In       map[string][]interface{}    `json:"in"`    // TODO: refactor to slice
	Range    map[string]string           `json:"range"` // interval notation: [10,20) (2018-05-10 00:00:00,2018-05-31 00:00:00] (,30]
	Like     []QueryBodyWhereLike        `json:"like"`
	Enhanced []QueryBodyWhereEnhanced    `json:"enhanced"` // aggregations: group by, collapse, ...
	Combo    []QueryBodyWhereCombo       `json:"combo"`    // free combination of AND/OR
	Not      map[string]map[string]bool  `json:"not"`      // negates eq/in/range conditions
}

// QueryBodyWhereLike is one fuzzy-match condition.
type QueryBodyWhereLike struct {
	KWFields []string `json:"kw_fields"`
	KW       []string `json:"kw"` // keywords are whitespace-joined into the query
	Or       bool     `json:"or"` // default false (AND)
	Level    string   `json:"level"` // match strictness, one of the LikeLevel* constants; default "default"
}

// QueryBodyWhereEnhanced is one aggregation condition (group by, collapse, ...).
type QueryBodyWhereEnhanced struct {
	Mode  string              `json:"mode"` // one of the EnhancedMode* constants
	Field string              `json:"field"`
	Order []map[string]string `json:"order"`
	Size  int                 `json:"size"` // TODO: sdk should support sub-result size
	// more conditions...
}

// QueryBodyWhereCombo is a free AND/OR combination of conditions; each Min
// value is the minimum number of matching clauses of that kind.
type QueryBodyWhereCombo struct {
	EQ       []map[string]interface{}   `json:"eq"`
	In       []map[string][]interface{} `json:"in"`
	Range    []map[string]string        `json:"range"`
	NotEQ    []map[string]interface{}   `json:"not_eq"`
	NotIn    []map[string][]interface{} `json:"not_in"`
	NotRange []map[string]string        `json:"not_range"`
	Min      struct {
		EQ       int `json:"eq"`
		In       int `json:"in"`
		Range    int `json:"range"`
		NotEQ    int `json:"not_eq"`
		NotIn    int `json:"not_in"`
		NotRange int `json:"not_range"`
		Min      int `json:"min"` // minimum across the per-kind minimums
	} `json:"min"`
}
// QueryConfDetail is the per-appid query configuration.
type QueryConfDetail struct {
	ESCluster     string
	IndexPrefix   string
	IndexType     string
	IndexID       string // doc-id template, see MapData.StrID
	IndexMapping  string
	MaxIndicesNum int
	QueryMode     int // 1: fully generic query body; 2: customized on top of the body; 3: nested query
	MaxPageSize   int // maximum page size
}

// QueryResult query result.
type QueryResult struct {
	Order  string            `json:"order"`
	Sort   string            `json:"sort"`
	Result json.RawMessage   `json:"result"`
	Debug  *QueryDebugResult `json:"debug"`
	Page   *Page             `json:"page"`
}

// QueryDebugResult collects everything the debug endpoint reports.
type QueryDebugResult struct {
	ErrMsg    []string               `json:"err_msg"`
	QueryBody string                 `json:"query_body"`
	DSL       string                 `json:"dsl"`
	Mapping   map[string]interface{} `json:"mapping"`
	Profile   *elastic.SearchProfile `json:"profile"` // execution performance analysis
}

// AddErrMsg appends one or more messages to the debug error list.
func (qdr *QueryDebugResult) AddErrMsg(msg ...string) {
	qdr.ErrMsg = append(qdr.ErrMsg, msg...)
}

// UpsertResult upsert result.
type UpsertResult struct {
}
var (
	// QueryModeBasic completely using basic query & nested .
	QueryModeBasic = 1
	// QueryModeExtra write some extra conditions under basic query .
	QueryModeExtra = 2
	// EnhancedModeGroupBy group by .
	EnhancedModeGroupBy = "group_by"
	// EnhancedModeSum sum from a filed .
	EnhancedModeSum = "sum"
	// EnhancedModeCollapse collapse .
	EnhancedModeCollapse = "collapse"
	// EnhancedModeDistinct distinct .
	EnhancedModeDistinct = "distinct"
	// EnhancedModeDistinctCount distinct .
	EnhancedModeDistinctCount = "distinct_count"
	// EnhancedModeGroupBySum group by sum .
	EnhancedModeGroupBySum = "group_by_sum"
	// EnhancedModeGroupByTop top hits .
	EnhancedModeGroupByTop = "group_by_tophits"
	// LikeLevelHigh high level .
	LikeLevelHigh = "high"
	// LikeLevelMiddel middle level .
	// NOTE(review): identifier misspells "Middle"; exported and possibly
	// referenced elsewhere, so it is kept as-is.
	LikeLevelMiddel = "middle"
	// LikeLevelLow low level .
	LikeLevelLow = "low"
	// QueryConf holds the hand-maintained (customized) per-appid query configs.
	QueryConf = map[string]*QueryConfDetail{
		"archive_video_score":     {ESCluster: "ssd_archive", IndexPrefix: "archive_video", MaxIndicesNum: 1, QueryMode: QueryModeExtra},
		"archive_score":           {ESCluster: "ssd_archive", IndexPrefix: "archive", MaxIndicesNum: 1, QueryMode: QueryModeExtra},
		"task_qa_random":          {ESCluster: "internalPublic", IndexPrefix: "task_qa", MaxIndicesNum: 1, QueryMode: QueryModeExtra},
		"esports_contests_date":   {ESCluster: "pcie_pub_out01", IndexPrefix: "esports_contests_map", MaxIndicesNum: 1, QueryMode: QueryModeExtra},
		"creative_archive_search": {ESCluster: "pcie_pub_out01", IndexPrefix: "creative_archive", MaxIndicesNum: 1, QueryMode: QueryModeExtra},
		"creative_archive_staff":  {ESCluster: "pcie_pub_out02", IndexPrefix: "creative_archive", IndexID: "%d,id", IndexType: "base", MaxIndicesNum: 1, QueryMode: QueryModeExtra},
		"creative_archive_apply":  {ESCluster: "pcie_pub_out02", IndexPrefix: "creative_archive", MaxIndicesNum: 1, QueryMode: QueryModeExtra},
		"dm_history":              {ESCluster: "dmout", IndexPrefix: "dm_search", MaxIndicesNum: 1, QueryMode: QueryModeExtra},
		// "pgc_contract_info":  {ESCluster: "pcie_pub_out01", IndexPrefix: "pgc_contract_info", MaxIndicesNum: 1, QueryMode: QueryModeNested},
		// "pgc_contract_video": {ESCluster: "pcie_pub_out01", IndexPrefix: "pgc_contract_video", MaxIndicesNum: 1, QueryMode: QueryModeNested},
	}
	// PermConf grants per-business permissions for restricted query features.
	PermConf = map[string]map[string]string{
		"star":     {"ops_log_billions": "true"},                                                         // businesses allowed to use "*" to address many indices
		"scroll":   {"dm_search": "true"},                                                                // businesses allowed to use scroll
		"oht":      {"creative_reply": "true", "creative_reply_isreport": "true", "esports": "true"},     // businesses with max_result_window raised to 100k
		"es_cache": {"comics_firebird": "true", "pgc_media": "true", "pgc_season": "true"},               // request cache (expiry follows the index refresh_interval)
		//"routing": {"creative_reply": "o_mid"},
	}
)

View File

@@ -0,0 +1 @@
package model

View File

@@ -0,0 +1,9 @@
package model
// UNameParams carries the filters for the username search.
type UNameParams struct {
	Bsp   *BasicSearchParams // shared paging/sorting/keyword options
	MIds  []int64 `form:"mids,split" params:"mids"`
	Sex   int64   `form:"sex" params:"sex" default:"-1"` // -1 = not filtered
	Ranks []int64 `form:"ranks,split" params:"ranks"`
}

View File

@@ -0,0 +1,17 @@
package model
// UpsertParams is the request of the upsert endpoint; DataStr is the raw
// JSON payload decoded by the handler.
type UpsertParams struct {
	Business   string `form:"business" validate:"required"`
	DataStr    string `form:"data" validate:"required"`
	Insert     bool   `form:"insert" default:"false"` // true: insert-only, false: upsert
	UpsertBody []UpsertBody
}

// UpsertBody is one document write; the job's bulk optimization should
// follow this template.
type UpsertBody struct {
	IndexName string
	IndexType string
	IndexID   string
	Doc       MapData
}

View File

@@ -0,0 +1,64 @@
# Bazel build definitions for the search-admin service package.
# Auto-managed (gazelle); regenerate rather than hand-editing srcs/deps.
load(
    "@io_bazel_rules_go//go:def.bzl",
    "go_test",
    "go_library",
)

package(default_visibility = ["//visibility:public"])

# All files of this package, for tooling that needs raw sources.
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)

# Recursive source aggregation target consumed by the repo root.
filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

go_test(
    name = "go_default_test",
    srcs = [
        "archive_test.go",
        "log_test.go",
        "mng_v2_test.go",
        "service_test.go",
        "update_test.go",
    ],
    embed = [":go_default_library"],
    rundir = ".",
    tags = ["automanaged"],
    deps = [
        "//app/admin/main/search/conf:go_default_library",
        "//app/admin/main/search/model:go_default_library",
        "//vendor/github.com/smartystreets/goconvey/convey:go_default_library",
    ],
)

go_library(
    name = "go_default_library",
    srcs = [
        "archive.go",
        "log.go",
        "mng.go",
        "mng_v2.go",
        "query.go",
        "service.go",
        "update.go",
        "upsert.go",
    ],
    importpath = "go-common/app/admin/main/search/service",
    tags = ["automanaged"],
    deps = [
        "//app/admin/main/search/conf:go_default_library",
        "//app/admin/main/search/dao:go_default_library",
        "//app/admin/main/search/model:go_default_library",
        "//library/ecode:go_default_library",
        "//library/log:go_default_library",
        "//library/sync/errgroup:go_default_library",
    ],
)

View File

@@ -0,0 +1,46 @@
package service
import (
"context"
"fmt"
"go-common/app/admin/main/search/dao"
"go-common/app/admin/main/search/model"
"go-common/library/ecode"
)
// ArchiveCheck queries the archive-check (audit) index; on failure the dao
// error is recorded in prometheus and replaced with a business ecode.
func (s *Service) ArchiveCheck(c context.Context, sp *model.ArchiveCheckParams) (res *model.SearchResult, err error) {
	res, err = s.dao.ArchiveCheck(c, sp)
	if err == nil {
		return
	}
	dao.PromError(fmt.Sprintf("es:%s 搜索archivecheck失败", sp.Bsp.AppID), "s.dao.SearchArchiveCheck(%v) error(%v) ", sp, err)
	err = ecode.SearchArchiveCheckFailed
	return
}
// Video queries the video-relation index; on failure the dao error is
// recorded in prometheus and replaced with a business ecode.
func (s *Service) Video(c context.Context, sp *model.VideoParams) (res *model.SearchResult, err error) {
	res, err = s.dao.Video(c, sp)
	if err == nil {
		return
	}
	dao.PromError(fmt.Sprintf("es:%s 搜索video失败", sp.Bsp.AppID), "s.dao.Video(%v) error(%v) ", sp, err)
	err = ecode.SearchVideoFailed
	return
}
// TaskQa queries the QA-task index; on failure the dao error is recorded in
// prometheus and replaced with a business ecode.
// NOTE(review): reuses ecode.SearchVideoFailed — no TaskQa-specific ecode
// appears to exist; confirm before changing.
func (s *Service) TaskQa(c context.Context, sp *model.TaskQa) (res *model.SearchResult, err error) {
	res, err = s.dao.TaskQa(c, sp)
	if err == nil {
		return
	}
	dao.PromError(fmt.Sprintf("es:%s 搜索TaskQa失败", sp.Bsp.AppID), "s.dao.TaskQa(%v) error(%v) ", sp, err)
	err = ecode.SearchVideoFailed
	return
}
// ArchiveCommerce queries the commercial-archive index; on failure the dao
// error is recorded in prometheus and replaced with a business ecode.
func (s *Service) ArchiveCommerce(c context.Context, sp *model.ArchiveCommerce) (res *model.SearchResult, err error) {
	if res, err = s.dao.ArchiveCommerce(c, sp); err != nil {
		// BUG FIX: the log format previously named s.dao.TaskQa (copy-paste),
		// which misattributed failures of this endpoint.
		dao.PromError(fmt.Sprintf("es:%s 搜索ArchiveCommerce失败", sp.Bsp.AppID), "s.dao.ArchiveCommerce(%v) error(%v) ", sp, err)
		err = ecode.SearchVideoFailed
	}
	return
}

View File

@@ -0,0 +1 @@
package service

View File

@@ -0,0 +1,229 @@
package service
import (
"bytes"
"context"
"encoding/json"
"fmt"
"strings"
"go-common/app/admin/main/search/dao"
"go-common/app/admin/main/search/model"
"go-common/library/ecode"
"go-common/library/log"
)
// convert flattens raw query parameters: each value may itself be a
// comma-separated list. Empty raw values are dropped, and keys whose values
// were all empty are omitted from the result entirely.
func convert(params map[string][]string) (res map[string][]interface{}) {
	res = make(map[string][]interface{})
	for key, values := range params {
		var flat []interface{}
		for _, raw := range values {
			if raw == "" {
				continue
			}
			for _, piece := range strings.Split(raw, ",") {
				flat = append(flat, piece)
			}
		}
		if len(flat) > 0 {
			res[key] = flat
		}
	}
	return res
}
// Check looks up the log business configuration registered for appID and
// businessID; ok reports whether such a configuration exists.
func (s *Service) Check(appID string, businessID int) (business *model.Business, ok bool) {
	return s.dao.GetLogInfo(appID, businessID)
}
// numberToInt64 returns a copy of in where every json.Number value has been
// converted to int64. Conversion failures are logged and leave the entry at
// the int64 zero value.
func numberToInt64(in map[string]interface{}) (out map[string]interface{}) {
	out = map[string]interface{}{}
	var err error
	for key, val := range in {
		num, isNum := val.(json.Number)
		if !isNum {
			out[key] = val
			continue
		}
		if out[key], err = num.Int64(); err != nil {
			log.Error("service.log.numberToInt64(%v)(%v)", num, err)
		}
	}
	return
}
// uDepTs enriches audit-log hits with the operator's department: it decodes
// every raw hit, collects the "uid" values, resolves uid -> department via
// the dao and writes the result back into each hit as "department".
func (s *Service) uDepTs(c context.Context, res *model.SearchResult, sp *model.LogParams) *model.SearchResult {
	var (
		uids   []string
		result []map[string]interface{}
		err    error
	)
	result = []map[string]interface{}{}
	for _, j := range res.Result {
		item := map[string]interface{}{}
		// UseNumber keeps large integers exact; normalized to int64 below.
		decoder := json.NewDecoder(bytes.NewReader(j))
		decoder.UseNumber()
		if err = decoder.Decode(&item); err != nil {
			// Decode failures are logged but the (empty) item is still kept.
			dao.PromError(fmt.Sprintf("es:%s 搜索LogAudit JSON失败", sp.Bsp.AppID), "s.dao.LogAudit(%v) json error(%v)", sp, err)
		}
		item = numberToInt64(item)
		result = append(result, item)
		if _, ok := item["uid"]; ok {
			uids = append(uids, fmt.Sprintf("%v", item["uid"]))
		}
	}
	var depRs = &model.UDepTsData{
		Data: map[string]string{},
	}
	if len(uids) > 0 {
		if depRs, err = s.dao.UDepTs(c, uids); err != nil {
			dao.PromError(fmt.Sprintf("es:%s 搜索LogAudit失败", sp.Bsp.AppID), "s.dao.LogAudit(%v) error(%v)", sp, err)
			// NOTE(review): this assignment is dead — err is never returned,
			// so callers cannot see the lookup failure; hits are returned
			// without department info. Confirm whether that is intended.
			err = ecode.SearchLogAuditFailed
			return res
		}
	}
	for i, j := range result {
		// Always set "department" (empty string when unknown).
		result[i]["department"] = ""
		if _, ok := j["uid"]; ok {
			if m, sok := depRs.Data[fmt.Sprintf("%v", j["uid"])]; sok {
				result[i]["department"] = m
			}
		}
		// Re-serialize the enriched hit back into the raw result slice.
		if res.Result[i], err = json.Marshal(j); err != nil {
			log.Error("s.dao.LogAudit(%v) json res(%v)", err, j)
		}
	}
	return res
}
// IP enriches user-action-log hits with a human-readable location: it
// decodes every raw hit, collects non-empty "ip" values, resolves them to
// country/province/city/ISP via the dao and writes the joined string back
// into each hit as "location".
func (s *Service) IP(c context.Context, res *model.SearchResult, sp *model.LogParams) *model.SearchResult {
	var (
		ip     []string
		result []map[string]interface{}
		err    error
	)
	result = []map[string]interface{}{}
	for _, j := range res.Result {
		item := map[string]interface{}{}
		// UseNumber keeps large integers exact; normalized to int64 below.
		decoder := json.NewDecoder(bytes.NewReader(j))
		decoder.UseNumber()
		if err = decoder.Decode(&item); err != nil {
			// Decode failures are logged but the (empty) item is still kept.
			dao.PromError(fmt.Sprintf("es:%s 搜索LogUserAction JSON失败", sp.Bsp.AppID), "s.dao.LogUserAction(%v) json error(%v)", sp, err)
		}
		item = numberToInt64(item)
		result = append(result, item)
		if _, ok := item["ip"]; ok {
			// Skip empty-string IPs to avoid useless lookups.
			if v := fmt.Sprintf("%v", item["ip"]); v != "" {
				ip = append(ip, v)
			}
		}
	}
	var ipData = &model.IPData{
		Data: map[string]struct {
			Country  string `json:"country"`
			Province string `json:"province"`
			City     string `json:"city"`
			Isp      string `json:"isp"`
		}{},
	}
	if len(ip) > 0 {
		if ipData, err = s.dao.IP(c, ip); err != nil {
			dao.PromError(fmt.Sprintf("es:%s 搜索LogUserAction失败", sp.Bsp.AppID), "s.dao.LogUserAction(%v) error(%v)", sp, err)
			// NOTE(review): this assignment is dead — err is never returned,
			// so callers cannot see the lookup failure.
			err = ecode.SearchLogAuditFailed
			return res
		}
	}
	for i, j := range result {
		// Always set "location" (empty string when unknown).
		result[i]["location"] = ""
		if _, ok := j["ip"]; ok {
			if m, sok := ipData.Data[fmt.Sprintf("%v", j["ip"])]; sok {
				// Join the non-empty location parts as country-province-city-isp.
				location := make([]string, 0, 4)
				for _, v := range []string{m.Country, m.Province, m.City, m.Isp} {
					if v != "" {
						location = append(location, v)
					}
				}
				result[i]["location"] = strings.Join(location, "-")
			}
		}
		// Re-serialize the enriched hit back into the raw result slice.
		if res.Result[i], err = json.Marshal(j); err != nil {
			log.Error("s.dao.LogUserAction(%v) json res(%v)", err, j)
		}
	}
	return res
}
// LogAudit queries the audit-log index and enriches every hit with the
// operator's department.
func (s *Service) LogAudit(c context.Context, params map[string][]string, sp *model.LogParams, business *model.Business) (res *model.SearchResult, err error) {
	res, err = s.dao.LogAudit(c, convert(params), sp, business)
	if err != nil {
		dao.PromError(fmt.Sprintf("es:%s 搜索LogAudit失败", sp.Bsp.AppID), "s.dao.LogAudit(%v) error(%v)", sp, err)
		err = ecode.SearchLogAuditFailed
		return
	}
	res = s.uDepTs(c, res, sp)
	return
}
// LogAuditGroupBy queries the audit-log index grouped by the fields given
// in params["group"] (required), paginates the grouped result in memory and
// enriches each bucket with the operator's department.
func (s *Service) LogAuditGroupBy(c context.Context, params map[string][]string, sp *model.LogParams, business *model.Business) (res *model.SearchResult, err error) {
	p := convert(params)
	// "group" is mandatory for this endpoint.
	if v, ok := p["group"]; !ok || len(v) == 0 {
		err = ecode.RequestErr
		return
	}
	if res, err = s.dao.LogAuditGroupBy(c, p, sp, business); err != nil {
		dao.PromError(fmt.Sprintf("es:%s 搜索LogAuditGroupBy失败", sp.Bsp.AppID), "s.dao.LogAuditGroupBy(%v) error(%v)", sp, err)
		err = ecode.SearchLogAuditOidFailed
		return
	}
	// In-memory pagination: the dao returns all buckets, sliced here by
	// pn/ps. The middle branch handles the final partial page.
	// NOTE(review): the boundary conditions (<= on both sides, slicing to
	// Page.Total) look fragile — verify the last-page behavior with
	// Total an exact multiple of Ps.
	if res.Page.Total < int64(res.Page.Ps*(res.Page.Pn-1)) || len(res.Result) == 0 {
		res.Result = []json.RawMessage{}
	} else if int64(res.Page.Ps*(res.Page.Pn-1)) <= res.Page.Total && res.Page.Total <= int64(res.Page.Ps*res.Page.Pn) {
		res.Result = res.Result[res.Page.Ps*(res.Page.Pn-1):res.Page.Total]
	} else {
		res.Result = res.Result[res.Page.Ps*(res.Page.Pn-1) : res.Page.Ps*res.Page.Pn]
	}
	res = s.uDepTs(c, res, sp)
	return
}
// LogAuditDelete deletes audit-log documents matching the given filters.
func (s *Service) LogAuditDelete(c context.Context, params map[string][]string, sp *model.LogParams, business *model.Business) (res *model.SearchResult, err error) {
	res, err = s.dao.LogAuditDelete(c, convert(params), sp, business)
	if err != nil {
		dao.PromError(fmt.Sprintf("es:%s LogAuditDelete失败", sp.Bsp.AppID), "s.dao.LogAuditDelete(%v) error(%v)", sp, err)
		err = ecode.SearchLogAuditFailed
	}
	return
}
// LogUserAction queries the user-action-log index and enriches every hit
// with a human-readable location derived from its IP.
func (s *Service) LogUserAction(c context.Context, params map[string][]string, sp *model.LogParams, business *model.Business) (res *model.SearchResult, err error) {
	res, err = s.dao.LogUserAction(c, convert(params), sp, business)
	if err != nil {
		dao.PromError(fmt.Sprintf("es:%s 搜索LogUserAction失败", sp.Bsp.AppID), "s.dao.LogUserAction(%v) error(%v)", sp, err)
		err = ecode.SearchLogUserActionFailed
		return
	}
	res = s.IP(c, res, sp)
	return
}
// LogUserActionDelete deletes user-action-log documents matching the given
// filters.
func (s *Service) LogUserActionDelete(c context.Context, params map[string][]string, sp *model.LogParams, business *model.Business) (res *model.SearchResult, err error) {
	res, err = s.dao.LogUserActionDelete(c, convert(params), sp, business)
	if err != nil {
		dao.PromError(fmt.Sprintf("es:%s LogUserActionDelete失败", sp.Bsp.AppID), "s.dao.LogUserActionDelete(%v) error(%v)", sp, err)
		err = ecode.SearchLogAuditFailed
	}
	return
}
// LogCount records an access-count metric for the given log business and uid.
// Fire-and-forget: the dao call returns nothing to the caller.
func (s *Service) LogCount(c context.Context, name string, business int, uid interface{}) {
	s.dao.LogCount(c, name, business, uid)
}

View File

@@ -0,0 +1,78 @@
package service
import (
"context"
"testing"
"go-common/app/admin/main/search/model"
. "github.com/smartystreets/goconvey/convey"
)
// Test_LogAudit exercises the audit-log search path against the test config.
func Test_LogAudit(t *testing.T) {
	p := &model.LogParams{
		Bsp: &model.BasicSearchParams{AppID: "log_audit"},
	}
	var params map[string][]string
	Convey("LogAudit", t, WithService(func(s *Service) {
		business, ok := svr.Check("log_audit", 0)
		if !ok {
			return
		}
		_, err := s.LogAudit(context.Background(), params, p, business)
		So(err, ShouldBeNil)
	}))
}
//func Test_LogAuditGroupBy(t *testing.T) {
// var (
// err error
// c = context.Background()
// p = &model.LogParams{
// Bsp: &model.BasicSearchParams{
// AppID: "log_audit_group",
// },
// }
// params map[string][]string
// )
// params = map[string][]string{
// "group": {"oid"},
// }
// Convey("LogAuditGroupBy", t, WithService(func(s *Service) {
// indexMapping, indexFmt, ok := svr.Check("log_audit", p.Business)
// if !ok {
// return
// }
// _, err = s.LogAuditGroupBy(c, params, p, indexMapping, indexFmt)
// Printf("---------%v", err)
// So(err, ShouldBeNil)
// }))
//}
// Test_LogUserAction exercises the user-action log search path.
func Test_LogUserAction(t *testing.T) {
	p := &model.LogParams{
		Bsp: &model.BasicSearchParams{AppID: "log_user_action"},
	}
	var params map[string][]string
	Convey("LogUserAction", t, WithService(func(s *Service) {
		business, ok := svr.Check("log_user_action", 0)
		if !ok {
			return
		}
		_, err := s.LogUserAction(context.Background(), params, p, business)
		So(err, ShouldBeNil)
	}))
}

View File

@@ -0,0 +1,307 @@
package service
import (
"context"
"encoding/json"
"strconv"
"go-common/app/admin/main/search/model"
"go-common/library/ecode"
"go-common/library/sync/errgroup"
)
// BusinessList pages configured businesses filtered by name.
func (s *Service) BusinessList(ctx context.Context, name string, pn, ps int) (list []*model.MngBusiness, total int64, err error) {
	if list, err = s.dao.BusinessList(ctx, name, (pn-1)*ps, ps); err != nil {
		return
	}
	total, err = s.dao.BusinessTotal(ctx, name)
	return
}
// BusinessAll returns every configured business.
func (s *Service) BusinessAll(ctx context.Context) ([]*model.MngBusiness, error) {
	return s.dao.BusinessAll(ctx)
}
// BusinessInfo fetches one business by primary key.
func (s *Service) BusinessInfo(ctx context.Context, id int64) (*model.MngBusiness, error) {
	return s.dao.BusinessInfo(ctx, id)
}
// AddBusiness creates a business after verifying the name is not taken.
func (s *Service) AddBusiness(ctx context.Context, b *model.MngBusiness) (id int64, err error) {
	var existing *model.MngBusiness
	if existing, err = s.dao.BusinessInfoByName(ctx, b.Name); err != nil {
		return
	}
	if existing != nil {
		// duplicate name: refuse to create
		err = ecode.SearchBusinessExistErr
		return
	}
	return s.dao.AddBusiness(ctx, b)
}
// UpdateBusiness persists changes to an existing business.
func (s *Service) UpdateBusiness(ctx context.Context, b *model.MngBusiness) error {
	return s.dao.UpdateBusiness(ctx, b)
}
// UpdateBusinessApp attaches, detaches or reconfigures an app under a business.
//
// Semantics (unchanged):
//   - app present and isJob == false: the app is removed from the list;
//   - app present and isJob == true: its incremental settings are updated;
//   - app absent: it is appended with the given incremental settings.
// The resulting app list is re-marshaled into AppsJSON and saved.
func (s *Service) UpdateBusinessApp(ctx context.Context, business, app, incrWay string, isJob, incrOpen bool) (err error) {
	info, err := s.dao.BusinessInfoByName(ctx, business)
	if err != nil {
		return
	}
	// Fix: BusinessInfoByName reports (nil, nil) for an unknown name — the
	// nil-on-miss contract AddBusiness relies on — so dereferencing info
	// without this guard paniced for unknown businesses.
	if info == nil {
		err = ecode.NothingFound
		return
	}
	var exist bool
	for k, v := range info.Apps {
		if v.AppID != app {
			continue
		}
		exist = true
		if !isJob {
			// detach: splice the entry out and stop scanning
			info.Apps = append(info.Apps[:k], info.Apps[k+1:]...)
			break
		}
		// reconfigure in place (v is a pointer into the slice)
		v.IncrWay = incrWay
		v.IncrOpen = incrOpen
	}
	if !exist {
		info.Apps = append(info.Apps, &model.MngBusinessApp{AppID: app, IncrWay: incrWay, IncrOpen: incrOpen})
	}
	bs, err := json.Marshal(info.Apps)
	if err != nil {
		return
	}
	info.AppsJSON = string(bs)
	err = s.dao.UpdateBusiness(ctx, info)
	return
}
// AssetList pages assets of the given type filtered by name.
func (s *Service) AssetList(ctx context.Context, typ int, name string, pn, ps int) (list []*model.MngAsset, total int64, err error) {
	if list, err = s.dao.AssetList(ctx, typ, name, (pn-1)*ps, ps); err != nil {
		return
	}
	total, err = s.dao.AssetTotal(ctx, typ, name)
	return
}
// AssetAll returns every configured asset.
func (s *Service) AssetAll(ctx context.Context) ([]*model.MngAsset, error) {
	return s.dao.AssetAll(ctx)
}
// AssetInfo fetches one asset by primary key.
func (s *Service) AssetInfo(ctx context.Context, id int64) (*model.MngAsset, error) {
	return s.dao.AssetInfo(ctx, id)
}
// AddAsset creates an asset after verifying the name is not taken.
func (s *Service) AddAsset(ctx context.Context, a *model.MngAsset) (id int64, err error) {
	var existing *model.MngAsset
	if existing, err = s.dao.AssetInfoByName(ctx, a.Name); err != nil {
		return
	}
	if existing != nil {
		// duplicate name: refuse to create
		err = ecode.SearchAssetExistErr
		return
	}
	return s.dao.AddAsset(ctx, a)
}
// UpdateAsset persists an asset and, for databus/table assets with a config,
// propagates the parsed config to apps that reference the asset.
func (s *Service) UpdateAsset(ctx context.Context, a *model.MngAsset) (err error) {
	if err = s.dao.UpdateAsset(ctx, a); err != nil {
		return
	}
	// nothing to propagate without a config payload
	if a.Config == "" {
		return
	}
	switch a.Type {
	case model.MngAssetTypeDatabus:
		v := new(model.MngAssetDatabus)
		if err = json.Unmarshal([]byte(a.Config), &v); err != nil {
			return
		}
		err = s.dao.UpdateAppAssetDatabus(ctx, a.Name, v)
	case model.MngAssetTypeTable:
		v := new(model.MngAssetTable)
		if err = json.Unmarshal([]byte(a.Config), &v); err != nil {
			return
		}
		err = s.dao.UpdateAppAssetTable(ctx, a.Name, v)
	}
	return
}
// AppList returns all apps registered under a business.
func (s *Service) AppList(ctx context.Context, business string) ([]*model.MngApp, error) {
	return s.dao.AppList(ctx, business)
}
// AppInfo fetches one app by primary key.
func (s *Service) AppInfo(ctx context.Context, id int64) (*model.MngApp, error) {
	return s.dao.AppInfo(ctx, id)
}
// AddApp registers a new app unless its appid is already taken.
func (s *Service) AddApp(ctx context.Context, a *model.MngApp) (id int64, err error) {
	var existing *model.MngApp
	if existing, err = s.dao.AppInfoByAppid(ctx, a.AppID); err != nil {
		return
	}
	if existing != nil {
		// NOTE(review): reuses SearchAssetExistErr for a duplicate app —
		// confirm there is no dedicated "app exists" code.
		err = ecode.SearchAssetExistErr
		return
	}
	return s.dao.AddApp(ctx, a)
}
// UpdateApp persists an app after re-deriving its table and databus settings
// from the referenced assets. The two lookups run concurrently; an empty
// asset name clears the derived fields, while a missing or unconfigured
// asset leaves them as the caller passed them.
func (s *Service) UpdateApp(ctx context.Context, a *model.MngApp) (err error) {
	group := errgroup.Group{}
	group.Go(func() error {
		// derive TablePrefix/TableFormat from the table asset's config
		if a.TableName == "" {
			a.TableFormat = ""
			a.TablePrefix = ""
			return nil
		}
		tb, e := s.dao.AssetInfoByName(ctx, a.TableName)
		if e != nil {
			return e
		}
		// asset missing or has no config: keep the incoming field values
		if tb == nil || tb.Config == "" {
			return nil
		}
		val := new(model.MngAssetTable)
		if e := json.Unmarshal([]byte(tb.Config), val); e != nil {
			return e
		}
		a.TablePrefix = val.TablePrefix
		a.TableFormat = val.TableFormat
		return nil
	})
	group.Go(func() error {
		// derive DatabusInfo/DatabusIndexID from the databus asset's config
		if a.DatabusName == "" {
			a.DatabusInfo = ""
			a.DatabusIndexID = ""
			return nil
		}
		dbus, e := s.dao.AssetInfoByName(ctx, a.DatabusName)
		if e != nil {
			return e
		}
		if dbus == nil || dbus.Config == "" {
			return nil
		}
		val := new(model.MngAssetDatabus)
		if e := json.Unmarshal([]byte(dbus.Config), val); e != nil {
			return e
		}
		a.DatabusInfo = val.DatabusInfo
		a.DatabusIndexID = val.DatabusIndexID
		return nil
	})
	// first error (if any) aborts the update; both goroutines write disjoint
	// fields of a, so there is no data race between them
	if err = group.Wait(); err != nil {
		return
	}
	err = s.dao.UpdateApp(ctx, a)
	return
}
// MngCountList returns the static catalogue of dashboard count charts:
// business group, metric type, display name, chart kind ("line"/"pie") and
// the query params the frontend sends back to MngCount/MngPercent.
// Purely in-memory — no dao call; err is always nil.
func (s *Service) MngCountList(ctx context.Context) (list []*model.MngCount, err error) {
	daily := "每日增量"
	sum := "历史总量"
	list = []*model.MngCount{
		// 业务方
		{Business: "业务方", Type: sum, Name: "业务方历史总量", Chart: "line", Param: "business=app&type=all"},
		{Business: "业务方", Type: daily, Name: "业务方每日增量", Chart: "line", Param: "business=app&type=inc"},
		// 视频+稿件
		{Business: "视频稿件", Type: daily, Name: "archive每日增量", Chart: "line", Param: "business=archive&type=inc"},
		{Business: "视频稿件", Type: daily, Name: "video每日增量", Chart: "line", Param: "business=archive_video&type=inc"},
		{Business: "视频稿件", Type: sum, Name: "archive历史总量", Chart: "line", Param: "business=archive&type=all"},
		{Business: "视频稿件", Type: sum, Name: "video历史总量", Chart: "line", Param: "business=archive_video&type=all"},
		// 弹幕
		{Business: "弹幕", Type: daily, Name: "弹幕每日增量", Chart: "line", Param: "business=dm&type=inc"},
		{Business: "弹幕", Type: daily, Name: "弹幕举报每日增量", Chart: "line", Param: "business=dm_report&type=inc"},
		{Business: "弹幕", Type: daily, Name: "弹幕监控每日增量", Chart: "line", Param: "business=dm_monitor&type=inc"},
		{Business: "弹幕", Type: sum, Name: "弹幕历史总量", Chart: "line", Param: "business=dm&type=all"},
		{Business: "弹幕", Type: sum, Name: "弹幕举报历史总量", Chart: "line", Param: "business=dm_report&type=all"},
		{Business: "弹幕", Type: sum, Name: "弹幕监控历史总量", Chart: "line", Param: "business=dm_monitor&type=all"},
		// 评论
		{Business: "评论", Type: daily, Name: "评论每日增量", Chart: "line", Param: "business=reply&type=inc"},
		// 日志
		{Business: "日志", Type: "审核日志", Name: "审核日志每日查询量", Chart: "line", Param: "business=log_audit_access&type=inc"},
		{Business: "日志", Type: "审核日志", Name: "审核日志昨日查询情况 - 业务维度", Chart: "pie", Param: "business=log_audit_business&type=inc"},
		{Business: "日志", Type: "审核日志", Name: "审核日志昨日查询情况 - 用户维度", Chart: "pie", Param: "business=log_audit_uid&type=inc"},
		{Business: "日志", Type: "用户日志", Name: "用户日志每日查询量", Chart: "line", Param: "business=log_user_action_access&type=inc"},
		{Business: "日志", Type: "用户日志", Name: "用户日志昨日查询情况 - 业务维度", Chart: "pie", Param: "business=log_user_action_business&type=inc"},
		{Business: "日志", Type: "用户日志", Name: "用户日志昨日查询情况 - 用户维度", Chart: "pie", Param: "business=log_user_action_uid&type=inc"},
		// 用户
		{Business: "用户", Type: sum, Name: "用户历史总量", Chart: "line", Param: "business=user&type=all"},
		// 专栏
		{Business: "专栏", Type: daily, Name: "专栏每日增量", Chart: "line", Param: "business=article&type=inc"},
		{Business: "专栏", Type: sum, Name: "专栏历史总量", Chart: "line", Param: "business=article&type=all"},
	}
	return list, err
}
// MngCount returns the count series for one dashboard chart.
func (s *Service) MngCount(ctx context.Context, c *model.MngCount) ([]*model.MngCountRes, error) {
	return s.dao.MngCount(ctx, c)
}
// MngPercent returns percentage stats for a dashboard chart, translating raw
// result names (numeric business ids or uids) into human-readable names
// where a mapping is available. Translation is best-effort: untranslatable
// entries keep their raw name.
func (s *Service) MngPercent(ctx context.Context, c *model.MngCount) (list []*model.MngPercentRes, err error) {
	list, err = s.dao.MngPercent(ctx, c)
	switch c.Business {
	case "log_audit_business":
		// replace numeric audit-log business ids with configured names
		for k, v := range list {
			if id, e := strconv.Atoi(v.Name); e == nil {
				if t, ok := s.dao.GetLogInfo("log_audit", id); ok {
					list[k].Name = t.Name
				}
			}
		}
	case "log_user_action_business":
		// same translation for user-action-log business ids
		for k, v := range list {
			if id, e := strconv.Atoi(v.Name); e == nil {
				if t, ok := s.dao.GetLogInfo("log_user_action", id); ok {
					list[k].Name = t.Name
				}
			}
		}
	case "log_audit_uid", "log_user_action_uid":
		uid := []string{}
		for _, v := range list {
			uid = append(uid, v.Name)
		}
		// the shadowed err keeps a failed Unames lookup from masking the
		// successfully fetched list — intentional best-effort
		if data, err := s.dao.Unames(ctx, uid); err == nil {
			for k, v := range list {
				if t, ok := data.Data[v.Name]; ok {
					list[k].Name = t
				}
			}
		}
	}
	return
}

View File

@@ -0,0 +1,199 @@
package service
import (
"context"
"database/sql"
"fmt"
"go-common/app/admin/main/search/model"
"go-common/library/ecode"
)
// BusinessAllV2 returns every v2 business row.
func (s *Service) BusinessAllV2(c context.Context) (list []*model.GFBusiness, err error) {
	list, err = s.dao.BusinessAllV2(c)
	return
}
// BusinessInfoV2 fetches one v2 business by name.
func (s *Service) BusinessInfoV2(c context.Context, name string) (info *model.GFBusiness, err error) {
	info, err = s.dao.BusinessInfoV2(c, name)
	return
}
// BusinessAdd inserts a new v2 business under the given parent.
func (s *Service) BusinessAdd(c context.Context, pid int64, name, description string) (id int64, err error) {
	id, err = s.dao.BusinessIns(c, pid, name, description)
	return
}
// BusinessUpdate updates a single whitelisted column of a v2 business row.
// Only the columns enumerated below may be written; any other column name is
// rejected with AccessDenied so callers cannot modify arbitrary fields.
// (Parameter "filed" renamed to "field" — a typo fix; Go callers are
// positional, so this is backward compatible.)
func (s *Service) BusinessUpdate(c context.Context, name, field, value string) (id int64, err error) {
	switch field {
	case "data_conf", "index_conf", "business_conf", "description", "state":
		// whitelisted — fall through to the update
	default:
		err = ecode.AccessDenied
		return
	}
	return s.dao.BusinessUpdate(c, name, field, value)
}
// AssetDBTables lists all database/table assets.
func (s *Service) AssetDBTables(c context.Context) (list []*model.GFAsset, err error) {
	list, err = s.dao.AssetDBTables(c)
	return
}
// AssetDBConnect connects to a MySQL instance with the given credentials and
// returns the schema names it exposes.
func (s *Service) AssetDBConnect(c context.Context, host, port, user, password string) (dbNames []string, err error) {
	dsn := fmt.Sprintf("%s:%s@tcp(%s:%s)/?charset=utf8mb4,utf8", user, password, host, port)
	db, err := sql.Open("mysql", dsn)
	if err != nil {
		return
	}
	defer db.Close()
	rows, err := db.Query("show databases")
	if err != nil {
		return
	}
	defer rows.Close()
	dbNames = make([]string, 0)
	for rows.Next() {
		var name string
		if err = rows.Scan(&name); err != nil {
			return
		}
		dbNames = append(dbNames, name)
	}
	err = rows.Err()
	return
}
// AssetDBAdd registers a database asset after verifying that the instance is
// reachable and actually contains a schema with the given name.
func (s *Service) AssetDBAdd(c context.Context, name, description, host, port, user, password string) (id int64, err error) {
	dbNames, err := s.AssetDBConnect(c, host, port, user, password)
	if err != nil {
		return
	}
	found := false
	for _, dbName := range dbNames {
		if dbName == name {
			found = true
			break
		}
	}
	if !found {
		// schema not present on the instance: refuse to register
		err = ecode.AccessDenied
		return
	}
	dsn := fmt.Sprintf(model.DBDsnFormat, user, password, host, port, name)
	return s.dao.AssetDBIns(c, name, description, dsn)
}
// AssetTableAdd registers a table asset named "db.regex".
func (s *Service) AssetTableAdd(c context.Context, db, regex, fields, description string) (id int64, err error) {
	return s.dao.AssetTableIns(c, db+"."+regex, db, regex, fields, description)
}
// UpdateAssetTable replaces the field list of a table asset.
func (s *Service) UpdateAssetTable(c context.Context, name, fields string) (id int64, err error) {
	id, err = s.dao.UpdateAssetTable(c, name, fields)
	return
}
// AssetInfoV2 fetches one v2 asset by name.
func (s *Service) AssetInfoV2(c context.Context, name string) (info *model.GFAsset, err error) {
	info, err = s.dao.Asset(c, name)
	return
}
// AssetShowTables connects through the stored DSN of a database asset and
// lists the tables in that schema.
func (s *Service) AssetShowTables(c context.Context, dbName string) (tables []string, err error) {
	asset, err := s.dao.Asset(c, dbName)
	if err != nil {
		return
	}
	db, err := sql.Open("mysql", asset.DSN)
	if err != nil {
		return
	}
	defer db.Close()
	rows, err := db.Query("show tables")
	if err != nil {
		return
	}
	defer rows.Close()
	tables = make([]string, 0)
	for rows.Next() {
		var name string
		if err = rows.Scan(&name); err != nil {
			return
		}
		tables = append(tables, name)
	}
	err = rows.Err()
	return
}
// AssetTableFields introspects every table matching db.regex and returns the
// union of their columns, requiring each column to appear in the same number
// of tables — i.e. all matched tables must share one identical schema.
// count is the number of matched tables; the shared primary-key column, if
// any, is flagged on the result.
func (s *Service) AssetTableFields(c context.Context, dbName, regex string) (fs []*model.TableField, count int, err error) {
	asset, err := s.dao.Asset(c, dbName)
	if err != nil {
		return
	}
	db, err := sql.Open("mysql", asset.DSN)
	if err != nil {
		return
	}
	defer db.Close()
	// anchor the user-supplied pattern so it matches whole table names only
	regex = fmt.Sprintf("^%s$", regex)
	rows, err := db.Query("SELECT COLUMN_NAME,DATA_TYPE,count(1) FROM information_schema.COLUMNS WHERE table_name REGEXP ? GROUP BY COLUMN_NAME,DATA_TYPE", regex)
	if err != nil {
		return
	}
	defer rows.Close()
	fs = make([]*model.TableField, 0)
	for rows.Next() {
		f := new(model.TableField)
		if err = rows.Scan(&f.Name, &f.Type, &f.Count); err != nil {
			return nil, 0, err
		}
		fs = append(fs, f)
	}
	if err = rows.Err(); err != nil {
		return
	}
	if len(fs) == 0 {
		err = ecode.NothingFound
		return
	}
	// every column must occur in the same number of tables; a mismatch means
	// the matched tables do not share one schema, which is treated as "no
	// usable result"
	for _, f := range fs {
		if fs[0].Count != f.Count {
			err = ecode.NothingFound
			return
		}
		count = f.Count
	}
	// NOTE(review): if the tables have no primary key, Scan returns
	// sql.ErrNoRows and that error is returned alongside the populated field
	// list — confirm callers expect a PK to be mandatory.
	row := db.QueryRow("SELECT COLUMN_NAME FROM information_schema.KEY_COLUMN_USAGE WHERE table_name REGEXP ? AND CONSTRAINT_NAME='PRIMARY' GROUP BY CONSTRAINT_NAME LIMIT 1", regex)
	var primaryCo string
	err = row.Scan(&primaryCo)
	for k, v := range fs {
		if v.Name == primaryCo {
			fs[k].Primary = true
		}
	}
	return fs, count, err
}
// ClusterOwners maps each configured ES cluster to its owner list, keyed by
// the cluster's configured name (falling back to the config map key).
func (s *Service) ClusterOwners() map[string]string {
	owners := map[string]string{
		// fallback contacts for clusters without an explicit owner entry
		"default": "guanhuaxin,daizhichen,libingqi,zhapuyu",
	}
	for name, es := range s.c.Es {
		if es.Owner == "" {
			continue
		}
		key := name
		if es.Cluster != "" {
			key = es.Cluster
		}
		owners[key] = es.Owner
	}
	return owners
}

View File

@@ -0,0 +1,71 @@
package service
import (
"context"
"testing"
. "github.com/smartystreets/goconvey/convey"
)
// Test_AssetDBConnect verifies schema listing against the test MySQL host.
func Test_AssetDBConnect(t *testing.T) {
	Convey("AssetDBConnect", t, WithService(func(s *Service) {
		res, err := svr.AssetDBConnect(context.Background(), "172.16.33.205", "3308", "test", "test")
		So(err, ShouldBeNil)
		So(res, ShouldNotBeNil)
	}))
}
// Test_AssetDBAdd verifies database-asset registration against the test host.
func Test_AssetDBAdd(t *testing.T) {
	Convey("AssetDBAdd", t, WithService(func(s *Service) {
		res, err := svr.AssetDBAdd(context.Background(), "bilibili_reply", "评论数据库", "172.16.33.205", "3308", "test", "test")
		So(err, ShouldBeNil)
		So(res, ShouldNotBeNil)
	}))
}
// Test_AssetTableFields verifies schema introspection over sharded tables.
func Test_AssetTableFields(t *testing.T) {
	Convey("AssetTableFields", t, WithService(func(s *Service) {
		res, _, err := svr.AssetTableFields(context.Background(), "bilibili_reply", "reply_([0-9]{1,3})")
		So(err, ShouldBeNil)
		So(res, ShouldNotBeNil)
	}))
}
// Test_BusinessUpdate verifies a whitelisted-column update on a business row.
func Test_BusinessUpdate(t *testing.T) {
	Convey("BusinessUpdate", t, WithService(func(s *Service) {
		res, err := svr.BusinessUpdate(context.Background(), "dm", "description", "弹幕11")
		So(err, ShouldBeNil)
		So(res, ShouldNotBeNil)
	}))
}

View File

@@ -0,0 +1,146 @@
package service
import (
"context"
"fmt"
"strconv"
"strings"
"time"
"go-common/app/admin/main/search/dao"
"go-common/app/admin/main/search/model"
"go-common/library/log"
)
// loadQueryConfproc keeps the query-config cache fresh: retry after 1s on a
// failed load, otherwise refresh once a minute.
// NOTE(review): runs forever with no stop/cancel signal — the goroutine lives
// for the whole process lifetime; confirm that is intended.
func (s *Service) loadQueryConfproc() {
	for {
		if err := s.loadQueryConf(); err != nil {
			time.Sleep(time.Second)
			continue
		}
		time.Sleep(time.Minute)
	}
}
// loadQueryConf pulls query configuration from storage and, when non-empty,
// swaps it into the in-memory cache (keeping the old snapshot otherwise).
func (s *Service) loadQueryConf() error {
	confs, err := s.dao.QueryConf(context.Background())
	if err != nil {
		return err
	}
	if len(confs) > 0 {
		s.queryConf = confs
	}
	return nil
}
// CheckQueryConf validates the business / query-body combination and binds
// the resolved app config onto sp.AppIDConf.
//
// Fixes: the old `len(indices) == 0` branch was dead code (strings.Split
// always returns at least one element), replaced with an explicit empty-From
// check so a missing index name fails with the intended message; "indecies"
// typos corrected.
func (s *Service) CheckQueryConf(c context.Context, sp *model.QueryParams) (err error) {
	app, ok := s.queryConf[sp.Business]
	// hard-coded conf (model.QueryConf) overrides the DB-loaded conf
	if app2, ok2 := model.QueryConf[sp.Business]; ok2 {
		app = app2
		ok = true
	}
	if !ok {
		err = fmt.Errorf("sp.Business(%s) not exist in queryConf", sp.Business)
		return
	}
	if app.ESCluster == "" {
		err = fmt.Errorf("app(%+v) escluster is empty", app)
		return
	}
	if sp.QueryBody.From == "" {
		err = fmt.Errorf("index name is required")
		return
	}
	max := 1
	if app.MaxIndicesNum > 0 {
		max = app.MaxIndicesNum
	}
	indices := strings.Split(sp.QueryBody.From, ",")
	if len(indices) > max {
		err = fmt.Errorf("too many indices(%v)", indices)
		return
	}
	// every requested index must belong to this app's namespace
	for _, index := range indices {
		if !strings.Contains(index, app.IndexPrefix) {
			err = fmt.Errorf("invalid index name(%s)", index)
			return
		}
	}
	sp.AppIDConf = app
	return
}
// QueryBasic runs the generic query path. For the two log businesses the
// target ES cluster is first re-resolved from the business id embedded at a
// fixed position of the underscore-separated index name.
func (s *Service) QueryBasic(c context.Context, sp *model.QueryParams) (res *model.QueryResult, debug *model.QueryDebugResult, err error) {
	switch sp.Business {
	case "log_audit":
		s.resolveLogCluster(sp, 2)
	case "log_user_action":
		s.resolveLogCluster(sp, 3)
	}
	bQuery, qbDebug := s.dao.QueryBasic(c, sp)
	if res, debug, err = s.dao.QueryResult(c, bQuery, sp, qbDebug); err != nil {
		dao.PromError(fmt.Sprintf("es:%s 搜索失败", sp.Business), "s.dao.QueryBasic(%v) error(%v)", sp, err)
	}
	return
}

// resolveLogCluster overrides sp.AppIDConf.ESCluster with the cluster of the
// log business whose numeric id sits at segment `pos` of sp.QueryBody.From.
// Best-effort: a non-numeric segment is logged and treated as id 0, matching
// the previous inline behavior.
func (s *Service) resolveLogCluster(sp *model.QueryParams, pos int) {
	segs := strings.Split(sp.QueryBody.From, "_")
	if len(segs) <= pos {
		return
	}
	logID, convErr := strconv.Atoi(segs[pos])
	if convErr != nil {
		log.Error("strconv.Atoi(%s) error(%v)", segs[pos], convErr)
	}
	if logBusiness, ok := s.dao.GetLogInfo(sp.Business, logID); ok {
		sp.AppIDConf.ESCluster = logBusiness.IndexCluster
	}
}
// QueryExtra dispatches businesses that need a bespoke query implementation
// instead of the generic QueryBasic path. Unknown businesses return all-nil.
func (s *Service) QueryExtra(c context.Context, sp *model.QueryParams) (res *model.QueryResult, debug *model.QueryDebugResult, err error) {
	switch sp.Business {
	case "archive_video_score":
		res, debug, err = s.dao.ArchiveVideoScore(c, sp)
		if err != nil {
			dao.PromError(fmt.Sprintf("es:%s 搜索失败", sp.Business), "s.dao.QueryExtra(%v) error(%v)", sp, err)
		}
	case "archive_score":
		res, debug, err = s.dao.ArchiveScore(c, sp)
		if err != nil {
			dao.PromError(fmt.Sprintf("es:%s 搜索失败", sp.Business), "s.dao.QueryExtra(%v) error(%v)", sp, err)
		}
	case "task_qa_random":
		res, debug, err = s.dao.TaskQaRandom(c, sp)
		if err != nil {
			dao.PromError(fmt.Sprintf("es:%s 搜索失败", sp.Business), "s.dao.QueryExtra.TaskQaRandom(%v) error(%v)", sp, err)
		}
	case "esports_contests_date":
		res, debug, err = s.dao.EsportsContestsDate(c, sp)
		if err != nil {
			dao.PromError(fmt.Sprintf("es:%s 搜索失败", sp.Business), "s.dao.QueryExtra.EsportsContestsDate(%v) error(%v)", sp, err)
		}
	case "creative_archive_search":
		res, debug, err = s.dao.CreativeArchiveSearch(c, sp)
		if err != nil {
			dao.PromError(fmt.Sprintf("es:%s 搜索失败", sp.Business), "s.dao.QueryExtra.CreativeArchiveSearch(%v) error(%v)", sp, err)
		}
	case "creative_archive_staff":
		res, debug, err = s.dao.CreativeArchiveStaff(c, sp)
		if err != nil {
			dao.PromError(fmt.Sprintf("es:%s 搜索失败", sp.Business), "s.dao.QueryExtra.CreativeArchiveStaff(%v) error(%v)", sp, err)
		}
	case "creative_archive_apply":
		res, debug, err = s.dao.CreativeArchiveApply(c, sp)
		if err != nil {
			dao.PromError(fmt.Sprintf("es:%s 搜索失败", sp.Business), "s.dao.QueryExtra.CreativeArchiveApply(%v) error(%v)", sp, err)
		}
	case "dm_history":
		// dm_history uses the ES scroll API rather than a one-shot query
		res, debug, err = s.dao.Scroll(c, sp)
		if err != nil {
			dao.PromError(fmt.Sprintf("es:%s 搜索失败", sp.Business), "s.dao.QueryExtra.Scroll(%v) error(%v)", sp, err)
		}
	}
	return
}

View File

@@ -0,0 +1,32 @@
package service
import (
"context"
"go-common/app/admin/main/search/conf"
"go-common/app/admin/main/search/dao"
"go-common/app/admin/main/search/model"
)
// Service struct of service.
type Service struct {
	c   *conf.Config
	dao *dao.Dao
	// queryConf caches per-business query configuration, keyed by business
	// name; refreshed in the background by loadQueryConfproc.
	queryConf map[string]*model.QueryConfDetail
}
// New create service instance and return.
func New(c *conf.Config) *Service {
	s := &Service{
		c:   c,
		dao: dao.New(c),
	}
	// warm the query-conf cache once synchronously, then keep it fresh in
	// the background
	s.loadQueryConf()
	go s.loadQueryConfproc()
	return s
}
// Ping reports whether the underlying storage is healthy.
func (s *Service) Ping(c context.Context) (err error) {
	err = s.dao.Ping(c)
	return
}

View File

@@ -0,0 +1,35 @@
package service
import (
"flag"
"os"
"path/filepath"
"testing"
"go-common/app/admin/main/search/conf"
)
var (
	// svr is the package-wide Service shared by all tests in this package,
	// initialized once in TestMain.
	svr *Service
)
// TestMain wires the test config into the flag set, builds the shared
// Service, and runs the package tests.
func TestMain(m *testing.M) {
	dir, _ := filepath.Abs("../cmd/search-admin-test.toml")
	if err := flag.Set("conf", dir); err != nil {
		panic(err)
	}
	if err := conf.Init(); err != nil {
		panic(err)
	}
	svr = New(conf.Conf)
	os.Exit(m.Run())
}
// WithService hands the shared Service to f, for use inside Convey blocks.
func WithService(f func(s *Service)) func() {
	return func() { f(svr) }
}

View File

@@ -0,0 +1,33 @@
package service
import (
"context"
"fmt"
"go-common/app/admin/main/search/dao"
"go-common/library/ecode"
)
// Update update some indices.
func (s *Service) Update(c context.Context, esName string, bulkData []dao.BulkItem) error {
	err := s.dao.UpdateBulk(c, esName, bulkData)
	if err == nil {
		return nil
	}
	dao.PromError(fmt.Sprintf("es:%s 更新失败", esName), "s.dao.updateBulk error(%v) ", err)
	return ecode.SearchUpdateIndexFailed
}
// MapUpdate map update.
// Bulk-updates map-shaped documents into the fixed "ssd_archive" target.
// NOTE(review): unlike Update/Index this neither records a prom error nor
// maps failures to an ecode — confirm callers expect the raw dao error.
func (s *Service) MapUpdate(c context.Context, p []dao.BulkMapItem) (err error) {
	err = s.dao.UpdateMapBulk(c, "ssd_archive", p)
	return
}
// Index bulk-indexes documents into the named ES target.
func (s *Service) Index(c context.Context, esName string, bulkData []dao.BulkItem) error {
	err := s.dao.BulkIndex(c, esName, bulkData)
	if err == nil {
		return nil
	}
	dao.PromError(fmt.Sprintf("es:%s 写入失败", esName), "s.dao.BulkIndex error(%v) ", err)
	return ecode.SearchUpdateIndexFailed
}

View File

@@ -0,0 +1 @@
package service

View File

@@ -0,0 +1,51 @@
package service
import (
"context"
"fmt"
"strings"
"go-common/app/admin/main/search/dao"
"go-common/app/admin/main/search/model"
"go-common/library/ecode"
"go-common/library/log"
)
// Upsert upsert docs.
// dataBody maps index name -> documents; each document becomes an UpsertBody
// entry (index name/type/id + payload) which is bulk-upserted into the
// business's configured ES cluster.
func (s *Service) Upsert(c context.Context, up *model.UpsertParams, dataBody map[string][]model.MapData) (err error) {
	// resolve business config: DB-loaded conf first, hard-coded conf wins
	app, ok := s.queryConf[up.Business]
	if app2, ok2 := model.QueryConf[up.Business]; ok2 {
		app = app2
		ok = true
	}
	if !ok {
		err = fmt.Errorf("up.Business(%s) not exists in queryConf", up.Business)
		return
	}
	if app.ESCluster == "" {
		err = fmt.Errorf("app(%+v) escluster is empty", app)
		return
	}
	// dataBody to upsertBody
	up.UpsertBody = []model.UpsertBody{}
	for indexName, docs := range dataBody {
		// indices outside this app's namespace are logged and skipped,
		// not treated as fatal
		if !strings.Contains(indexName, app.IndexPrefix) {
			log.Error("invalid indexName (%s)", indexName)
			continue
		}
		for _, doc := range docs {
			indexID := doc.StrID(app.IndexID)
			// TODO: report documents with a missing/invalid id upstream
			// instead of silently dropping them here
			if indexID == "" {
				continue
			}
			upsert := model.UpsertBody{IndexName: indexName, IndexType: app.IndexType, IndexID: indexID, Doc: doc}
			up.UpsertBody = append(up.UpsertBody, upsert)
		}
	}
	if err = s.dao.UpsertBulk(c, app.ESCluster, up); err != nil {
		dao.PromError(fmt.Sprintf("es:%s 更新失败", app.ESCluster), "s.dao.UpsertBulk error(%v) ", err)
		err = ecode.SearchUpdateIndexFailed
	}
	return
}