Create & Init Project...
This commit is contained in:
59
app/service/main/search/dao/BUILD
Normal file
59
app/service/main/search/dao/BUILD
Normal file
@ -0,0 +1,59 @@
|
||||
# Bazel build rules for the search-service dao package.
# Tagged "automanaged": regenerated by tooling — avoid hand edits.

package(default_visibility = ["//visibility:public"])

load(
    "@io_bazel_rules_go//go:def.bzl",
    "go_test",
    "go_library",
)

# Unit tests for the dao package; embedded in the library target.
go_test(
    name = "go_default_test",
    srcs = ["dao_test.go"],
    embed = [":go_default_library"],
    rundir = ".",
    tags = ["automanaged"],
    deps = [
        "//app/service/main/search/conf:go_default_library",
        "//app/service/main/search/model:go_default_library",
        "//vendor/github.com/smartystreets/goconvey/convey:go_default_library",
    ],
)

# The dao library: ES query builders plus SMS alerting helpers.
go_library(
    name = "go_default_library",
    srcs = [
        "dao.go",
        "dm_date.go",
        "dm_history.go",
        "dm_search.go",
        "es.go",
        "pgc.go",
        "reply.go",
        "sms.go",
    ],
    importpath = "go-common/app/service/main/search/dao",
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
    deps = [
        "//app/service/main/search/conf:go_default_library",
        "//app/service/main/search/model:go_default_library",
        "//library/ecode:go_default_library",
        "//library/log:go_default_library",
        "//library/stat/prom:go_default_library",
        "//vendor/gopkg.in/olivere/elastic.v5:go_default_library",
    ],
)

# Source globs used by the repo-wide srcs aggregation.
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)

filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)
|
94
app/service/main/search/dao/dao.go
Normal file
94
app/service/main/search/dao/dao.go
Normal file
@ -0,0 +1,94 @@
|
||||
package dao
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"go-common/app/service/main/search/conf"
|
||||
"go-common/library/log"
|
||||
"go-common/library/stat/prom"
|
||||
|
||||
elastic "gopkg.in/olivere/elastic.v5"
|
||||
)
|
||||
|
||||
// Dao holds the data-access resources of the search service: the loaded
// configuration, one elastic client per configured cluster, and the SMS
// alert helper used when a cluster cannot be reached.
type Dao struct {
	// conf: service configuration (cluster addresses, SMS settings).
	c *conf.Config

	// esPool: elastic clients keyed by cluster name from conf (see newEsPool).
	esPool map[string]*elastic.Client
	// sms: rate-limited SMS alert sender (see sms.go).
	sms *sms
}
|
||||
|
||||
// New init dao
|
||||
func New(c *conf.Config) (d *Dao) {
|
||||
d = &Dao{
|
||||
c: c,
|
||||
}
|
||||
d.sms = newSMS(d)
|
||||
// cluster
|
||||
d.esPool = newEsPool(c, d)
|
||||
return
|
||||
}
|
||||
|
||||
// BulkItem is implemented by documents that can be written to ES in bulk;
// the implementing value itself is serialized as the document body
// (see UpdateBulk).
type BulkItem interface {
	IndexName() string
	IndexType() string
	IndexID() string
}
|
||||
|
||||
// BulkMapItem is implemented by documents whose bulk update body is a
// partial field map rather than the whole value; PField supplies the fields
// to upsert (see UpdateMapBulk).
type BulkMapItem interface {
	IndexName() string
	IndexType() string
	IndexID() string
	PField() map[string]interface{}
}
|
||||
|
||||
// newEsPool builds one elastic client per cluster configured in c.Es.
// Clusters that fail to connect are omitted from the returned map; the
// failure is counted in prometheus and reported via a rate-limited SMS.
func newEsPool(c *conf.Config, d *Dao) (esCluster map[string]*elastic.Client) {
	esCluster = make(map[string]*elastic.Client)
	for esName, e := range c.Es {
		if client, err := elastic.NewClient(elastic.SetURL(e.Addr...)); err == nil {
			esCluster[esName] = client
		} else {
			PromError("es:集群连接失败", "cluster: %s, %v", esName, err)
			if err := d.SendSMS(fmt.Sprintf("[search-job]%s集群连接失败", esName)); err != nil {
				PromError("es:集群连接短信失败", "cluster: %s, %v", esName, err)
			}
		}
	}
	return
}
|
||||
|
||||
// PromError prometheus error count.
|
||||
func PromError(name, format string, args ...interface{}) {
|
||||
prom.BusinessErrCount.Incr(name)
|
||||
log.Error(format, args...)
|
||||
}
|
||||
|
||||
// Ping health of db.
|
||||
func (d *Dao) Ping(c context.Context) (err error) {
|
||||
if err = d.pingESCluster(c); err != nil {
|
||||
PromError("es:ping", "Ping %v", err)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// pingESCluster ping es cluster
|
||||
func (d *Dao) pingESCluster(ctx context.Context) (err error) {
|
||||
for name := range d.c.Es {
|
||||
client, ok := d.esPool[name]
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
_, _, err = client.Ping(d.c.Es["replyExternal"].Addr[0]).Do(ctx)
|
||||
if err != nil {
|
||||
PromError("archiveESClient:Ping", "dao.pingESCluster error(%v) ", err)
|
||||
return
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
60
app/service/main/search/dao/dao_test.go
Normal file
60
app/service/main/search/dao/dao_test.go
Normal file
@ -0,0 +1,60 @@
|
||||
package dao
|
||||
|
||||
import (
|
||||
"context"
|
||||
"flag"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"go-common/app/service/main/search/conf"
|
||||
"go-common/app/service/main/search/model"
|
||||
|
||||
. "github.com/smartystreets/goconvey/convey"
|
||||
)
|
||||
|
||||
func WithDao(f func(d *Dao)) func() {
|
||||
return func() {
|
||||
dir, _ := filepath.Abs("../cmd/goconvey.toml")
|
||||
flag.Set("conf", dir)
|
||||
flag.Parse()
|
||||
conf.Init()
|
||||
d := New(conf.Conf)
|
||||
f(d)
|
||||
}
|
||||
}
|
||||
|
||||
func Test_PgcMedia(t *testing.T) {
|
||||
Convey("open app", t, WithDao(func(d *Dao) {
|
||||
var (
|
||||
err error
|
||||
c = context.TODO()
|
||||
p *model.PgcMediaParams
|
||||
)
|
||||
_, err = d.PgcMedia(c, p)
|
||||
So(err, ShouldBeNil)
|
||||
}))
|
||||
}
|
||||
|
||||
func Test_ReplyRecord(t *testing.T) {
|
||||
Convey("reply record", t, WithDao(func(d *Dao) {
|
||||
var (
|
||||
err error
|
||||
c = context.TODO()
|
||||
p *model.ReplyRecordParams
|
||||
)
|
||||
_, err = d.ReplyRecord(c, p)
|
||||
So(err, ShouldBeNil)
|
||||
}))
|
||||
}
|
||||
|
||||
func Test_DmHistory(t *testing.T) {
|
||||
Convey("DmHistory", t, WithDao(func(d *Dao) {
|
||||
var (
|
||||
err error
|
||||
c = context.TODO()
|
||||
p *model.DmHistoryParams
|
||||
)
|
||||
_, err = d.DmHistory(c, p)
|
||||
So(err, ShouldBeNil)
|
||||
}))
|
||||
}
|
35
app/service/main/search/dao/dm_date.go
Normal file
35
app/service/main/search/dao/dm_date.go
Normal file
@ -0,0 +1,35 @@
|
||||
package dao
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"go-common/app/service/main/search/model"
|
||||
|
||||
elastic "gopkg.in/olivere/elastic.v5"
|
||||
)
|
||||
|
||||
func (d *Dao) DmDateSearch(c context.Context, p *model.DmDateParams) (res *model.SearchResult, err error) {
|
||||
query := elastic.NewBoolQuery()
|
||||
indexName := "dm_date_" + strings.Replace(p.Month, "-", "_", -1)
|
||||
if p.Bsp.KW != "" {
|
||||
query = query.Must(elastic.NewRegexpQuery(p.Bsp.KwFields[0], ".*"+p.Bsp.KW+".*"))
|
||||
}
|
||||
if p.Oid != -1 {
|
||||
query = query.Filter(elastic.NewTermQuery("oid", p.Oid))
|
||||
}
|
||||
if p.Month != "" {
|
||||
query = query.Filter(elastic.NewTermQuery("month", p.Month))
|
||||
}
|
||||
if p.MonthFrom != "" {
|
||||
query = query.Filter(elastic.NewRangeQuery("month").Gte(p.MonthFrom))
|
||||
}
|
||||
if p.MonthTo != "" {
|
||||
query = query.Filter(elastic.NewRangeQuery("month").Lte(p.MonthTo))
|
||||
}
|
||||
if res, err = d.searchResult(c, "dmExternal", indexName, query, p.Bsp); err != nil {
|
||||
PromError(fmt.Sprintf("es:%s ", p.Bsp.AppID), "%v", err)
|
||||
}
|
||||
return
|
||||
}
|
42
app/service/main/search/dao/dm_history.go
Normal file
42
app/service/main/search/dao/dm_history.go
Normal file
@ -0,0 +1,42 @@
|
||||
package dao
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"go-common/app/service/main/search/model"
|
||||
|
||||
elastic "gopkg.in/olivere/elastic.v5"
|
||||
)
|
||||
|
||||
func (d *Dao) DmHistory(c context.Context, p *model.DmHistoryParams) (res *model.SearchResult, err error) {
|
||||
var (
|
||||
query = elastic.NewBoolQuery()
|
||||
indexName = fmt.Sprintf("dm_search_%03d", p.Oid%1000)
|
||||
)
|
||||
if p.Bsp.KW != "" {
|
||||
query = query.Must(elastic.NewMultiMatchQuery(p.Bsp.KW, p.Bsp.KwFields...).Type("best_fields").TieBreaker(0.6))
|
||||
}
|
||||
if p.Oid != -1 {
|
||||
query = query.Filter(elastic.NewTermQuery("oidstr", p.Oid))
|
||||
}
|
||||
if len(p.States) > 0 {
|
||||
interfaceSlice := make([]interface{}, len(p.States))
|
||||
for k, m := range p.States {
|
||||
interfaceSlice[k] = m
|
||||
}
|
||||
query = query.Filter(elastic.NewTermsQuery("state", interfaceSlice...))
|
||||
}
|
||||
if p.CtimeFrom != "" {
|
||||
query = query.Filter(elastic.NewRangeQuery("ctime").Gte(p.CtimeFrom))
|
||||
}
|
||||
if p.CtimeTo != "" {
|
||||
query = query.Filter(elastic.NewRangeQuery("ctime").Lte(p.CtimeTo))
|
||||
}
|
||||
|
||||
fmt.Println(indexName)
|
||||
if res, err = d.searchResult(c, "dmExternal", indexName, query, p.Bsp); err != nil {
|
||||
PromError(fmt.Sprintf("es:%s ", p.Bsp.AppID), "%v", err)
|
||||
}
|
||||
return
|
||||
}
|
63
app/service/main/search/dao/dm_search.go
Normal file
63
app/service/main/search/dao/dm_search.go
Normal file
@ -0,0 +1,63 @@
|
||||
package dao
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"go-common/app/service/main/search/model"
|
||||
|
||||
elastic "gopkg.in/olivere/elastic.v5"
|
||||
)
|
||||
|
||||
// DmSearch .
|
||||
func (d *Dao) DmSearch(c context.Context, p *model.DmSearchParams) (res *model.SearchResult, err error) {
|
||||
var (
|
||||
query = elastic.NewBoolQuery()
|
||||
indexName = fmt.Sprintf("dm_search_%03d", p.Oid%1000)
|
||||
)
|
||||
if p.Bsp.KW != "" {
|
||||
query = query.Must(elastic.NewRegexpQuery(p.Bsp.KwFields[0], ".*"+p.Bsp.KW+".*"))
|
||||
}
|
||||
if p.Oid != -1 {
|
||||
query = query.Filter(elastic.NewTermQuery("oid", p.Oid))
|
||||
}
|
||||
if p.Mid != -1 {
|
||||
query = query.Filter(elastic.NewTermQuery("mid", p.Mid))
|
||||
}
|
||||
if p.Mode != -1 {
|
||||
query = query.Filter(elastic.NewTermQuery("mode", p.Mode))
|
||||
}
|
||||
if p.Pool != -1 {
|
||||
query = query.Filter(elastic.NewTermQuery("pool", p.Pool))
|
||||
}
|
||||
if p.Progress != -1 {
|
||||
query = query.Filter(elastic.NewTermQuery("progress", p.Progress))
|
||||
}
|
||||
if len(p.States) > 0 {
|
||||
interfaceSlice := make([]interface{}, len(p.States))
|
||||
for k, m := range p.States {
|
||||
interfaceSlice[k] = m
|
||||
}
|
||||
query = query.Filter(elastic.NewTermsQuery("state", interfaceSlice...))
|
||||
}
|
||||
if p.Type != -1 {
|
||||
query = query.Filter(elastic.NewTermQuery("type", p.Type))
|
||||
}
|
||||
if len(p.AttrFormat) > 0 {
|
||||
interfaceSlice := make([]interface{}, len(p.AttrFormat))
|
||||
for k, m := range p.AttrFormat {
|
||||
interfaceSlice[k] = m
|
||||
}
|
||||
query = query.Filter(elastic.NewTermsQuery("attr_format", interfaceSlice...))
|
||||
}
|
||||
if p.CtimeFrom != "" {
|
||||
query = query.Filter(elastic.NewRangeQuery("ctime").Gte(p.CtimeFrom))
|
||||
}
|
||||
if p.CtimeTo != "" {
|
||||
query = query.Filter(elastic.NewRangeQuery("ctime").Lte(p.CtimeTo))
|
||||
}
|
||||
if res, err = d.searchResult(c, "dmExternal", indexName, query, p.Bsp); err != nil {
|
||||
PromError(fmt.Sprintf("es:%s ", p.Bsp.AppID), "%v", err)
|
||||
}
|
||||
return
|
||||
}
|
129
app/service/main/search/dao/es.go
Normal file
129
app/service/main/search/dao/es.go
Normal file
@ -0,0 +1,129 @@
|
||||
package dao
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"go-common/app/service/main/search/model"
|
||||
"go-common/library/ecode"
|
||||
"go-common/library/log"
|
||||
|
||||
elastic "gopkg.in/olivere/elastic.v5"
|
||||
)
|
||||
|
||||
// UpdateMapBulk .
|
||||
func (d *Dao) UpdateMapBulk(c context.Context, esName string, bulkData []BulkMapItem) (err error) {
|
||||
if _, ok := d.esPool[esName]; !ok {
|
||||
PromError(fmt.Sprintf("es:集群不存在%s", esName), "s.dao.searchResult indexName:%s", esName)
|
||||
err = ecode.SearchUpdateIndexFailed
|
||||
return
|
||||
}
|
||||
bulkRequest := d.esPool[esName].Bulk()
|
||||
for _, b := range bulkData {
|
||||
request := elastic.NewBulkUpdateRequest().Index(b.IndexName()).Type(b.IndexType()).Id(b.IndexID()).Doc(b.PField()).DocAsUpsert(true)
|
||||
bulkRequest.Add(request)
|
||||
}
|
||||
if _, err = bulkRequest.Do(context.TODO()); err != nil {
|
||||
log.Error("esName(%s) bulk error(%v)", esName, err)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (d *Dao) UpdateBulk(c context.Context, esName string, bulkData []BulkItem) (err error) {
|
||||
if _, ok := d.esPool[esName]; !ok {
|
||||
PromError(fmt.Sprintf("es:集群不存在%s", esName), "s.dao.searchResult indexName:%s", esName)
|
||||
err = ecode.SearchUpdateIndexFailed
|
||||
return
|
||||
}
|
||||
bulkRequest := d.esPool[esName].Bulk()
|
||||
for _, b := range bulkData {
|
||||
request := elastic.NewBulkUpdateRequest().Index(b.IndexName()).Type(b.IndexType()).Id(b.IndexID()).Doc(b).DocAsUpsert(true)
|
||||
//fmt.Println(request)
|
||||
bulkRequest.Add(request)
|
||||
}
|
||||
if bulkRequest.NumberOfActions() == 0 {
|
||||
return
|
||||
}
|
||||
if _, err = bulkRequest.Do(context.TODO()); err != nil {
|
||||
log.Error("esName(%s) bulk error(%v)", esName, err)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// searchResult get result from ES.
|
||||
func (d *Dao) searchResult(c context.Context, esClusterName, indexName string, query elastic.Query, bsp *model.BasicSearchParams) (res *model.SearchResult, err error) {
|
||||
res = &model.SearchResult{Debug: ""}
|
||||
if bsp.Debug {
|
||||
var src interface{}
|
||||
if src, err = query.Source(); err == nil {
|
||||
var data []byte
|
||||
if data, err = json.Marshal(src); err == nil {
|
||||
res = &model.SearchResult{Debug: string(data)}
|
||||
}
|
||||
}
|
||||
}
|
||||
if _, ok := d.esPool[esClusterName]; !ok {
|
||||
PromError(fmt.Sprintf("es:集群不存在%s", esClusterName), "s.dao.searchResult indexName:%s", indexName)
|
||||
res = &model.SearchResult{Debug: fmt.Sprintf("es:集群不存在%s, %s", esClusterName, res.Debug)}
|
||||
return
|
||||
}
|
||||
// multi sort
|
||||
sorterSlice := []elastic.Sorter{}
|
||||
if bsp.KW != "" {
|
||||
sorterSlice = append(sorterSlice, elastic.NewScoreSort().Desc())
|
||||
}
|
||||
for i, d := range bsp.Order {
|
||||
if len(bsp.Sort) < i+1 {
|
||||
if bsp.Sort[0] == "desc" {
|
||||
sorterSlice = append(sorterSlice, elastic.NewFieldSort(d).Desc())
|
||||
} else {
|
||||
sorterSlice = append(sorterSlice, elastic.NewFieldSort(d).Asc())
|
||||
}
|
||||
} else {
|
||||
if bsp.Sort[i] == "desc" {
|
||||
sorterSlice = append(sorterSlice, elastic.NewFieldSort(d).Desc())
|
||||
} else {
|
||||
sorterSlice = append(sorterSlice, elastic.NewFieldSort(d).Asc())
|
||||
}
|
||||
}
|
||||
}
|
||||
fsc := elastic.NewFetchSourceContext(true).Include(bsp.Source...)
|
||||
searchResult, err := d.esPool[esClusterName].
|
||||
Search().Index(indexName).
|
||||
Query(query).
|
||||
SortBy(sorterSlice...).
|
||||
From((bsp.Pn - 1) * bsp.Ps).
|
||||
Size(bsp.Ps).
|
||||
Pretty(true).
|
||||
FetchSourceContext(fsc).
|
||||
Do(c)
|
||||
if err != nil {
|
||||
PromError(fmt.Sprintf("es:执行查询失败%s ", esClusterName), "%v", err)
|
||||
res = &model.SearchResult{Debug: res.Debug + "es:执行查询失败"}
|
||||
return
|
||||
}
|
||||
var data []json.RawMessage
|
||||
for _, hit := range searchResult.Hits.Hits {
|
||||
var t json.RawMessage
|
||||
e := json.Unmarshal(*hit.Source, &t)
|
||||
if e != nil {
|
||||
PromError(fmt.Sprintf("es:%s 索引有脏数据", esClusterName), "s.dao.SearchArchiveCheck(%d,%d) error(%v) ", bsp.Pn*bsp.Ps, bsp.Ps, e)
|
||||
continue
|
||||
}
|
||||
data = append(data, t)
|
||||
}
|
||||
res = &model.SearchResult{
|
||||
Order: strings.Join(bsp.Order, ","),
|
||||
Sort: strings.Join(bsp.Sort, ","),
|
||||
Result: data,
|
||||
Debug: res.Debug,
|
||||
Page: &model.Page{
|
||||
Pn: bsp.Pn,
|
||||
Ps: bsp.Ps,
|
||||
Total: searchResult.Hits.TotalHits,
|
||||
},
|
||||
}
|
||||
return
|
||||
}
|
138
app/service/main/search/dao/pgc.go
Normal file
138
app/service/main/search/dao/pgc.go
Normal file
@ -0,0 +1,138 @@
|
||||
package dao
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"go-common/app/service/main/search/model"
|
||||
|
||||
elastic "gopkg.in/olivere/elastic.v5"
|
||||
)
|
||||
|
||||
// PgcMedia .
|
||||
func (d *Dao) PgcMedia(c context.Context, p *model.PgcMediaParams) (res *model.SearchResult, err error) {
|
||||
query := elastic.NewBoolQuery()
|
||||
if p.Bsp.KW != "" {
|
||||
query = query.Must(elastic.NewMultiMatchQuery(p.Bsp.KW, "title").Type("best_fields").TieBreaker(0.3))
|
||||
}
|
||||
if len(p.MediaIds) > 0 {
|
||||
interfaceSlice := make([]interface{}, len(p.MediaIds))
|
||||
for i, d := range p.MediaIds {
|
||||
interfaceSlice[i] = d
|
||||
}
|
||||
query = query.Filter(elastic.NewTermsQuery("media_id", interfaceSlice...))
|
||||
}
|
||||
if len(p.SeasonIds) > 0 {
|
||||
interfaceSlice := make([]interface{}, len(p.SeasonIds))
|
||||
for i, d := range p.SeasonIds {
|
||||
interfaceSlice[i] = d
|
||||
}
|
||||
query = query.Filter(elastic.NewTermsQuery("season_id", interfaceSlice...))
|
||||
}
|
||||
if len(p.SeasonTypes) > 0 {
|
||||
interfaceSlice := make([]interface{}, len(p.SeasonTypes))
|
||||
for i, d := range p.SeasonTypes {
|
||||
interfaceSlice[i] = d
|
||||
}
|
||||
query = query.Filter(elastic.NewTermsQuery("season_type", interfaceSlice...))
|
||||
}
|
||||
if len(p.StyleIds) > 0 {
|
||||
interfaceSlice := make([]interface{}, len(p.StyleIds))
|
||||
for i, d := range p.StyleIds {
|
||||
interfaceSlice[i] = d
|
||||
}
|
||||
query = query.Filter(elastic.NewTermsQuery("style_id", interfaceSlice...))
|
||||
}
|
||||
if p.Status > -1000 {
|
||||
query = query.Filter(elastic.NewTermQuery("status", p.Status))
|
||||
}
|
||||
if p.ReleaseDateFrom != "" {
|
||||
query = query.Filter(elastic.NewRangeQuery("release_date").Gte(p.ReleaseDateFrom))
|
||||
}
|
||||
if p.ReleaseDateTo != "" {
|
||||
query = query.Filter(elastic.NewRangeQuery("release_date").Lte(p.ReleaseDateTo))
|
||||
}
|
||||
if p.ReleaseDateFrom != "" {
|
||||
query = query.Filter(elastic.NewRangeQuery("release_date").Gte(p.ReleaseDateFrom))
|
||||
}
|
||||
if p.ReleaseDateTo != "" {
|
||||
query = query.Filter(elastic.NewRangeQuery("release_date").Lte(p.ReleaseDateTo))
|
||||
}
|
||||
if p.SeasonIDFrom > 0 {
|
||||
query = query.Filter(elastic.NewRangeQuery("season_id").Gte(p.SeasonIDFrom))
|
||||
}
|
||||
if p.SeasonIDTo > 0 {
|
||||
query = query.Filter(elastic.NewRangeQuery("season_id").Lte(p.SeasonIDTo))
|
||||
}
|
||||
if len(p.ProducerIds) > 0 {
|
||||
interfaceSlice := make([]interface{}, len(p.ProducerIds))
|
||||
for i, d := range p.ProducerIds {
|
||||
interfaceSlice[i] = d
|
||||
}
|
||||
query = query.Filter(elastic.NewTermsQuery("producer_id", interfaceSlice...))
|
||||
}
|
||||
if p.IsDeleted == 0 {
|
||||
query = query.MustNot(elastic.NewTermQuery("is_deleted", 1))
|
||||
}
|
||||
if len(p.AreaIds) > 0 {
|
||||
interfaceSlice := make([]interface{}, len(p.AreaIds))
|
||||
for i, o := range p.AreaIds {
|
||||
interfaceSlice[i] = o
|
||||
}
|
||||
query = query.Filter(elastic.NewTermsQuery("area_id", interfaceSlice...))
|
||||
}
|
||||
if p.ScoreFrom > 0 {
|
||||
query = query.Filter(elastic.NewRangeQuery("score_from").Gte(p.ScoreFrom))
|
||||
}
|
||||
if p.ScoreTo > 0 {
|
||||
query = query.Filter(elastic.NewRangeQuery("score_to").Lte(p.ScoreTo))
|
||||
}
|
||||
if p.IsFinish != "" {
|
||||
query = query.Filter(elastic.NewTermsQuery("is_finish", p.IsFinish))
|
||||
}
|
||||
if len(p.SeasonVersions) > 0 {
|
||||
interfaceSlice := make([]interface{}, len(p.SeasonVersions))
|
||||
for i, o := range p.SeasonVersions {
|
||||
interfaceSlice[i] = o
|
||||
}
|
||||
query = query.Filter(elastic.NewTermsQuery("season_version", interfaceSlice...))
|
||||
}
|
||||
if len(p.SeasonStatuses) > 0 {
|
||||
interfaceSlice := make([]interface{}, len(p.SeasonStatuses))
|
||||
for i, o := range p.SeasonStatuses {
|
||||
interfaceSlice[i] = o
|
||||
}
|
||||
query = query.Filter(elastic.NewTermsQuery("season_status", interfaceSlice...))
|
||||
}
|
||||
if p.PubTimeFrom != "" {
|
||||
query = query.Filter(elastic.NewRangeQuery("pub_time").Gte(p.PubTimeFrom))
|
||||
}
|
||||
if p.PubTimeTo != "" {
|
||||
query = query.Filter(elastic.NewRangeQuery("pub_time").Lte(p.PubTimeTo))
|
||||
}
|
||||
if len(p.SeasonMonths) > 0 {
|
||||
interfaceSlice := make([]interface{}, len(p.SeasonMonths))
|
||||
for i, o := range p.SeasonMonths {
|
||||
interfaceSlice[i] = o
|
||||
}
|
||||
query = query.Filter(elastic.NewTermsQuery("season_month", interfaceSlice...))
|
||||
}
|
||||
if p.LatestTimeFrom != "" {
|
||||
query = query.Filter(elastic.NewRangeQuery("latest_time").Gte(p.LatestTimeFrom))
|
||||
}
|
||||
if p.LatestTimeTo != "" {
|
||||
query = query.Filter(elastic.NewRangeQuery("latest_time").Lte(p.LatestTimeTo))
|
||||
}
|
||||
if len(p.CopyrightInfos) > 0 {
|
||||
interfaceSlice := make([]interface{}, len(p.CopyrightInfos))
|
||||
for i, o := range p.CopyrightInfos {
|
||||
interfaceSlice[i] = o
|
||||
}
|
||||
query = query.Filter(elastic.NewTermsQuery("copyright_info", interfaceSlice...))
|
||||
}
|
||||
p.Bsp.Source = []string{"media_id", "season_id", "season_type", "dm_count", "play_count", "fav_count", "score", "latest_time", "pub_time", "release_date"}
|
||||
if res, err = d.searchResult(c, "externalPublic", "pgc_media", query, p.Bsp); err != nil {
|
||||
PromError(fmt.Sprintf("es:%s ", p.Bsp.AppID), "%v", err)
|
||||
}
|
||||
return
|
||||
}
|
46
app/service/main/search/dao/reply.go
Normal file
46
app/service/main/search/dao/reply.go
Normal file
@ -0,0 +1,46 @@
|
||||
package dao
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"go-common/app/service/main/search/model"
|
||||
|
||||
elastic "gopkg.in/olivere/elastic.v5"
|
||||
)
|
||||
|
||||
// ReplyRecord search reply record from ES.
|
||||
func (d *Dao) ReplyRecord(c context.Context, p *model.ReplyRecordParams) (res *model.SearchResult, err error) {
|
||||
query := elastic.NewBoolQuery()
|
||||
if p.Mid > 0 {
|
||||
query = query.Must(elastic.NewTermQuery("mid", p.Mid))
|
||||
} else {
|
||||
return
|
||||
}
|
||||
if len(p.Types) > 0 {
|
||||
interfaceSlice := make([]interface{}, len(p.Types))
|
||||
for i, d := range p.Types {
|
||||
interfaceSlice[i] = d
|
||||
}
|
||||
query = query.Must(elastic.NewTermsQuery("type", interfaceSlice...))
|
||||
}
|
||||
if len(p.States) > 0 {
|
||||
interfaceSlice := make([]interface{}, len(p.States))
|
||||
for i, d := range p.States {
|
||||
interfaceSlice[i] = d
|
||||
}
|
||||
query = query.Must(elastic.NewTermsQuery("state", interfaceSlice...))
|
||||
}
|
||||
if p.CTimeFrom != "" {
|
||||
query = query.Must(elastic.NewRangeQuery("ctime").Gte(p.CTimeFrom))
|
||||
}
|
||||
if p.CTimeTo != "" {
|
||||
query = query.Must(elastic.NewRangeQuery("ctime").Lte(p.CTimeTo))
|
||||
}
|
||||
indexName := fmt.Sprintf("replyrecord_%d", p.Mid%100)
|
||||
if res, err = d.searchResult(c, "replyExternal", indexName, query, p.Bsp); err != nil {
|
||||
PromError(fmt.Sprintf("es:%s ", p.Bsp.AppID), "%v", err)
|
||||
return
|
||||
}
|
||||
return
|
||||
}
|
71
app/service/main/search/dao/sms.go
Normal file
71
app/service/main/search/dao/sms.go
Normal file
@ -0,0 +1,71 @@
|
||||
package dao
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"go-common/library/log"
|
||||
)
|
||||
|
||||
// _smsURL is the ops-mng endpoint used to deliver alert SMS messages.
const _smsURL = "http://ops-mng.bilibili.co/api/sendsms"

// sms sends rate-limited alert text messages through the ops-mng gateway.
// NOTE(review): lastTime is read and written without synchronization
// (see IntervalCheck) — confirm callers are single-goroutine.
type sms struct {
	d *Dao

	// client: HTTP client used for the sendsms GET request.
	client *http.Client
	// lastTime: unix seconds of the last accepted send.
	lastTime int64
	// interval: minimum seconds between sends (from conf).
	interval int64
	// params: fixed query parameters (phone, token); "message" is set per send.
	params *url.Values
}
|
||||
|
||||
func newSMS(d *Dao) (s *sms) {
|
||||
s = &sms{
|
||||
d: d,
|
||||
client: &http.Client{},
|
||||
lastTime: time.Now().Unix() - d.c.SMS.Interval, //如果不想让初始化的时候告警,把减号去掉
|
||||
interval: d.c.SMS.Interval,
|
||||
params: &url.Values{
|
||||
"phone": []string{d.c.SMS.Phone},
|
||||
"token": []string{d.c.SMS.Token},
|
||||
},
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// SendSMS sends an alert SMS with the given message, throttled by the
// configured interval. When throttled, the message is only logged and a
// nil error is returned; send failures are logged and returned.
func (d *Dao) SendSMS(msg string) (err error) {
	if !d.sms.IntervalCheck() {
		log.Error("发短信太频繁啦, msg:%s", msg)
		return
	}
	if err = d.sms.Send(msg); err != nil {
		log.Error("发短信失败, msg:%s, error(%v)", msg, err)
	}
	return
}
|
||||
|
||||
func (sms *sms) Send(msg string) (err error) {
|
||||
var req *http.Request
|
||||
sms.params.Set("message", msg)
|
||||
if req, err = http.NewRequest("GET", _smsURL+"?"+sms.params.Encode(), nil); err != nil {
|
||||
return
|
||||
}
|
||||
req.Header.Set("x1-bilispy-timeout", strconv.FormatInt(int64(time.Duration(1)/time.Millisecond), 10))
|
||||
if _, err = sms.client.Do(req); err != nil {
|
||||
log.Error("ops-mng sendsms url(%s) error(%v)", _smsURL+"?"+sms.params.Encode(), err)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// IntervalCheck accessible or not to send msg at present time
|
||||
func (sms *sms) IntervalCheck() (send bool) {
|
||||
now := time.Now().Unix()
|
||||
if (now - sms.lastTime) >= sms.interval {
|
||||
send = true
|
||||
sms.lastTime = now
|
||||
} else {
|
||||
send = false
|
||||
}
|
||||
return
|
||||
}
|
Reference in New Issue
Block a user