Create & Init Project...

This commit is contained in:
2019-04-22 18:49:16 +08:00
commit fc4fa37393
25440 changed files with 4054998 additions and 0 deletions

View File

@@ -0,0 +1,25 @@
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [
":package-srcs",
"//app/service/main/antispam/cmd:all-srcs",
"//app/service/main/antispam/conf:all-srcs",
"//app/service/main/antispam/dao:all-srcs",
"//app/service/main/antispam/extern:all-srcs",
"//app/service/main/antispam/http:all-srcs",
"//app/service/main/antispam/model:all-srcs",
"//app/service/main/antispam/rpc/client:all-srcs",
"//app/service/main/antispam/rpc/server:all-srcs",
"//app/service/main/antispam/service:all-srcs",
"//app/service/main/antispam/util:all-srcs",
],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,113 @@
#### antispam
##### Version 1.3.0
> 1.unit test
##### Version 1.2.6
> 1. grpc identify
##### Version 1.2.5
> 1. print ping err
##### Version 1.2.4
> 1. 迁移至BM框架
##### Version 1.2.3
> 1. 增加register接口
##### Version 1.2.2
> 1. redis migrate folder
##### Version 1.2.1
> 1. err judgement fix panic
##### Version 1.2.0
> 1. move in main path
##### Version 1.1.16
> 1. delete statsd
##### Version 1.1.15
> 1. limit max regexps a area can have (in conf file)
##### Version 1.1.14
> 1. optimize code
##### Version 1.1.13
> 1. temporary fix nil keyword pointer panic
##### Version 1.1.12
> 1. return deleted regexps where get all regexps
##### Version 1.1.11
> 1. fix danmu share reply's regexps
##### Version 1.1.10
> 1. return precious keyword hit counts when incr
> 2. make auto white strategy configurable.
##### Version 1.1.9
> 1. fix 'fetch rules without getting area'
##### Version 1.1.8
> 1. fix autoWhite deviation bug
##### Version 1.1.6
> 1. add turning keyword into white automatically strategy
##### Version 1.1.5
> 1. danmu has its own regexps list
##### Version 1.1.4
> 1. add danmu
##### Version 1.1.3
> 1. add unit test
##### Version 1.1.2
> 1. change `Id` to `ID`
> 2. remove unused column keywords.senderId and keywords.regexp_content
> 3. add some comments on exported method/function
> 4. add new area "main_site_dm"
##### Version 1.1.1
> 1. fix missed "return err" bug
> 2. remove unused configuration "RefreshTrieDBSizePerQuery"
##### Version 1.1.0
> 1. use 'ctime BETWEEN ... AND XXX' instead of 'ctime < XXX'
##### Version 1.0.9
> 1. asynchronous incr count & persist senderId in Filter.Check
> 2. add ruleDefaultExpireSec and regexpDefaultExpireSec configure options
##### Version 1.0.8
> 1. change recycle keyword sql
##### Version 1.0.7
> 1. limit durationSec and allowedCount max value
##### Version 1.0.6
> 1. refresh trie instead of building one frequently
> 2. add area `live_dm`
##### Version 1.0.5
> 1. record sender_id only if sender_id > 0
> 2. change logic to expire total_count cache
> 3. add err log when fail to update cache
##### Version 1.0.4
> 1. fix rate_limit_rule sql bug
##### Version 1.0.3
> 1. change sql to avoid slow query
##### Version 1.0.2
> 1. add log when ping error
##### Version 1.0.1
> 1. 修复ping error
##### Version 1.0.0
> 1. init commit
> 2. fix ZRANGEBYSCORE params

View File

@@ -0,0 +1,10 @@
# Owner
chenzhihui
lujinhui
# Author
xiahaiming
# Reviewer
chenzhihui
lujinhui

View File

@@ -0,0 +1,62 @@
# Debug helper: `make print-VAR` echoes the value of variable VAR.
print-%: ; @echo $*=$($*)

# Install location and binary names.
PREFIX?=/data/app/antispam-service
PROG=antispam-service
PROG_BAK=antispam-service_bak
TEST_SRC=$(wildcard ./**/*_test.go)
GOTESTOPT= -v
SOURCE_DIR=./
SUB_DIR=$(shell ls)
CUR_BRANCH=$(shell git branch | grep \* | cut -d ' ' -f2)
CONF?=cmd/antispam-test.toml

all: build vet lint test_quick clean

# BUG FIX: `all` and `deploy` were missing from .PHONY, so a file or
# directory named "all" or "deploy" in the tree would silently disable
# those targets.
.PHONY: all build run lint test vet clean test_quick deploy

build: $(PROG)

$(PROG):
	@echo "\nstart build..."
	@go build -o $(PROG) cmd/main.go

run: clean build
	@echo "start running in container..."
	@./$(PROG) -conf=$(CONF)

clean:
	@echo "\nstart cleaning up ..."
	@go clean $(GO_FLAGS) $(SOURCE_DIR)
	@rm -rf $(PROG)
	@rm -rf build
	@rm -f `find . -name "*.swp" -print`
	@rm -f `find . -name "*.xlsx" -print`

vet:
	@echo "\nstart running go vet..."
	@for f in $(SUB_DIR); do \
		if [ -d $$f ]; then \
			go vet ./"$$f"/...; \
		fi; \
	done

lint:
	@echo "\nstart running golint..."
	@for f in $(SUB_DIR); do \
		if [ -d $$f ]; then \
			golint $$f; \
		fi; \
	done

test_quick:
	@echo "\nstart running quick unit test..."
	@go test $(GOTESTOPT) ./http/... ./service/... ./util/...

test:
	@echo "\nstart running full unit test..."
	@go test $(GOTESTOPT) ./dao/...

deploy:
	@echo "start deploying on dev machine..."
	@cp $(PREFIX)/$(PROG) $(PREFIX)/$(PROG_BAK) && go build -o $(PREFIX)/$(PROG) && supervisorctl restart $(PROG)

View File

@@ -0,0 +1,16 @@
# See the OWNERS docs at https://go.k8s.io/owners
approvers:
- chenzhihui
- lujinhui
- xiahaiming
labels:
- main
- service
- service/main/antispam
options:
no_parent_owners: true
reviewers:
- chenzhihui
- lujinhui
- xiahaiming

View File

@@ -0,0 +1,5 @@
#### antispam
##### Version 1.0.0
# $ make

View File

@@ -0,0 +1,48 @@
package(default_visibility = ["//visibility:public"])
load(
"@io_bazel_rules_go//go:def.bzl",
"go_binary",
"go_library",
)
go_binary(
name = "cmd",
embed = [":go_default_library"],
tags = ["automanaged"],
)
go_library(
name = "go_default_library",
srcs = ["main.go"],
data = [
"antispam-integration.toml",
"antispam-test.toml",
],
importpath = "go-common/app/service/main/antispam/cmd",
tags = ["automanaged"],
visibility = ["//visibility:public"],
deps = [
"//app/service/main/antispam/conf:go_default_library",
"//app/service/main/antispam/http:go_default_library",
"//app/service/main/antispam/rpc/server:go_default_library",
"//app/service/main/antispam/service:go_default_library",
"//library/ecode/tip:go_default_library",
"//library/log:go_default_library",
"//library/net/trace:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,200 @@
# This is a TOML document. Boom.
version = "2.1.0"
user = "nobody"
pid = "/tmp/antispam-service.pid"
dir = "./"
perf = "0.0.0.0:6840"
checkFile = "/data/www/antispam-service.html"
family = "antispam-service"
address = "172.16.0.148"
proc = 32
maxExportRows=100000
maxRegexpCounts=20
[log]
#dir = "/data/log/antispam-service"
[log.agent]
project = "antispam-service"
taskID = "000069"
proto = "unixgram"
addr = "/var/run/lancer/collector.sock"
chanSize = 10240
[appkeytype]
"e7482d29be4a95b8"=[1] #稿件
"33ac033ce123e590"=[4,9] #活动,活动稿件
"7d9f6f6fe2a898e8"=[4,9]
"c05dd4e1638a8af0"=[5] #直播小视频
"37207f2beaebf8d7"=[5] #android
"27eb53fc9058f8c3"=[5] #ios
"b2609538e67ea9cd"=[5] #ios
"33ac033ce123e999"=[6] #封禁信息
"f01da7e4526a8af0"=[7] #公告信息
"58344a540b61aebf"=[8,10] #直播活动、直播公告
"9edfa647fbfde3e2"=[11] #有文画站
[syncopt]
open=true
syncIntervalSec = 100
[gcopt]
open=false
intervalSec = 5
[identify]
whiteAccessKey = "nKUPGzuhU|vTG58H!HPKLlktz{8}3^"
whiteMid = 88889082
[identify.app]
key = "c1a1cb2d89c33794"
secret = "dda47eeca111e03e6845017505baea13"
[identify.host]
auth = "http://uat-passport.bilibili.com"
secret = "http://uat-open.bilibili.com"
[identify.httpClient]
key = "c1a1cb2d89c33794"
secret = "dda47eeca111e03e6845017505baea13"
dial = "1s"
timeout = "1s"
keepAlive = "60s"
timer = 1000
[identify.httpClient.breaker]
window ="10s"
sleep ="100ms"
bucket = 10
ratio = 0.5
request = 100
[identify.httpClient.url]
"http://passport.bilibili.co/intranet/auth/tokenInfo" = {timeout = "100ms"}
"http://passport.bilibili.co/intranet/auth/cookieInfo" = {timeout = "100ms"}
"http://open.bilibili.co/api/getsecret" = {timeout = "500ms"}
[tracer]
proto = "udp"
addr = "172.16.33.46:5140"
tag = "platform/antispam-service"
[app]
key = "c1a1cb2d89c33794"
secret = "dda47eeca111e03e6845017505baea13"
[httpClient]
key = "c1a1cb2d89c33794"
secret = "dda47eeca111e03e6845017505baea13"
dial = "500ms"
timeout = "1s"
keepAlive = "60s"
timer = 1000
[httpClient.breaker]
window ="3s"
sleep ="100ms"
bucket = 10
ratio = 0.1
request = 100
switchoff= false
[captcha]
[captcha.app]
key = "c1a1cb2d89c33794"
secret = "dda47eeca111e03e6845017505baea13"
[captcha.memcache]
proto = "tcp"
addr = "172.16.33.54:11211"
active = 1
idle = 1
dialTimeout = "500ms"
readTimeout = "1s"
writeTimeout = "1s"
idleTimeout = "80s"
[captcha.captcha]
bid = "1"
bidKey = "86&JHBJHJHJ)()*(*&(*"
expire = 360
[captcha.httpClient]
dial = "500ms"
timeout = "500ms"
keepAlive = "60s"
timer = 1000
[captcha.httpClient.breaker]
window ="3s"
sleep ="100ms"
bucket = 10
ratio = 0.1
request = 100
switchoff= false
[multiHTTP]
[multiHTTP.inner]
addrs = ["127.0.0.1:7022"]
maxListen = 1000
[rpc]
[[rpc.servers]]
discoveroff=false
proto = "tcp"
addr = ":6279"
weight = 10
[rpc.zookeeper]
root = "/microservice/antispam-service/"
addrs = ["172.18.33.50:2199","172.18.33.51:2199","172.18.33.52:2199"]
timeout = "1s"
[ecode]
#domain = "172.16.33.248:6401"
domain = "uat-api.bilibili.co"
all = "1h"
diff = "5m"
[ecode.clientconfig]
key = "test"
secret = "e6c4c252dc7e3d8a90805eecd7c73396"
dial = "2000ms"
timeout = "2s"
keepAlive = "10s"
timer = 128
[ecode.clientconfig.breaker]
window ="3s"
sleep ="100ms"
bucket = 10
ratio = 0.5
request = 100
[ecode.app]
key = "test"
secret = "e6c4c252dc7e3d8a90805eecd7c73396"
[mysql]
[mysql.antispam]
addr= "172.18.33.49:3311"
dsn = "antispam:F49hoAI5DVVlvFmtfstkOFGqxpPxlYA0@tcp(172.16.33.205:3308)/bilibili_antispam?timeout=5s&readTimeout=5s&writeTimeout=5s&parseTime=true&loc=Local&charset=utf8"
active = 5
idle = 2
idleTimeout ="4h"
queryTimeout = "200ms"
execTimeout = "200ms"
tranTimeout = "200ms"
[mysql.antispam.breaker]
window = "3s"
sleep = "100ms"
bucket = 10
ratio = 0.5
request = 100
[redis]
proto = "unix"
#addr = "172.16.33.54:6379"
addr = "/tmp/uat-antispam-service-redis.sock"
idle = 10
active = 10
dialTimeout = "500ms"
readTimeout = "1s"
writeTimeout = "1s"
idleTimeout = "80s"
indexExpire = "200s"
[snowflake]
workerIDs = [ 0,1 ]
replyId = 1
[snowflake.ZooKeeper]
addrs = ["172.18.33.50:2199","172.18.33.51:2199","172.18.33.52:2199"]
root = "/gosnowflake-servers"
timeout= "30s"

View File

@@ -0,0 +1,226 @@
# This is a TOML document. Boom.
version = "2.1.0"
user = "nobody"
pid = "/tmp/antispam.pid"
dir = "./"
perf = "0.0.0.0:6840"
checkFile = "/data/www/antispam.html"
family = "antispam"
address = "172.16.0.148"
proc = 32
maxDurationSec=7200
maxAllowedCounts=1000
[autoWhite]
KeywordHitCounts = 100
NumOfSenders = 50
Derivation = 0.02
[serviceoption]
maxExportRows=100000
minKeywordLen=5
defaultExpireSec=10
ruleDefaultExpireSec=-1
regexpDefaultExpireSec=-1
defaultChanSize=1000
maxRegexpCountsPerArea=20
maxSenderNum=500
buildTrieIntervalMinute=1
buildTrieMaxRowsPerQuery=4000
refreshTrieIntervalSec=10
refreshRulesIntervalSec=10
refreshRegexpsIntervalSec=10
maxSpawnGoroutines=1000
asyncTaskChanSize=1000
[serviceoption.gcopt]
open=true
intervalSec = 5
maxRowsPerQuery = 10
[log]
dir = "/data/log/antispam-service/"
[identify]
whiteAccessKey = "nKUPGzuhU|vTG58H!HPKLlktz{8}3^"
whiteMid = 88889082
[identify.app]
key = "c1a1cb2d89c33794"
secret = "dda47eeca111e03e6845017505baea13"
[identify.host]
auth = "http://passport.bilibili.com"
secret = "http://open.bilibili.com"
[identify.httpClient]
key = "c1a1cb2d89c33794"
secret = "dda47eeca111e03e6845017505baea13"
dial = "1s"
timeout = "1s"
keepAlive = "60s"
timer = 1000
[identify.httpClient.breaker]
window ="10s"
sleep ="100ms"
bucket = 10
ratio = 0.5
request = 100
[identify.httpClient.url]
"http://passport.bilibili.co/intranet/auth/tokenInfo" = {timeout = "100ms"}
"http://passport.bilibili.co/intranet/auth/cookieInfo" = {timeout = "100ms"}
"http://open.bilibili.co/api/getsecret" = {timeout = "500ms"}
[tracer]
proto = "unixgram"
addr = "/var/run/dapper-collect/dapper-collect.sock"
family = "antispam-service"
[app]
key = "c1a1cb2d89c33794"
secret = "dda47eeca111e03e6845017505baea13"
[httpClient]
key = "c1a1cb2d89c33794"
secret = "dda47eeca111e03e6845017505baea13"
dial = "500ms"
timeout = "1s"
keepAlive = "60s"
timer = 1000
[httpClient.breaker]
window ="3s"
sleep ="100ms"
bucket = 10
ratio = 0.1
request = 100
switchoff= false
[captcha]
[captcha.app]
key = "c1a1cb2d89c33794"
secret = "dda47eeca111e03e6845017505baea13"
[captcha.memcache]
proto = "tcp"
addr = "172.16.33.54:11211"
active = 1
idle = 1
dialTimeout = "500ms"
readTimeout = "1s"
writeTimeout = "1s"
idleTimeout = "80s"
[captcha.captcha]
bid = "1"
bidKey = "86&JHBJHJHJ)()*(*&(*"
expire = 360
[captcha.httpClient]
dial = "500ms"
timeout = "500ms"
keepAlive = "60s"
timer = 1000
[captcha.httpClient.breaker]
window ="3s"
sleep ="100ms"
bucket = 10
ratio = 0.1
request = 100
switchoff= false
[multiHTTP]
[multiHTTP.inner]
addrs = ["127.0.0.1:7022"]
maxListen = 1000
[rpc]
[[rpc.servers]]
discoveroff=false
proto = "tcp"
addr = ":6849"
weight = 10
[rpc.zookeeper]
root = "/microservice/antispam-service/"
addrs = ["172.18.33.172:2181"]
timeout = "1s"
[ecode]
domain = "172.16.33.248:6401"
all = "1h"
diff = "5m"
[ecode.clientconfig]
key = "test"
secret = "e6c4c252dc7e3d8a90805eecd7c73396"
dial = "2000ms"
timeout = "2s"
keepAlive = "10s"
timer = 128
[ecode.clientconfig.breaker]
window ="3s"
sleep ="100ms"
bucket = 10
ratio = 0.5
request = 100
[ecode.app]
key = "test"
secret = "e6c4c252dc7e3d8a90805eecd7c73396"
[mysql]
[mysql.antispam]
#addr= "172.16.0.148:3306"
#dsn = "test:test@tcp(172.16.0.148:3306)/bilibili_filter_job?timeout=5s&readTimeout=5s&writeTimeout=5s&parseTime=true&loc=Local&charset=utf8"
#active = 5
#idle = 2
addr= "172.16.33.205:3308"
#dsn = "test:test@tcp(172.16.33.205:3308)/bilibili_antispam?timeout=5s&readTimeout=5s&writeTimeout=5s&parseTime=true&loc=Local&charset=utf8"
dsn = "antispam:tumRXf1TYwY17QE1CYaXphakD24myKfT@tcp(172.16.33.205:3308)/bilibili_antispam?timeout=5s&readTimeout=5s&writeTimeout=5s&parseTime=true&loc=Local&charset=utf8"
active = 5
idle = 2
idleTimeout ="4h"
queryTimeout = "700ms"
execTimeout = "200ms"
tranTimeout = "200ms"
#addr= "127.0.0.1:3306"
#dsn = "root:123456@tcp(127.0.0.1:3306)/bilibili?timeout=5s&readTimeout=5s&writeTimeout=5s&parseTime=true&loc=Local&charset=utf8"
#active = 5
#idle = 2
[mysql.antispam.breaker]
window = "3s"
sleep = "100ms"
bucket = 10
ratio = 0.5
request = 100
[redis]
proto = "tcp"
addr = "172.16.33.54:6379"
#addr = "127.0.0.1:6379"
idle = 10
active = 10
dialTimeout = "500ms"
readTimeout = "1s"
writeTimeout = "1s"
idleTimeout = "80s"
indexExpire = "200s"
[snowflake]
workerIDs = [ 0,1 ]
[snowflake.ZooKeeper]
addrs = ["172.16.33.54:2181"]
root = "/gosnowflake-servers"
timeout= "30s"

View File

@@ -0,0 +1,51 @@
package main
import (
"flag"
"os"
"os/signal"
"syscall"
"time"
"go-common/app/service/main/antispam/conf"
"go-common/app/service/main/antispam/http"
rpc "go-common/app/service/main/antispam/rpc/server"
"go-common/app/service/main/antispam/service"
ecode "go-common/library/ecode/tip"
"go-common/library/log"
"go-common/library/net/trace"
)
// main is the entry point of the antispam service: it loads configuration,
// initializes logging / ecode / tracing, starts the service core plus its
// RPC and HTTP frontends, then blocks handling shutdown signals.
func main() {
	flag.Parse()
	// ConfPath is set by the -conf flag; an empty path makes conf.Init
	// fall back to the remote config center.
	if err := conf.Init(conf.ConfPath); err != nil {
		panic(err)
	}
	log.Init(conf.Conf.Log)
	ecode.Init(conf.Conf.Ecode)
	trace.Init(conf.Conf.Tracer)
	defer trace.Close()
	defer log.Close()
	log.Info("antispam start")
	svr := service.New(conf.Conf)
	rpcSvr := rpc.New(conf.Conf, svr)
	http.Init(conf.Conf, svr)
	// init signal
	ch := make(chan os.Signal, 1)
	signal.Notify(ch, syscall.SIGHUP, syscall.SIGQUIT, syscall.SIGTERM, syscall.SIGINT)
	for {
		si := <-ch
		switch si {
		case syscall.SIGQUIT, syscall.SIGTERM, syscall.SIGINT:
			// Graceful shutdown: stop accepting RPC first, give in-flight
			// work a 2s grace period, then close the service core.
			log.Info("get a signal %s, stop the consume process", si.String())
			rpcSvr.Close()
			time.Sleep(time.Second * 2)
			svr.Close()
			return
		case syscall.SIGHUP:
			// SIGHUP is deliberately ignored (no config reload implemented).
		default:
			// NOTE(review): any other notified signal exits WITHOUT the
			// graceful close above — confirm this is intentional.
			return
		}
	}
}

View File

@@ -0,0 +1,42 @@
package(default_visibility = ["//visibility:public"])
load(
"@io_bazel_rules_go//go:def.bzl",
"go_library",
)
go_library(
name = "go_default_library",
srcs = ["conf.go"],
importpath = "go-common/app/service/main/antispam/conf",
tags = ["automanaged"],
visibility = ["//visibility:public"],
deps = [
"//library/cache/redis:go_default_library",
"//library/conf:go_default_library",
"//library/database/sql:go_default_library",
"//library/ecode/tip:go_default_library",
"//library/log:go_default_library",
"//library/net/http/blademaster:go_default_library",
"//library/net/http/blademaster/middleware/auth:go_default_library",
"//library/net/http/blademaster/middleware/verify:go_default_library",
"//library/net/rpc:go_default_library",
"//library/net/trace:go_default_library",
"//library/time:go_default_library",
"//vendor/github.com/BurntSushi/toml:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,138 @@
package conf
import (
"errors"
"flag"
"go-common/library/cache/redis"
"go-common/library/conf"
"go-common/library/database/sql"
ecode "go-common/library/ecode/tip"
"go-common/library/log"
bm "go-common/library/net/http/blademaster"
"go-common/library/net/http/blademaster/middleware/auth"
"go-common/library/net/http/blademaster/middleware/verify"
"go-common/library/net/rpc"
"go-common/library/net/trace"
"go-common/library/time"
"github.com/BurntSushi/toml"
)
const (
	// configKey is the key under which the remote config center stores
	// this service's TOML configuration.
	configKey = "antispam-service.toml"
)

var (
	// Conf holds the globally shared, decoded configuration tree.
	Conf *Config
	// ConfPath is the local config file path, populated by the -conf flag.
	ConfPath string
)
// Config is the top-level service configuration, decoded from TOML
// (either a local file or the remote config center).
type Config struct {
	// RPC server and blademaster HTTP settings.
	RPC        *rpc.ServerConfig
	App        *bm.App
	BM         *bm.ServerConfig
	HTTPClient *bm.ClientConfig
	// Storage backends.
	MySQL *MySQL
	Redis *Redis
	// Observability and auth middleware.
	Tracer *trace.Config
	Log    *log.Config
	Verify *verify.Config
	Auth   *auth.Config
	Ecode  *ecode.Config
	// AppkeyType maps an app key to the area IDs it serves
	// (the [appkeytype] TOML table).
	AppkeyType map[string][]int8
	ReplyURL   string
	// Service-layer tuning knobs and limits.
	ServiceOption      *ServiceOption
	MaxSpawnGoroutines int
	MaxAllowedCounts   int64
	MaxDurationSec     int64
	AutoWhite          *AutoWhite
}
// AutoWhite holds the thresholds of the automatic-whitelisting strategy
// (the [autoWhite] TOML table).
type AutoWhite struct {
	KeywordHitCounts int64
	NumOfSenders     int64
	// Derivation is the allowed deviation ratio.
	// NOTE(review): name looks like a misspelling of "Deviation"; kept
	// because the TOML key and existing callers depend on it.
	Derivation float64
}

// ServiceOption groups the service-layer tuning knobs
// (the [serviceoption] TOML table).
type ServiceOption struct {
	GcOpt *GcOpt
	// Background trie (re)build cadence and query batch size.
	BuildTrieIntervalMinute  int64
	BuildTrieMaxRowsPerQuery int64
	AsyncTaskChanSize        int64
	// Cache refresh intervals, in seconds.
	RefreshTrieIntervalSec    int64
	RefreshRulesIntervalSec   int64
	RefreshRegexpsIntervalSec int64
	MinKeywordLen             int
	MaxSenderNum              int64
	// Expiry knobs; the sample config sets the rule/regexp defaults to -1,
	// presumably meaning "never expire" — TODO confirm.
	DefaultExpireSec       int64
	DefaultChanSize        int64
	MaxExportRows          int64
	MaxRegexpCountsPerArea int64
	MaxSpawnGoroutines     int64
	RuleDefaultExpireSec   int64
	RegexpDefaultExpireSec int64
}

// GcOpt configures the periodic recycling job
// (the [serviceoption.gcopt] TOML table).
type GcOpt struct {
	Open            bool
	IntervalSec     int
	MaxRowsPerQuery int64
}

// MySQL groups the MySQL connection configs.
type MySQL struct {
	AntiSpam *sql.Config
}

// Redis extends the shared redis pool config with the index-key TTL.
type Redis struct {
	*redis.Config
	IndexExpire time.Duration
}
// init registers the -conf flag so ConfPath is populated by flag.Parse.
func init() {
	flag.StringVar(&ConfPath, "conf", "", "config path")
}
// Init loads configuration from path, or from the remote config center
// when path is empty, into the package-level Conf. It fails when the
// Areas table is empty, since the service cannot route without it.
func Init(path string) error {
	// BUG FIX: previously this panicked inside a function that already
	// returns error; return the error and let callers decide how to fail
	// (main and the dao tests already panic on a non-nil error).
	if len(Areas) == 0 {
		return errors.New("areas must be set")
	}
	if path == "" {
		return configCenter()
	}
	_, err := toml.DecodeFile(path, &Conf)
	return err
}
// configCenter pulls the TOML configuration from the remote config
// center and decodes it into the package-level Conf.
func configCenter() error {
	cli, err := conf.New()
	if err != nil {
		return err
	}
	raw, ok := cli.Value(configKey)
	if !ok {
		return errors.New("empty value")
	}
	_, err = toml.Decode(raw, &Conf)
	return err
}
// Areas maps a business-area name to its numeric ID; Init refuses to
// run while this table is empty.
var Areas = map[string]int{
	"reply":   1,
	"im":      2,
	"live_dm": 3,
	"danmu":   4,
}

View File

@@ -0,0 +1,69 @@
package(default_visibility = ["//visibility:public"])
load(
"@io_bazel_rules_go//go:def.bzl",
"go_test",
"go_library",
)
go_test(
name = "go_default_test",
srcs = [
"cond_test.go",
"dao_test.go",
"keyword_test.go",
"mysql_test.go",
"redis_test.go",
"regexp_test.go",
"rule_test.go",
"transaction_test.go",
],
embed = [":go_default_library"],
rundir = ".",
tags = ["automanaged"],
deps = [
"//app/service/main/antispam/conf:go_default_library",
"//app/service/main/antispam/model:go_default_library",
"//app/service/main/antispam/util:go_default_library",
"//vendor/github.com/smartystreets/goconvey/convey:go_default_library",
],
)
go_library(
name = "go_default_library",
srcs = [
"cond.go",
"dao.go",
"keyword.go",
"mysql.go",
"redis.go",
"regexp.go",
"rule.go",
"transaction.go",
],
importpath = "go-common/app/service/main/antispam/dao",
tags = ["automanaged"],
visibility = ["//visibility:public"],
deps = [
"//app/service/main/antispam/conf:go_default_library",
"//app/service/main/antispam/model:go_default_library",
"//app/service/main/antispam/util:go_default_library",
"//library/cache/redis:go_default_library",
"//library/database/sql:go_default_library",
"//library/log:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,24 @@
package dao
import "go-common/app/service/main/antispam/util"
// Condition carries the filter / paging options the dao layer uses to
// build its queries.
//
// NOTE(review): most fields are raw strings that the dao splices
// directly into SQL text via fmt.Sprintf — they must only ever carry
// trusted, program-generated values (SQL-injection risk otherwise).
type Condition struct {
	*util.Pagination
	// Offset and Limit are pre-rendered numeric strings for LIMIT clauses.
	Offset string
	Limit  string
	// Tags and Contents are rendered into SQL IN(...) lists.
	Tags     []string
	Contents []string
	Area     string
	Search   string
	State    string
	HitCounts string
	Order, OrderBy        string
	LimitType, LimitScope string
	StartTime, EndTime    string
	LastModifiedTime      string
}

View File

@@ -0,0 +1 @@
package dao

View File

@@ -0,0 +1,128 @@
package dao
import (
"context"
"database/sql"
"time"
"go-common/app/service/main/antispam/conf"
"go-common/library/cache/redis"
xsql "go-common/library/database/sql"
)
// Executer abstracts anything able to run a write statement — a DB
// handle or a transaction.
// NOTE(review): conventional spelling would be "Executor"; the name is
// kept because renaming would break every implementer.
type Executer interface {
	Exec(ctx context.Context, SQL string, args ...interface{}) (sql.Result, error)
}

// Querier abstracts anything able to run read queries.
type Querier interface {
	QueryRow(ctx context.Context, SQL string, args ...interface{}) *xsql.Row
	Query(ctx context.Context, SQL string, args ...interface{}) (*xsql.Rows, error)
}

// KeywordDao is the persistence contract for keyword records.
type KeywordDao interface {
	GetByID(context.Context, int64) (*Keyword, error)
	GetByIDs(context.Context, []int64) ([]*Keyword, error)
	GetByCond(context.Context, *Condition) ([]*Keyword, int64, error)
	GetByOffsetLimit(context.Context, *Condition) ([]*Keyword, error)
	GetByAreaAndContents(context.Context, *Condition) ([]*Keyword, error)
	GetByAreaAndContent(context.Context, *Condition) (*Keyword, error)
	GetRubbish(context.Context, *Condition) ([]*Keyword, error)
	Insert(context.Context, *Keyword) (*Keyword, error)
	Update(context.Context, *Keyword) (*Keyword, error)
	DeleteByIDs(context.Context, []int64) ([]*Keyword, error)
}

// RuleDao is the persistence contract for rate-limit rules.
type RuleDao interface {
	GetByID(context.Context, int64) (*Rule, error)
	GetByIDs(context.Context, []int64) ([]*Rule, error)
	GetByCond(context.Context, *Condition) ([]*Rule, int64, error)
	GetByArea(context.Context, *Condition) ([]*Rule, error)
	GetByAreaAndTypeAndScope(context.Context, *Condition) (*Rule, error)
	GetByAreaAndLimitType(context.Context, *Condition) ([]*Rule, error)
	Insert(context.Context, *Rule) (*Rule, error)
	Update(context.Context, *Rule) (*Rule, error)
}

// RegexpDao is the persistence contract for regexp records.
type RegexpDao interface {
	GetByID(context.Context, int64) (*Regexp, error)
	GetByIDs(context.Context, []int64) ([]*Regexp, error)
	GetByCond(context.Context, *Condition) ([]*Regexp, int64, error)
	GetByAreaAndContent(context.Context, *Condition) (*Regexp, error)
	GetByContents(context.Context, []string) ([]*Regexp, error)
	Insert(context.Context, *Regexp) (*Regexp, error)
	Update(context.Context, *Regexp) (*Regexp, error)
}

// Tx is a transaction that can touch all three record kinds before
// committing or rolling back.
type Tx interface {
	Executer
	RegexpTx
	KeywordTx
	RuleTx
	Commit() error
	Rollback() error
}

// KeywordTx is the keyword subset of a transaction.
type KeywordTx interface {
	InsertKeyword(*Keyword) error
	UpdateKeyword(*Keyword) error
}

// RegexpTx is the regexp subset of a transaction.
type RegexpTx interface {
	InsertRegexp(*Regexp) error
	UpdateRegexp(*Regexp) error
}

// RuleTx is the rule subset of a transaction.
type RuleTx interface {
	InsertRule(*Rule) error
	UpdateRule(*Rule) error
}
// Dao bundles the storage handles shared by the dao implementations.
type Dao struct {
	// db *xsql.DB
	redis *redis.Pool
	// redisExpire is the index-key TTL in whole seconds, derived from
	// conf Redis.IndexExpire.
	redisExpire int
}
// New builds a Dao backed by the redis pool described in c and returns it.
// (The MySQL handle is managed at package level, not per-Dao.)
func New(c *conf.Config) (d *Dao) {
	pool := redis.NewPool(c.Redis.Config)
	ttlSec := int(time.Duration(c.Redis.IndexExpire) / time.Second)
	d = &Dao{
		redis:       pool,
		redisExpire: ttlSec,
	}
	return d
}
// Ping verifies every backing connection used by the dao is reachable:
// redis first, then MySQL.
func (d *Dao) Ping(c context.Context) (err error) {
	if err = d.pingRedis(c); err != nil {
		return err
	}
	return PingMySQL(c)
}
// Close releases every connection held by the dao, including the
// package-level MySQL handle.
func (d *Dao) Close() {
	if pool := d.redis; pool != nil {
		pool.Close()
	}
	Close()
}

View File

@@ -0,0 +1,40 @@
package dao
import (
"flag"
"go-common/app/service/main/antispam/conf"
"os"
"testing"
)
var (
	// Shared test fixtures, initialized once in TestMain.
	d     *Dao
	kwi   *KeywordDaoImpl
	regdi *RegexpDaoImpl
	rdi   *RuleDaoImpl
)
// TestMain wires up configuration (via the remote config center when
// DEPLOY_ENV is set) and the shared dao fixtures before running the
// package tests.
func TestMain(m *testing.M) {
	if os.Getenv("DEPLOY_ENV") != "" {
		// Errors from flag.Set are ignored deliberately: these flags are
		// registered by the conf library and exist in this environment.
		flag.Set("app_id", "main.community.antispam-service")
		flag.Set("conf_token", "e0de72afaf4946ca836e9b7b459b833b")
		flag.Set("tree_id", "11041")
		flag.Set("conf_version", "docker-1")
		flag.Set("deploy_env", "uat")
		flag.Set("conf_host", "config.bilibili.co")
		flag.Set("conf_path", "/tmp")
		flag.Set("region", "sh")
		flag.Set("zone", "sh001")
	}
	flag.Parse()
	if err := conf.Init(""); err != nil {
		panic(err)
	}
	Init(conf.Conf)
	d = New(conf.Conf)
	kwi = NewKeywordDao()
	regdi = NewRegexpDao()
	rdi = NewRuleDao()
	// BUG FIX: propagate the test result. The previous unconditional
	// os.Exit(0) reported success even when tests failed.
	os.Exit(m.Run())
}

View File

@@ -0,0 +1,46 @@
-- keywords: per-area filtered keyword records.
-- NOTE(review): the dao issues "FORCE INDEX(ix_ctime)" against this
-- table, but only ix_mtime and ix_area_state_ctime are defined here —
-- confirm which side is stale.
CREATE TABLE keywords(
    id INT(11) UNSIGNED PRIMARY KEY AUTO_INCREMENT COMMENT '主键id',
    area INT(11) NOT NULL DEFAULT 0 COMMENT '业务类型',
    sender_id INT(11) NOT NULL DEFAULT 0 COMMENT '发送者的id',
    content VARCHAR(40) NOT NULL COMMENT '关键字内容',
    regexp_name VARCHAR(40) NOT NULL COMMENT '该关键字命中正则名称',
    regexp_content VARCHAR(500) NOT NULL COMMENT '正则内容',
    tag tinyint(4) NOT NULL DEFAULT 0 COMMENT '0:limit, 1:restrict, 2: whitelist, 3: blacklist',
    hit_counts INT(11) NOT NULL DEFAULT 0 COMMENT '命中关键字次数',
    state tinyint(4) NOT NULL DEFAULT 0 COMMENT '0:default, 1:deleted',
    origin_content VARCHAR(1500) NOT NULL COMMENT '过滤前的内容',
    `ctime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
    `mtime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '修改时间',
    UNIQUE KEY `uk_area_content` (`area`, `content`),
    KEY `ix_mtime` (`mtime`),
    KEY `ix_area_state_ctime` (`area`,`state`, `ctime`)
)ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COMMENT='过滤限制关键字表';

-- regexps: per-area regular expressions and the action taken on a hit.
CREATE TABLE regexps(
    id INT(11) UNSIGNED PRIMARY KEY AUTO_INCREMENT COMMENT '主键id',
    name VARCHAR(20) NOT NULL COMMENT 'name',
    area INT(11) NOT NULL DEFAULT 0 COMMENT '业务类型 1: reply, 2: imessage',
    operation tinyint(4) NOT NULL DEFAULT 0 COMMENT '0: limit, 1: put into whitelist, 2: restrict limit, 3: ignore',
    content VARCHAR(200) NOT NULL comment '正则内容',
    state tinyint(4) NOT NULL DEFAULT 0 COMMENT '0:default, 1:deleted',
    `ctime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
    `mtime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '修改时间',
    KEY `ix_mtime` (`mtime`),
    UNIQUE KEY `uk_area_content` (`area`, `content`)
)ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COMMENT='过滤限制正则表';

-- rate_limit_rules: per-area rate limits (allowed_counts per dur_sec),
-- unique on (area, limit_type, limit_scope) via the index created below.
CREATE TABLE rate_limit_rules(
    id INT(11) UNSIGNED PRIMARY KEY AUTO_INCREMENT COMMENT '主键id',
    area INT(11) NOT NULL DEFAULT 0 COMMENT '业务类型',
    limit_type tinyint(4) NOT NULL DEFAULT 0 COMMENT '0: default, 1: strict',
    limit_scope tinyint(4) NOT NULL DEFAULT 0 COMMENT '0: local, 1: global',
    dur_sec int(11) NOT NULL DEFAULT 0 COMMENT '持续时间',
    allowed_counts int(11) NOT NULL DEFAULT 0 COMMENT '允许发送次数',
    state tinyint(4) NOT NULL DEFAULT 0 COMMENT '0:default, 1:deleted',
    `ctime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
    `mtime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '修改时间',
    KEY `ix_mtime` (`mtime`)
)ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COMMENT='频率规则表';

CREATE UNIQUE INDEX uk_area_limit_type_limit_scope ON rate_limit_rules (area, limit_type, limit_scope);

View File

@@ -0,0 +1,349 @@
package dao
import (
"context"
"fmt"
"strings"
"time"
"go-common/app/service/main/antispam/util"
"go-common/library/database/sql"
"go-common/library/log"
)
const (
	// columnKeywords is the shared column projection for keyword queries.
	columnKeywords = "id, area, content, regexp_name, tag, hit_counts, state, origin_content, ctime, mtime"

	// Query templates completed with fmt.Sprintf at call sites.
	// NOTE(review): the SELECT templates interpolate condition values
	// directly into SQL instead of using bound parameters — safe only
	// while the inputs are program-generated, never for untrusted data.
	selectKeywordCountsSQL            = `SELECT COUNT(1) FROM keywords %s`
	selectKeywordsByCondSQL           = `SELECT ` + columnKeywords + ` FROM keywords %s`
	selectKeywordByIDsSQL             = `SELECT ` + columnKeywords + ` FROM keywords WHERE id IN (%s)`
	// NOTE(review): forces index ix_ctime, which the schema file in this
	// commit does not define — confirm it exists in the live database.
	selectKeywordNeedRecycledSQL      = `SELECT ` + columnKeywords + ` FROM keywords FORCE INDEX(ix_ctime) WHERE state = %s AND hit_counts < %s AND tag IN(%s) AND ctime BETWEEN '%s' AND '%s' LIMIT %d`
	selectKeywordByOffsetLimitSQL     = `SELECT ` + columnKeywords + ` FROM keywords WHERE area = %s AND id > %s AND tag IN(%s) AND state = 0 LIMIT %s`
	selectKeywordByAreaAndContentsSQL = `SELECT ` + columnKeywords + ` FROM keywords WHERE area = %s AND content IN(%s)`
	// The write statements use proper ? placeholders.
	insertKeywordSQL      = `INSERT INTO keywords(area, content, regexp_name, tag, hit_counts, origin_content) VALUES(?, ?, ?, ?, ?, ?)`
	updateKeywordSQL      = `UPDATE keywords SET content = ?, regexp_name = ?, tag = ?, hit_counts = ?, state = ?, origin_content = ?, ctime = ?, mtime = ? WHERE id = ?`
	deleteKeywordByIDsSQL = `UPDATE keywords SET state = 1, hit_counts = 0, mtime = ? WHERE id IN (%s)`
)
// Keyword tags, matching the keywords.tag column
// (0:limit, 1:restrict, 2:whitelist, 3:blacklist).
const (
	// KeywordTagDefaultLimit marks a keyword under the default rate limit.
	KeywordTagDefaultLimit int = iota
	// KeywordTagRestrictLimit marks a keyword under the stricter limit.
	KeywordTagRestrictLimit
	// KeywordTagWhite marks a whitelisted keyword.
	KeywordTagWhite
	// KeywordTagBlack marks a blacklisted keyword.
	KeywordTagBlack
)
// KeywordDaoImpl is the stateless MySQL-backed implementation of KeywordDao.
type KeywordDaoImpl struct{}

// Keyword mirrors one row of the keywords table.
type Keyword struct {
	ID            int64     `db:"id"`
	Area          int       `db:"area"`
	// Tag is one of the KeywordTag* constants.
	Tag           int       `db:"tag"`
	// State: 0 default, 1 deleted (soft delete).
	State         int       `db:"state"`
	HitCounts     int64     `db:"hit_counts"`
	RegexpName    string    `db:"regexp_name"`
	Content       string    `db:"content"`
	OriginContent string    `db:"origin_content"`
	CTime         time.Time `db:"ctime"`
	MTime         time.Time `db:"mtime"`
}
// NewKeywordDao returns a ready-to-use, stateless KeywordDaoImpl.
func NewKeywordDao() *KeywordDaoImpl {
	return new(KeywordDaoImpl)
}
// GetRubbish fetches keywords eligible for recycling: rows in the given
// state, below the hit-count threshold, with one of the given tags and
// created inside [StartTime, EndTime] — at most cond.PerPage of them.
//
// NOTE(review): values are interpolated into the SQL template (no bound
// parameters); cond must carry trusted, program-generated strings only.
func (*KeywordDaoImpl) GetRubbish(ctx context.Context, cond *Condition) ([]*Keyword, error) {
	querySQL := fmt.Sprintf(selectKeywordNeedRecycledSQL,
		cond.State,
		cond.HitCounts,
		util.StrSliToSQLVarchars(cond.Tags),
		cond.StartTime,
		cond.EndTime,
		cond.PerPage,
	)
	log.Info("get rubbish keywords rawSQL: %s", querySQL)
	// IDIOM: the old unused named results and the ks/err relay added
	// nothing — return the query result directly.
	return queryKeywords(ctx, db, querySQL)
}
// GetByOffsetLimit pages through live (state = 0) keywords of one area:
// rows with id > cond.Offset restricted to cond.Tags, capped at cond.Limit.
func (*KeywordDaoImpl) GetByOffsetLimit(ctx context.Context, cond *Condition) (keywords []*Keyword, err error) {
	tagList := util.StrSliToSQLVarchars(cond.Tags)
	querySQL := fmt.Sprintf(selectKeywordByOffsetLimitSQL, cond.Area, cond.Offset, tagList, cond.Limit)
	return queryKeywords(ctx, db, querySQL)
}
// GetByCond assembles a filtered SELECT over the keywords table from the
// populated fields of cond and returns the matching rows plus a total
// count. When cond.Pagination is set, a COUNT query runs first and the
// page window is applied; otherwise totalCounts falls back to the number
// of rows returned.
//
// NOTE(review): condition values are spliced into the SQL text with
// fmt.Sprintf rather than bound parameters — cond must never carry
// untrusted input.
func (*KeywordDaoImpl) GetByCond(ctx context.Context, cond *Condition) (keywords []*Keyword, totalCounts int64, err error) {
	sqlConds := make([]string, 0)
	if cond.Search != "" {
		sqlConds = append(sqlConds, fmt.Sprintf("content LIKE '%%%s%%'", cond.Search))
	}
	if len(cond.Contents) > 0 {
		// BUG FIX: this IN(...) list was rendered from cond.Tags, so a
		// contents filter silently matched against tag values instead of
		// the requested contents.
		sqlConds = append(sqlConds, fmt.Sprintf("content IN (%s)", util.StrSliToSQLVarchars(cond.Contents)))
	}
	if cond.LastModifiedTime != "" {
		sqlConds = append(sqlConds, fmt.Sprintf("mtime >= '%s'", cond.LastModifiedTime))
		// Incremental fetches by mtime must not be re-ordered below.
		cond.OrderBy = ""
	}
	if cond.StartTime != "" || cond.EndTime != "" {
		if cond.StartTime != "" && cond.EndTime != "" {
			sqlConds = append(sqlConds, fmt.Sprintf("ctime BETWEEN '%s' AND '%s'", cond.StartTime, cond.EndTime))
		} else if cond.StartTime != "" {
			sqlConds = append(sqlConds, fmt.Sprintf("ctime >= '%s'", cond.StartTime))
		} else {
			sqlConds = append(sqlConds, fmt.Sprintf("ctime <= '%s'", cond.EndTime))
		}
	}
	if cond.State != "" {
		sqlConds = append(sqlConds, fmt.Sprintf("state = %s", cond.State))
	}
	if cond.Area != "" {
		sqlConds = append(sqlConds, fmt.Sprintf("area = %s", cond.Area))
	}
	if len(cond.Tags) > 0 {
		sqlConds = append(sqlConds, fmt.Sprintf("tag IN(%s)", util.StrSliToSQLVarchars(cond.Tags)))
	}
	var optionSQL string
	if len(sqlConds) > 0 {
		optionSQL = fmt.Sprintf("WHERE %s", strings.Join(sqlConds, " AND "))
	}
	var limitSQL string
	if cond.Pagination != nil {
		queryCountsSQL := fmt.Sprintf(selectKeywordCountsSQL, optionSQL)
		log.Info("queryCounts sql: %s", queryCountsSQL)
		totalCounts, err = GetTotalCounts(ctx, db, queryCountsSQL)
		if err != nil {
			return nil, 0, err
		}
		offset, limit := cond.OffsetLimit(totalCounts)
		if limit == 0 {
			// The requested page lies past the end of the result set.
			return nil, 0, ErrResourceNotExist
		}
		limitSQL = fmt.Sprintf("LIMIT %d, %d", offset, limit)
	}
	if cond.OrderBy != "" {
		optionSQL = fmt.Sprintf("%s ORDER BY %s %s", optionSQL, cond.OrderBy, cond.Order)
	}
	if limitSQL != "" {
		optionSQL = fmt.Sprintf("%s %s", optionSQL, limitSQL)
	}
	querySQL := fmt.Sprintf(selectKeywordsByCondSQL, optionSQL)
	log.Info("OptionSQL(%s), GetByCondSQL(%s)", optionSQL, querySQL)
	keywords, err = queryKeywords(ctx, db, querySQL)
	if err != nil {
		return nil, 0, err
	}
	if totalCounts == 0 {
		totalCounts = int64(len(keywords))
	}
	return keywords, totalCounts, nil
}
// GetByAreaAndContents loads keywords for cond.Area whose content is in
// cond.Contents. The returned slice is aligned with cond.Contents: slot i
// holds the match for cond.Contents[i] (case-insensitive) or nil when none
// was found. When a content matches several rows, the last row wins.
func (*KeywordDaoImpl) GetByAreaAndContents(ctx context.Context,
	cond *Condition) ([]*Keyword, error) {
	rawSQL := fmt.Sprintf(selectKeywordByAreaAndContentsSQL,
		cond.Area, util.StrSliToSQLVarchars(cond.Contents))
	found, err := queryKeywords(ctx, db, rawSQL)
	if err != nil {
		return nil, err
	}
	aligned := make([]*Keyword, len(cond.Contents))
	for idx, content := range cond.Contents {
		for _, kw := range found {
			if strings.EqualFold(kw.Content, content) {
				aligned[idx] = kw
			}
		}
	}
	return aligned, nil
}
// GetByAreaAndContent returns the single keyword matching cond.Area and the
// sole entry of cond.Contents, or ErrResourceNotExist when absent.
func (kdi *KeywordDaoImpl) GetByAreaAndContent(ctx context.Context,
	cond *Condition) (*Keyword, error) {
	ks, err := kdi.GetByAreaAndContents(ctx, cond)
	if err != nil {
		return nil, err
	}
	// GetByAreaAndContents sizes its result by len(cond.Contents), so with an
	// empty Contents list ks is empty and the bare ks[0] would panic; treat
	// that case as "not found" instead.
	if len(ks) == 0 || ks[0] == nil {
		return nil, ErrResourceNotExist
	}
	return ks[0], nil
}
// Update persists the mutable fields of k and returns the row as stored.
func (kdi *KeywordDaoImpl) Update(ctx context.Context,
	k *Keyword) (*Keyword, error) {
	err := updateKeyword(ctx, db, k)
	if err != nil {
		return nil, err
	}
	return kdi.GetByID(ctx, k.ID)
}
// Insert stores a new keyword row and returns the freshly persisted record.
func (kdi *KeywordDaoImpl) Insert(ctx context.Context, k *Keyword) (*Keyword, error) {
	err := insertKeyword(ctx, db, k)
	if err != nil {
		return nil, err
	}
	return kdi.GetByID(ctx, k.ID)
}
// DeleteByIDs deletes the keywords with the given IDs and returns the rows
// as they look after the delete statement ran.
func (kdi *KeywordDaoImpl) DeleteByIDs(ctx context.Context, ids []int64) ([]*Keyword, error) {
	err := deleteKeywordByIDs(ctx, db, ids)
	if err != nil {
		return nil, err
	}
	return kdi.GetByIDs(ctx, ids)
}
// GetByID loads one keyword by primary key; ErrResourceNotExist when absent.
func (kdi *KeywordDaoImpl) GetByID(ctx context.Context, id int64) (*Keyword, error) {
	res, err := kdi.GetByIDs(ctx, []int64{id})
	if err != nil {
		return nil, err
	}
	if res[0] == nil {
		return nil, ErrResourceNotExist
	}
	return res[0], nil
}
// GetByIDs loads keywords by primary keys. The result slice is aligned with
// ids: position i holds the keyword for ids[i], or nil when it was not found.
func (*KeywordDaoImpl) GetByIDs(ctx context.Context, ids []int64) ([]*Keyword, error) {
	rawSQL := fmt.Sprintf(selectKeywordByIDsSQL, util.IntSliToSQLVarchars(ids))
	found, err := queryKeywords(ctx, db, rawSQL)
	if err != nil {
		return nil, err
	}
	aligned := make([]*Keyword, len(ids))
	for idx, id := range ids {
		for _, kw := range found {
			if kw.ID == id {
				aligned[idx] = kw
			}
		}
	}
	return aligned, nil
}
// insertKeyword writes a new keyword row (hit count starts at 1) and stores
// the generated auto-increment ID back into k.ID.
func insertKeyword(ctx context.Context, executer Executer, k *Keyword) error {
	const defaultHitCount = 1
	res, err := executer.Exec(ctx, insertKeywordSQL,
		k.Area,
		k.Content,
		k.RegexpName,
		k.Tag,
		defaultHitCount,
		k.OriginContent,
	)
	if err != nil {
		log.Error("%v", err)
		return err
	}
	id, err := res.LastInsertId()
	if err != nil {
		log.Error("%v", err)
		return err
	}
	k.ID = id
	return nil
}
// updateKeyword rewrites the mutable columns of k and refreshes mtime to now.
func updateKeyword(ctx context.Context, executer Executer, k *Keyword) error {
	args := []interface{}{
		k.Content,
		k.RegexpName,
		k.Tag,
		k.HitCounts,
		k.State,
		k.OriginContent,
		k.CTime,
		time.Now(),
		k.ID,
	}
	if _, err := executer.Exec(ctx, updateKeywordSQL, args...); err != nil {
		log.Error("%v", err)
		return err
	}
	return nil
}
// deleteKeywordByIDs deletes the given keyword IDs, stamping the current time
// into the statement's placeholder.
func deleteKeywordByIDs(ctx context.Context, executer Executer, ids []int64) error {
	rawSQL := fmt.Sprintf(deleteKeywordByIDsSQL, util.IntSliToSQLVarchars(ids))
	_, err := executer.Exec(ctx, rawSQL, time.Now())
	if err != nil {
		log.Error("Error: %v, RawSQL: %s", err, rawSQL)
	}
	return err
}
// queryKeywords runs rawSQL against q and maps the result set to keywords.
// An empty result set is reported as ErrResourceNotExist.
func queryKeywords(ctx context.Context, q Querier, rawSQL string) ([]*Keyword, error) {
	// NOTE: Query() does not return sql.ErrNoRows for an empty result set
	// (only QueryRow does), so the len(ks) == 0 check below is the reliable
	// "not found" signal; this branch is kept purely as a defensive measure.
	rows, err := q.Query(ctx, rawSQL)
	if err == sql.ErrNoRows {
		return nil, ErrResourceNotExist
	} else if err != nil {
		log.Error("ctx: %+v, Error: %v, RawSQL: %s", ctx, err, rawSQL)
		return nil, err
	}
	defer rows.Close()
	log.Info("Query sql: %q", rawSQL)
	ks, err := mapRowToKeywords(rows)
	if err != nil {
		return nil, err
	}
	// Translate "no rows" into the package-level sentinel for callers.
	if len(ks) == 0 {
		return nil, ErrResourceNotExist
	}
	return ks, nil
}
// mapRowToKeywords scans every row into a Keyword value.
// The Scan destinations below must stay in the same order as the selected
// column list of the keyword SELECT statements.
func mapRowToKeywords(rows *sql.Rows) (ks []*Keyword, err error) {
	for rows.Next() {
		k := Keyword{}
		err = rows.Scan(
			&k.ID,
			&k.Area,
			&k.Content,
			&k.RegexpName,
			&k.Tag,
			&k.HitCounts,
			&k.State,
			&k.OriginContent,
			&k.CTime,
			&k.MTime,
		)
		if err != nil {
			log.Error("%v", err)
			return nil, err
		}
		ks = append(ks, &k)
	}
	// Surface any iteration error (e.g. connection dropped mid-scan).
	if err = rows.Err(); err != nil {
		log.Error("%v", err)
		return nil, err
	}
	return ks, nil
}

View File

@@ -0,0 +1,151 @@
package dao
import (
"context"
"fmt"
"go-common/app/service/main/antispam/util"
"math/rand"
"testing"
"github.com/smartystreets/goconvey/convey"
)
// testKeywordDaoImplGetRubbish exercises GetRubbish; the lowercase name means
// `go test` skips it — presumably because it needs a live MySQL fixture (kwi).
func testKeywordDaoImplGetRubbish(t *testing.T) {
	var (
		c    = context.TODO()
		cond = &Condition{Pagination: &util.Pagination{CurPage: 1, PerPage: 10}, Tags: []string{"reply"}, Area: "reply", Offset: "1", State: "0", HitCounts: "0", StartTime: "2018-8-1 16:36:48", EndTime: "2018-8-21 16:36:48"}
	)
	convey.Convey("GetRubbish", t, func(ctx convey.C) {
		_, err := kwi.GetRubbish(c, cond)
		ctx.Convey("Then err should be nil.keywords should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}

// testKeywordDaoImplGetByOffsetLimit exercises GetByOffsetLimit (skipped by
// the runner due to the lowercase name).
func testKeywordDaoImplGetByOffsetLimit(t *testing.T) {
	var (
		c    = context.TODO()
		cond = &Condition{Pagination: &util.Pagination{CurPage: 1, PerPage: 10}, Tags: []string{"reply"}, Area: "reply", Offset: "1", State: "0", HitCounts: "0", StartTime: "2018-8-1 16:36:48", EndTime: "2018-8-21 16:36:48"}
	)
	convey.Convey("GetByOffsetLimit", t, func(ctx convey.C) {
		keywords, err := kwi.GetByOffsetLimit(c, cond)
		ctx.Convey("Then err should be nil.keywords should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(keywords, convey.ShouldNotBeNil)
		})
	})
}

// testKeywordDaoImplGetByCond exercises GetByCond (skipped: lowercase name).
func testKeywordDaoImplGetByCond(t *testing.T) {
	var (
		c    = context.TODO()
		cond = &Condition{Pagination: &util.Pagination{CurPage: 1, PerPage: 10}, Tags: []string{"reply"}, Area: "reply", Offset: "1", State: "0", HitCounts: "0", StartTime: "2018-8-1 16:36:48", EndTime: "2018-8-21 16:36:48"}
	)
	convey.Convey("GetByCond", t, func(ctx convey.C) {
		keywords, totalCounts, err := kwi.GetByCond(c, cond)
		ctx.Convey("Then err should be nil.keywords,totalCounts should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(totalCounts, convey.ShouldNotBeNil)
			ctx.So(keywords, convey.ShouldNotBeNil)
		})
	})
}

// testKeywordDaoImplGetByAreaAndContents exercises GetByAreaAndContents
// (skipped: lowercase name).
func testKeywordDaoImplGetByAreaAndContents(t *testing.T) {
	var (
		c    = context.TODO()
		cond = &Condition{Pagination: &util.Pagination{CurPage: 1, PerPage: 10}, Tags: []string{"reply"}, Area: "reply", Offset: "1", State: "0", HitCounts: "0", StartTime: "2018-8-1 16:36:48", EndTime: "2018-8-21 16:36:48"}
	)
	convey.Convey("GetByAreaAndContents", t, func(ctx convey.C) {
		p1, err := kwi.GetByAreaAndContents(c, cond)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// testKeywordDaoImplGetByAreaAndContent exercises GetByAreaAndContent
// (skipped: lowercase name).
func testKeywordDaoImplGetByAreaAndContent(t *testing.T) {
	var (
		c    = context.TODO()
		cond = &Condition{Pagination: &util.Pagination{CurPage: 1, PerPage: 10}, Tags: []string{"reply"}, Area: "reply", Offset: "1", State: "0", HitCounts: "0", StartTime: "2018-8-1 16:36:48", EndTime: "2018-8-21 16:36:48"}
	)
	convey.Convey("GetByAreaAndContent", t, func(ctx convey.C) {
		p1, err := kwi.GetByAreaAndContent(c, cond)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// TestKeywordDaoImplUpdate updates keyword 1 with a random content string.
func TestKeywordDaoImplUpdate(t *testing.T) {
	var (
		c = context.TODO()
		k = &Keyword{ID: 1, Content: fmt.Sprint(rand.Int63())}
	)
	convey.Convey("Update", t, func(ctx convey.C) {
		p1, err := kwi.Update(c, k)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// TestKeywordDaoImplInsert inserts a keyword with random content to avoid
// unique-content collisions across runs.
func TestKeywordDaoImplInsert(t *testing.T) {
	var (
		c = context.TODO()
		k = &Keyword{Content: fmt.Sprint(rand.Int63())}
	)
	convey.Convey("Insert", t, func(ctx convey.C) {
		p1, err := kwi.Insert(c, k)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// TestKeywordDaoImplDeleteByIDs deletes keyword 1 and expects the row back.
func TestKeywordDaoImplDeleteByIDs(t *testing.T) {
	var (
		c   = context.TODO()
		ids = []int64{1}
	)
	convey.Convey("DeleteByIDs", t, func(ctx convey.C) {
		p1, err := kwi.DeleteByIDs(c, ids)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// TestKeywordDaoImplGetByID fetches keyword 1.
func TestKeywordDaoImplGetByID(t *testing.T) {
	var (
		c  = context.TODO()
		id = int64(1)
	)
	convey.Convey("GetByID", t, func(ctx convey.C) {
		p1, err := kwi.GetByID(c, id)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// TestKeywordDaoImplGetByIDs fetches keywords 1-3 in one aligned batch.
func TestKeywordDaoImplGetByIDs(t *testing.T) {
	var (
		c   = context.TODO()
		ids = []int64{1, 2, 3}
	)
	convey.Convey("GetByIDs", t, func(ctx convey.C) {
		p1, err := kwi.GetByIDs(c, ids)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

View File

@@ -0,0 +1,77 @@
package dao
import (
"context"
"errors"
"go-common/app/service/main/antispam/conf"
"go-common/library/database/sql"
"go-common/library/log"
)
// Business areas protected by antispam; values start at 1 so the zero value
// never matches a real area.
const (
	// AreaReply identifies the reply area.
	AreaReply int = iota + 1
	// AreaIMessage identifies the IMessage area.
	AreaIMessage
	// AreaLiveDM identifies the live danmaku area.
	AreaLiveDM
	// AreaMainSiteDM identifies the main-site danmaku area.
	AreaMainSiteDM
)

// Row lifecycle states.
const (
	// StateDefault is the initial (active) state of a row.
	StateDefault int = iota
	// StateDeleted marks a soft-deleted row.
	StateDeleted
)

var (
	// ErrPingDao reports a failed storage health check.
	ErrPingDao = errors.New("Ping dao error")
	// ErrResourceNotExist reports an empty query result.
	ErrResourceNotExist = errors.New("Resource Not Exist")
	// ErrParams reports invalid caller-supplied parameters.
	ErrParams = errors.New("wrong params")
)
// GetTotalCounts executes a COUNT(...) statement and returns the scanned total.
func GetTotalCounts(ctx context.Context, q Querier, selectCountsSQL string) (int64, error) {
	var total int64
	err := q.QueryRow(ctx, selectCountsSQL).Scan(&total)
	if err != nil {
		log.Error("Error: %v, sql: %s", err, selectCountsSQL)
		return 0, err
	}
	log.Info("GetTotalCounts query sql: %s", selectCountsSQL)
	return total, nil
}
// PingMySQL checks the shared MySQL handle when it has been initialized;
// an uninitialized handle is treated as healthy.
func PingMySQL(ctx context.Context) error {
	if db == nil {
		return nil
	}
	err := db.Ping(ctx)
	if err != nil {
		log.Error("%v", err)
	}
	return err
}
// Close releases the shared MySQL handle when it has been initialized.
func Close() {
	if db == nil {
		return
	}
	db.Close()
}
// Init lazily creates the shared MySQL handle from the antispam configuration
// and reports whether the handle is available afterwards.
// The parameter is named cfg (not conf) to avoid shadowing the imported
// conf package inside the function body.
func Init(cfg *conf.Config) (ok bool) {
	if db == nil {
		db = sql.NewMySQL(cfg.MySQL.AntiSpam)
	}
	return db != nil
}
var db *sql.DB

View File

@@ -0,0 +1,28 @@
package dao
import (
"context"
"testing"
"github.com/smartystreets/goconvey/convey"
)
// TestDaoPingMySQL verifies the MySQL health check succeeds (it is a no-op
// when the package-level db handle was never initialized).
func TestDaoPingMySQL(t *testing.T) {
	var (
		c = context.TODO()
	)
	convey.Convey("PingMySQL", t, func(ctx convey.C) {
		err := PingMySQL(c)
		ctx.Convey("Then err should be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}

// TestDaoClose verifies Close runs without panicking; it returns nothing.
func TestDaoClose(t *testing.T) {
	convey.Convey("Close", t, func(ctx convey.C) {
		Close()
		ctx.Convey("No return values", func(ctx convey.C) {
		})
	})
}

View File

@@ -0,0 +1,361 @@
package dao
import (
"context"
"fmt"
"go-common/app/service/main/antispam/model"
"go-common/library/cache/redis"
"go-common/library/log"
)
// Redis key templates used by the antispam cache layer.
const (
	_regexpsKey           = "regexps"
	_localCountsKey       = "resource_id:%d:keyword_id:%d:local_limit_counts"
	_totalCountsKey       = "keyword_id:%d:total_counts"
	_globalCountsKey      = "keyword_id:%d:global_limit_counts"
	_keywordsSenderIDsKey = "keyword_id:%d:sender_ids"
	_rulesKey             = "rule:area:%s:limit_type:%s"
	_areaSendersKey       = "area:%s:sender_id:%d"
)

// sendersKey is the sorted set of sender IDs that hit keywordID.
func sendersKey(keywordID int64) string {
	return fmt.Sprintf(_keywordsSenderIDsKey, keywordID)
}

// areaSendersKey is the per-area counter key for a single sender.
func areaSendersKey(area string, senderID int64) string {
	return fmt.Sprintf(_areaSendersKey, area, senderID)
}

// totalCountsKey is the total hit counter key for a keyword.
func totalCountsKey(keywordID int64) string {
	return fmt.Sprintf(_totalCountsKey, keywordID)
}

// localCountsKey is the per-resource (oid) hit counter key for a keyword.
// Note the template puts resource_id first, so oid is passed before keywordID.
func localCountsKey(keywordID, oid int64) string {
	return fmt.Sprintf(_localCountsKey, oid, keywordID)
}

// globalCountsKey is the global hit counter key for a keyword.
func globalCountsKey(keywordID int64) string {
	return fmt.Sprintf(_globalCountsKey, keywordID)
}

// rulesKey is the cache key of the rule list for an (area, limit type) pair.
func rulesKey(area, limitType string) string {
	return fmt.Sprintf(_rulesKey, area, limitType)
}
// pingRedis checks connectivity by issuing a write through the pool.
func (d *Dao) pingRedis(c context.Context) error {
	conn := d.redis.Get(c)
	defer conn.Close()
	_, err := conn.Do("SET", "PING", "PONG")
	return err
}
// CntSendersCache returns the cardinality of the sender set of keywordID.
func (d *Dao) CntSendersCache(c context.Context, keywordID int64) (cnt int64, err error) {
	conn := d.redis.Get(c)
	defer conn.Close()
	key := sendersKey(keywordID)
	cnt, err = redis.Int64(conn.Do("ZCARD", key))
	if err != nil {
		log.Error("redis.Int64(conn.Do(ZCARD, %s)) error(%v)", key, err)
	}
	return
}
// GlobalLocalLimitCache fetches, in one pipelined round trip, the global hit
// counter of keywordID and its per-resource counter for oid. It returns the
// two counts in that order; a missing key counts as 0 (redis.ErrNil).
func (d *Dao) GlobalLocalLimitCache(c context.Context, keywordID, oid int64) ([]int64, error) {
	var (
		globalKey = globalCountsKey(keywordID)
		localKey  = localCountsKey(keywordID, oid)
		conn      = d.redis.Get(c)
	)
	defer conn.Close()
	// Pipeline both GETs, then flush and read the replies in send order.
	if err := conn.Send("GET", globalKey); err != nil {
		log.Error("%v", err)
		return nil, err
	}
	if err := conn.Send("GET", localKey); err != nil {
		log.Error("%v", err)
		return nil, err
	}
	if err := conn.Flush(); err != nil {
		log.Error("%v", err)
		return nil, err
	}
	counts := make([]int64, 0)
	for i := 0; i < 2; i++ {
		count, err := redis.Int64(conn.Receive())
		// ErrNil means the key does not exist yet; treat it as a zero count.
		if err == nil || err == redis.ErrNil {
			counts = append(counts, count)
			continue
		}
		log.Error("%v", err)
		return nil, err
	}
	return counts, nil
}
// IncrGlobalLimitCache bumps the global hit counter of keywordID and returns
// the new value.
func (d *Dao) IncrGlobalLimitCache(c context.Context, keywordID int64) (int64, error) {
	conn := d.redis.Get(c)
	defer conn.Close()
	cnt, err := redis.Int64(conn.Do("INCR", globalCountsKey(keywordID)))
	if err != nil {
		log.Error("%v", err)
		return 0, err
	}
	return cnt, nil
}
// IncrLocalLimitCache bumps the per-resource (oid) hit counter of keywordID
// and returns the new value.
func (d *Dao) IncrLocalLimitCache(c context.Context, keywordID, oid int64) (int64, error) {
	conn := d.redis.Get(c)
	defer conn.Close()
	cnt, err := redis.Int64(conn.Do("INCR", localCountsKey(keywordID, oid)))
	if err != nil {
		log.Error("%v", err)
		return 0, err
	}
	return cnt, nil
}
// LocalLimitExpire sets a TTL of dur seconds on the per-resource counter of
// keywordID.
func (d *Dao) LocalLimitExpire(c context.Context, keywordID, oid, dur int64) error {
	conn := d.redis.Get(c)
	defer conn.Close()
	_, err := conn.Do("EXPIRE", localCountsKey(keywordID, oid), dur)
	if err != nil {
		log.Error("%v", err)
	}
	return err
}
// GlobalLimitExpire sets a TTL of dur seconds on the global counter of
// keywordID.
func (d *Dao) GlobalLimitExpire(c context.Context, keywordID, dur int64) error {
	conn := d.redis.Get(c)
	defer conn.Close()
	_, err := conn.Do("EXPIRE", globalCountsKey(keywordID), dur)
	if err != nil {
		log.Error("%v", err)
	}
	return err
}
// DelRegexpCache drops the cached regexps entry.
func (d *Dao) DelRegexpCache(c context.Context) error {
	conn := d.redis.Get(c)
	defer conn.Close()
	_, err := conn.Do("DEL", _regexpsKey)
	if err != nil {
		log.Error("%v", err)
	}
	return err
}
// DelRulesCache drops the cached rule list for the (area, limitType) pair.
func (d *Dao) DelRulesCache(c context.Context, area, limitType string) error {
	conn := d.redis.Get(c)
	defer conn.Close()
	_, err := conn.Do("DEL", rulesKey(area, limitType))
	if err != nil {
		log.Error("%v", err)
	}
	return err
}
// AreaSendersExpire sets a TTL of dur seconds on the per-area counter of
// senderID.
func (d *Dao) AreaSendersExpire(c context.Context, area string, senderID, dur int64) error {
	conn := d.redis.Get(c)
	defer conn.Close()
	_, err := conn.Do("EXPIRE", areaSendersKey(area, senderID), dur)
	if err != nil {
		log.Error("%v", err)
	}
	return err
}
// IncrAreaSendersCache bumps the per-area counter of senderID and returns
// the new value.
func (d *Dao) IncrAreaSendersCache(c context.Context, area string, senderID int64) (int64, error) {
	conn := d.redis.Get(c)
	defer conn.Close()
	cnt, err := redis.Int64(conn.Do("INCR", areaSendersKey(area, senderID)))
	if err != nil {
		log.Error("%v", err)
		return 0, err
	}
	return cnt, nil
}
// AllSendersCache returns every member of the sender set of keywordID.
func (d *Dao) AllSendersCache(c context.Context, keywordID int64) ([]string, error) {
	conn := d.redis.Get(c)
	defer conn.Close()
	members, err := redis.Strings(conn.Do("ZRANGEBYSCORE", sendersKey(keywordID), "-inf", "+inf"))
	if err != nil {
		log.Error("%v", err)
		return nil, err
	}
	return members, nil
}
// SendersCache returns one page of sender IDs from the sorted set of
// keywordID, using Redis LIMIT-style paging.
func (d *Dao) SendersCache(c context.Context, keywordID, limit, offset int64) ([]string, error) {
	var (
		key  = sendersKey(keywordID)
		conn = d.redis.Get(c)
	)
	defer conn.Close()
	// Redis syntax is ZRANGEBYSCORE key min max LIMIT <offset> <count>.
	// The previous code passed limit before offset, inverting the paging
	// window (e.g. LIMIT 10 0 returned nothing instead of the first page).
	r, err := redis.Strings(conn.Do("ZRANGEBYSCORE", key, "-inf", "+inf", "LIMIT", offset, limit))
	if err != nil {
		log.Error("%v", err)
		return nil, err
	}
	return r, nil
}
// TotalLimitExpire sets a TTL of dur seconds on the total counter of
// keywordID.
func (d *Dao) TotalLimitExpire(c context.Context, keywordID, dur int64) error {
	conn := d.redis.Get(c)
	defer conn.Close()
	_, err := conn.Do("EXPIRE", totalCountsKey(keywordID), dur)
	if err != nil {
		log.Error("%v", err)
	}
	return err
}
// IncrTotalLimitCache bumps the total hit counter of keywordID and returns
// the new value.
func (d *Dao) IncrTotalLimitCache(c context.Context, keywordID int64) (int64, error) {
	conn := d.redis.Get(c)
	defer conn.Close()
	cnt, err := redis.Int64(conn.Do("INCR", totalCountsKey(keywordID)))
	if err != nil {
		log.Error("%v", err)
		return 0, err
	}
	return cnt, nil
}
// ZaddSendersCache adds senderID with the given score to the sender set of
// keywordID and returns the set's cardinality afterwards.
func (d *Dao) ZaddSendersCache(c context.Context, keywordID, score, senderID int64) (int64, error) {
	conn := d.redis.Get(c)
	defer conn.Close()
	key := sendersKey(keywordID)
	member := fmt.Sprintf("%d", senderID)
	if _, err := redis.Int64(conn.Do("ZADD", key, score, member)); err != nil {
		log.Error("%v", err)
		return 0, err
	}
	total, err := redis.Int64(conn.Do("ZCARD", key))
	if err != nil {
		log.Error("%v", err)
		return 0, err
	}
	return total, nil
}
// ZremSendersCache removes senderIDStr from the sender set of keywordID and
// returns the number of members removed.
func (d *Dao) ZremSendersCache(c context.Context, keywordID int64, senderIDStr string) (int64, error) {
	conn := d.redis.Get(c)
	defer conn.Close()
	removed, err := redis.Int64(conn.Do("ZREM", sendersKey(keywordID), senderIDStr))
	if err != nil {
		log.Error("%v", err)
		return 0, err
	}
	return removed, nil
}
// DelKeywordRelatedCache deletes, in one pipelined round trip, the total
// counter key and the sender-set key of every keyword in ks.
func (d *Dao) DelKeywordRelatedCache(c context.Context, ks []*model.Keyword) error {
	var conn = d.redis.Get(c)
	defer conn.Close()
	// Queue two DELs per keyword, then flush once.
	for _, v := range ks {
		if err := conn.Send("DEL", totalCountsKey(v.ID)); err != nil {
			log.Error("%v", err)
			return err
		}
		if err := conn.Send("DEL", sendersKey(v.ID)); err != nil {
			log.Error("%v", err)
			return err
		}
	}
	if err := conn.Flush(); err != nil {
		log.Error("%v", err)
		return err
	}
	// Drain exactly one reply per queued command (2 per keyword).
	for i := 0; i < len(ks)*2; i++ {
		if _, err := conn.Receive(); err != nil {
			log.Error("conn.Receive() error(%v)", err)
			return err
		}
	}
	return nil
}
// DelCountRelatedCache deletes, in one pipelined round trip, the global
// counter, the per-resource counter (keyed by k.SenderID) and the sender set
// of keyword k.
func (d *Dao) DelCountRelatedCache(c context.Context, k *model.Keyword) error {
	var conn = d.redis.Get(c)
	defer conn.Close()
	if err := conn.Send("DEL", globalCountsKey(k.ID)); err != nil {
		log.Error("%v", err)
		return err
	}
	// NOTE(review): k.SenderID is used as the oid of localCountsKey here —
	// confirm this is the intended resource ID for the local counter.
	if err := conn.Send("DEL", localCountsKey(k.ID, k.SenderID)); err != nil {
		log.Error("%v", err)
		return err
	}
	if err := conn.Send("DEL", sendersKey(k.ID)); err != nil {
		log.Error("%v", err)
		return err
	}
	if err := conn.Flush(); err != nil {
		log.Error("%v", err)
		return err
	}
	// Drain one reply per queued DEL.
	for i := 0; i < 3; i++ {
		if _, err := conn.Receive(); err != nil {
			log.Error("conn.Receive() error(%v)", err)
			return err
		}
	}
	return nil
}

View File

@@ -0,0 +1,354 @@
package dao
import (
"context"
"go-common/app/service/main/antispam/model"
"testing"
"github.com/smartystreets/goconvey/convey"
)
// TestDaosendersKey smoke-tests the sendersKey formatter.
func TestDaosendersKey(t *testing.T) {
	var (
		keywordID = int64(0)
	)
	convey.Convey("sendersKey", t, func(ctx convey.C) {
		p1 := sendersKey(keywordID)
		ctx.Convey("Then p1 should not be nil.", func(ctx convey.C) {
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// TestDaoareaSendersKey smoke-tests the areaSendersKey formatter.
func TestDaoareaSendersKey(t *testing.T) {
	var (
		area     = ""
		senderID = int64(0)
	)
	convey.Convey("areaSendersKey", t, func(ctx convey.C) {
		p1 := areaSendersKey(area, senderID)
		ctx.Convey("Then p1 should not be nil.", func(ctx convey.C) {
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// TestDaototalCountsKey smoke-tests the totalCountsKey formatter.
func TestDaototalCountsKey(t *testing.T) {
	var (
		keywordID = int64(0)
	)
	convey.Convey("totalCountsKey", t, func(ctx convey.C) {
		p1 := totalCountsKey(keywordID)
		ctx.Convey("Then p1 should not be nil.", func(ctx convey.C) {
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// TestDaolocalCountsKey smoke-tests the localCountsKey formatter.
func TestDaolocalCountsKey(t *testing.T) {
	var (
		keywordID = int64(0)
		oid       = int64(0)
	)
	convey.Convey("localCountsKey", t, func(ctx convey.C) {
		p1 := localCountsKey(keywordID, oid)
		ctx.Convey("Then p1 should not be nil.", func(ctx convey.C) {
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// TestDaoglobalCountsKey smoke-tests the globalCountsKey formatter.
func TestDaoglobalCountsKey(t *testing.T) {
	var (
		keywordID = int64(0)
	)
	convey.Convey("globalCountsKey", t, func(ctx convey.C) {
		p1 := globalCountsKey(keywordID)
		ctx.Convey("Then p1 should not be nil.", func(ctx convey.C) {
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// TestDaorulesKey smoke-tests the rulesKey formatter.
func TestDaorulesKey(t *testing.T) {
	var (
		area      = ""
		limitType = ""
	)
	convey.Convey("rulesKey", t, func(ctx convey.C) {
		p1 := rulesKey(area, limitType)
		ctx.Convey("Then p1 should not be nil.", func(ctx convey.C) {
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// TestDaopingRedis pings Redis through the package-level dao fixture d.
func TestDaopingRedis(t *testing.T) {
	var (
		c = context.TODO()
	)
	convey.Convey("pingRedis", t, func(ctx convey.C) {
		err := d.pingRedis(c)
		ctx.Convey("Then err should be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}

// TestDaoCntSendersCache exercises CntSendersCache against live Redis.
func TestDaoCntSendersCache(t *testing.T) {
	var (
		c         = context.TODO()
		keywordID = int64(0)
	)
	convey.Convey("CntSendersCache", t, func(ctx convey.C) {
		cnt, err := d.CntSendersCache(c, keywordID)
		ctx.Convey("Then err should be nil.cnt should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(cnt, convey.ShouldNotBeNil)
		})
	})
}

// TestDaoGlobalLocalLimitCache exercises the pipelined double GET.
func TestDaoGlobalLocalLimitCache(t *testing.T) {
	var (
		c         = context.TODO()
		keywordID = int64(0)
		oid       = int64(0)
	)
	convey.Convey("GlobalLocalLimitCache", t, func(ctx convey.C) {
		p1, err := d.GlobalLocalLimitCache(c, keywordID, oid)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// TestDaoIncrGlobalLimitCache exercises the global counter INCR.
func TestDaoIncrGlobalLimitCache(t *testing.T) {
	var (
		c         = context.TODO()
		keywordID = int64(0)
	)
	convey.Convey("IncrGlobalLimitCache", t, func(ctx convey.C) {
		p1, err := d.IncrGlobalLimitCache(c, keywordID)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// TestDaoIncrLocalLimitCache exercises the per-resource counter INCR.
func TestDaoIncrLocalLimitCache(t *testing.T) {
	var (
		c         = context.TODO()
		keywordID = int64(0)
		oid       = int64(0)
	)
	convey.Convey("IncrLocalLimitCache", t, func(ctx convey.C) {
		p1, err := d.IncrLocalLimitCache(c, keywordID, oid)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// TestDaoLocalLimitExpire exercises EXPIRE on the per-resource counter.
func TestDaoLocalLimitExpire(t *testing.T) {
	var (
		c         = context.TODO()
		keywordID = int64(0)
		oid       = int64(0)
		dur       = int64(0)
	)
	convey.Convey("LocalLimitExpire", t, func(ctx convey.C) {
		err := d.LocalLimitExpire(c, keywordID, oid, dur)
		ctx.Convey("Then err should be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}

// TestDaoGlobalLimitExpire exercises EXPIRE on the global counter.
func TestDaoGlobalLimitExpire(t *testing.T) {
	var (
		c         = context.TODO()
		keywordID = int64(0)
		dur       = int64(0)
	)
	convey.Convey("GlobalLimitExpire", t, func(ctx convey.C) {
		err := d.GlobalLimitExpire(c, keywordID, dur)
		ctx.Convey("Then err should be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}

// TestDaoDelRegexpCache exercises DEL on the regexps cache key.
func TestDaoDelRegexpCache(t *testing.T) {
	var (
		c = context.TODO()
	)
	convey.Convey("DelRegexpCache", t, func(ctx convey.C) {
		err := d.DelRegexpCache(c)
		ctx.Convey("Then err should be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}

// TestDaoDelRulesCache exercises DEL on a rules cache key.
func TestDaoDelRulesCache(t *testing.T) {
	var (
		c         = context.TODO()
		area      = ""
		limitType = ""
	)
	convey.Convey("DelRulesCache", t, func(ctx convey.C) {
		err := d.DelRulesCache(c, area, limitType)
		ctx.Convey("Then err should be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}

// TestDaoAreaSendersExpire exercises EXPIRE on the per-area sender counter.
func TestDaoAreaSendersExpire(t *testing.T) {
	var (
		c        = context.TODO()
		area     = ""
		senderID = int64(0)
		dur      = int64(0)
	)
	convey.Convey("AreaSendersExpire", t, func(ctx convey.C) {
		err := d.AreaSendersExpire(c, area, senderID, dur)
		ctx.Convey("Then err should be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}

// TestDaoIncrAreaSendersCache exercises the per-area sender counter INCR.
func TestDaoIncrAreaSendersCache(t *testing.T) {
	var (
		c        = context.TODO()
		area     = ""
		senderID = int64(0)
	)
	convey.Convey("IncrAreaSendersCache", t, func(ctx convey.C) {
		p1, err := d.IncrAreaSendersCache(c, area, senderID)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// TestDaoAllSendersCache exercises the unbounded ZRANGEBYSCORE read.
func TestDaoAllSendersCache(t *testing.T) {
	var (
		c         = context.TODO()
		keywordID = int64(0)
	)
	convey.Convey("AllSendersCache", t, func(ctx convey.C) {
		p1, err := d.AllSendersCache(c, keywordID)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// TestDaoSendersCache exercises the paged ZRANGEBYSCORE read.
func TestDaoSendersCache(t *testing.T) {
	var (
		c         = context.TODO()
		keywordID = int64(0)
		limit     = int64(0)
		offset    = int64(0)
	)
	convey.Convey("SendersCache", t, func(ctx convey.C) {
		p1, err := d.SendersCache(c, keywordID, limit, offset)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// TestDaoTotalLimitExpire exercises EXPIRE on the total counter.
func TestDaoTotalLimitExpire(t *testing.T) {
	var (
		c         = context.TODO()
		keywordID = int64(0)
		dur       = int64(0)
	)
	convey.Convey("TotalLimitExpire", t, func(ctx convey.C) {
		err := d.TotalLimitExpire(c, keywordID, dur)
		ctx.Convey("Then err should be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}

// TestDaoIncrTotalLimitCache exercises the total counter INCR.
func TestDaoIncrTotalLimitCache(t *testing.T) {
	var (
		c         = context.TODO()
		keywordID = int64(0)
	)
	convey.Convey("IncrTotalLimitCache", t, func(ctx convey.C) {
		p1, err := d.IncrTotalLimitCache(c, keywordID)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// TestDaoZaddSendersCache exercises ZADD plus the follow-up ZCARD.
func TestDaoZaddSendersCache(t *testing.T) {
	var (
		c         = context.TODO()
		keywordID = int64(0)
		score     = int64(0)
		senderID  = int64(0)
	)
	convey.Convey("ZaddSendersCache", t, func(ctx convey.C) {
		p1, err := d.ZaddSendersCache(c, keywordID, score, senderID)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// TestDaoZremSendersCache exercises ZREM on the sender set.
func TestDaoZremSendersCache(t *testing.T) {
	var (
		c           = context.TODO()
		keywordID   = int64(1)
		senderIDStr = ""
	)
	convey.Convey("ZremSendersCache", t, func(ctx convey.C) {
		p1, err := d.ZremSendersCache(c, keywordID, senderIDStr)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// TestDaoDelKeywordRelatedCache exercises the pipelined keyword-cache DELs
// with an empty keyword slice.
func TestDaoDelKeywordRelatedCache(t *testing.T) {
	var (
		c  = context.TODO()
		ks = []*model.Keyword{}
	)
	convey.Convey("DelKeywordRelatedCache", t, func(ctx convey.C) {
		err := d.DelKeywordRelatedCache(c, ks)
		ctx.Convey("Then err should be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}

// TestDaoDelCountRelatedCache exercises the pipelined counter DELs for a
// zero-value keyword.
func TestDaoDelCountRelatedCache(t *testing.T) {
	var (
		c = context.TODO()
		k = &model.Keyword{}
	)
	convey.Convey("DelCountRelatedCache", t, func(ctx convey.C) {
		err := d.DelCountRelatedCache(c, k)
		ctx.Convey("Then err should be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
		})
	})
}

View File

@@ -0,0 +1,270 @@
package dao
import (
"context"
"fmt"
"strings"
"time"
"go-common/app/service/main/antispam/util"
"go-common/library/database/sql"
"go-common/library/log"
)
// SQL statements for the regexps table. The Scan order in mapRowToRegexps
// must stay in sync with columnsRegexp.
const (
	columnsRegexp                   = `id, admin_id, area, name, operation, content, state, ctime, mtime`
	selectRegexpCountsSQL           = `SELECT COUNT(1) FROM regexps %s`
	selectRegexpsByCondSQL          = `SELECT ` + columnsRegexp + ` FROM regexps %s`
	selectRegexpByIDsSQL            = `SELECT ` + columnsRegexp + ` FROM regexps WHERE id IN(%s)`
	selectRegexpByContentsSQL       = `SELECT ` + columnsRegexp + ` FROM regexps WHERE content IN(%s)`
	selectRegexpByAreaAndContentSQL = `SELECT ` + columnsRegexp + ` FROM regexps WHERE area = %s AND content IN(%s)`
	insertRegexpSQL                 = `INSERT INTO regexps(id, admin_id, area, name, operation, content, state) VALUES(?, ?, ?, ?, ?, ?, ?)`
	updateRegexpSQL                 = `UPDATE regexps SET admin_id = ?, name = ?, content = ?, operation = ?, state = ?, mtime = ? WHERE id = ?`
)

// Operations a regexp rule can apply to matched content.
const (
	// OperationLimit applies the normal rate limit.
	OperationLimit int = iota
	// OperationPutToWhiteList whitelists the matched content.
	OperationPutToWhiteList
	// OperationRestrictLimit applies a stricter rate limit.
	OperationRestrictLimit
	// OperationIgnore skips any action for the match.
	OperationIgnore
)
// RegexpDaoImpl is a stateless DAO for rows of the regexps table.
type RegexpDaoImpl struct{}

// Regexp mirrors one row of the regexps table; field order matches
// columnsRegexp.
type Regexp struct {
	ID        int64     `db:"id"`
	Area      int       `db:"area"`
	Name      string    `db:"name"`
	AdminID   int64     `db:"admin_id"`
	Operation int       `db:"operation"`
	Content   string    `db:"content"`
	State     int       `db:"state"`
	CTime     time.Time `db:"ctime"`
	MTime     time.Time `db:"mtime"`
}
// NewRegexpDao constructs a stateless regexp DAO.
func NewRegexpDao() *RegexpDaoImpl {
	return new(RegexpDaoImpl)
}
// GetByCond lists regexps filtered by cond.Area/cond.State, optionally
// paginated and ordered. When cond.Pagination is set it first runs a COUNT
// to size the page; totalCounts is only populated in that case.
func (*RegexpDaoImpl) GetByCond(ctx context.Context,
	cond *Condition) (regexps []*Regexp, totalCounts int64, err error) {
	// Build the WHERE clause from the optional filters.
	sqlConds := make([]string, 0)
	if cond.Area != "" {
		sqlConds = append(sqlConds, fmt.Sprintf("area = %s", cond.Area))
	}
	if cond.State != "" {
		sqlConds = append(sqlConds, fmt.Sprintf("state = %s", cond.State))
	}
	var optionSQL string
	if len(sqlConds) > 0 {
		optionSQL = fmt.Sprintf("WHERE %s", strings.Join(sqlConds, " AND "))
	}
	var limitSQL string
	if cond.Pagination != nil {
		// Count first so OffsetLimit can clamp the requested page.
		queryCountsSQL := fmt.Sprintf(selectRegexpCountsSQL, optionSQL)
		totalCounts, err = GetTotalCounts(ctx, db, queryCountsSQL)
		if err != nil {
			return nil, 0, err
		}
		offset, limit := cond.OffsetLimit(totalCounts)
		// A zero limit means the requested page lies beyond the data.
		if limit == 0 {
			return nil, 0, ErrResourceNotExist
		}
		limitSQL = fmt.Sprintf("LIMIT %d, %d", offset, limit)
	}
	// ORDER BY must precede LIMIT in the assembled statement.
	if cond.OrderBy != "" {
		optionSQL = fmt.Sprintf("%s ORDER BY %s %s", optionSQL, cond.OrderBy, cond.Order)
	}
	if limitSQL != "" {
		optionSQL = fmt.Sprintf("%s %s", optionSQL, limitSQL)
	}
	querySQL := fmt.Sprintf(selectRegexpsByCondSQL, optionSQL)
	log.Info("OptionSQL(%s), GetByCondSQL(%s)", optionSQL, querySQL)
	regexps, err = queryRegexps(ctx, db, querySQL)
	if err != nil {
		return nil, totalCounts, err
	}
	return regexps, totalCounts, nil
}
// Update persists the mutable fields of r and returns the row as stored.
func (rdi *RegexpDaoImpl) Update(ctx context.Context, r *Regexp) (*Regexp, error) {
	if err := updateRegexp(ctx, db, r); err != nil {
		return nil, err
	}
	return rdi.GetByID(ctx, r.ID)
}
// Insert stores a new regexp row and returns the freshly persisted record.
func (rdi *RegexpDaoImpl) Insert(ctx context.Context, r *Regexp) (*Regexp, error) {
	if err := insertRegexp(ctx, db, r); err != nil {
		return nil, err
	}
	return rdi.GetByID(ctx, r.ID)
}
// GetByID loads one regexp by primary key; ErrResourceNotExist when absent.
func (rdi *RegexpDaoImpl) GetByID(ctx context.Context, id int64) (*Regexp, error) {
	res, err := rdi.GetByIDs(ctx, []int64{id})
	if err != nil {
		return nil, err
	}
	if res[0] == nil {
		return nil, ErrResourceNotExist
	}
	return res[0], nil
}
// GetByIDs loads regexps by primary keys. The result slice is aligned with
// ids: position i holds the regexp for ids[i], or nil when it was not found.
func (*RegexpDaoImpl) GetByIDs(ctx context.Context, ids []int64) ([]*Regexp, error) {
	rawSQL := fmt.Sprintf(selectRegexpByIDsSQL, util.IntSliToSQLVarchars(ids))
	found, err := queryRegexps(ctx, db, rawSQL)
	if err != nil {
		return nil, err
	}
	aligned := make([]*Regexp, len(ids))
	for idx, id := range ids {
		for _, re := range found {
			if re.ID == id {
				aligned[idx] = re
			}
		}
	}
	return aligned, nil
}
// GetByContents loads regexps whose content is in contents. The result slice
// is aligned with contents (nil where no row matched); comparison is
// case-insensitive. An empty contents list is rejected with ErrParams.
func (*RegexpDaoImpl) GetByContents(ctx context.Context, contents []string) ([]*Regexp, error) {
	if len(contents) == 0 {
		log.Error("%v", ErrParams)
		return nil, ErrParams
	}
	rawSQL := fmt.Sprintf(selectRegexpByContentsSQL, util.StrSliToSQLVarchars(contents))
	found, err := queryRegexps(ctx, db, rawSQL)
	if err != nil {
		return nil, err
	}
	aligned := make([]*Regexp, len(contents))
	for idx, content := range contents {
		for _, re := range found {
			if strings.EqualFold(re.Content, content) {
				aligned[idx] = re
			}
		}
	}
	return aligned, nil
}
// GetByAreaAndContent returns the first regexp matching cond.Area and one of
// cond.Contents.
func (*RegexpDaoImpl) GetByAreaAndContent(ctx context.Context, cond *Condition) (*Regexp, error) {
	// Mirror the guard in GetByContents: an empty content list would render
	// an invalid "IN ()" clause, so reject it before touching the database.
	if len(cond.Contents) == 0 {
		log.Error("%v", ErrParams)
		return nil, ErrParams
	}
	rs, err := queryRegexps(ctx, db, fmt.Sprintf(selectRegexpByAreaAndContentSQL,
		cond.Area, util.StrSliToSQLVarchars(cond.Contents)))
	if err != nil {
		return nil, err
	}
	// queryRegexps guarantees a non-empty slice on success, so rs[0] is safe.
	return rs[0], nil
}
// insertRegexp writes a new regexp row and stores the database-generated ID
// back into r.ID.
func insertRegexp(ctx context.Context, executer Executer, r *Regexp) error {
	// NOTE(review): the INSERT supplies the id column explicitly from r.ID;
	// LastInsertId below is only meaningful when r.ID is 0 (auto-increment) —
	// confirm callers never pass a non-zero ID they expect to keep.
	res, err := executer.Exec(ctx, insertRegexpSQL,
		r.ID,
		r.AdminID,
		r.Area,
		r.Name,
		r.Operation,
		r.Content,
		r.State,
	)
	if err != nil {
		log.Error("%v", err)
		return err
	}
	lastID, err := res.LastInsertId()
	if err != nil {
		log.Error("%v", err)
		return err
	}
	r.ID = lastID
	return nil
}
// updateRegexp rewrites the mutable columns of r and refreshes mtime to now.
func updateRegexp(ctx context.Context, executer Executer, r *Regexp) error {
	args := []interface{}{
		r.AdminID,
		r.Name,
		r.Content,
		r.Operation,
		r.State,
		time.Now(),
		r.ID,
	}
	if _, err := executer.Exec(ctx, updateRegexpSQL, args...); err != nil {
		log.Error("%v", err)
		return err
	}
	return nil
}
// queryRegexps runs rawSQL through q and maps the result set.
// Both "no rows" from the driver and an empty mapped result are reported
// as ErrResourceNotExist.
func queryRegexps(ctx context.Context, q Querier, rawSQL string) ([]*Regexp, error) {
	rows, err := q.Query(ctx, rawSQL)
	if err != nil {
		if err == sql.ErrNoRows {
			err = ErrResourceNotExist
		}
		log.Error("Error: %v, sql: %s", err, rawSQL)
		return nil, err
	}
	defer rows.Close()
	regexps, err := mapRowToRegexps(rows)
	if err != nil {
		return nil, err
	}
	if len(regexps) == 0 {
		log.Error("Error: %v, sql: %s", ErrResourceNotExist, rawSQL)
		return nil, ErrResourceNotExist
	}
	return regexps, nil
}
// mapRowToRegexps scans every row into a *Regexp. The scan order must
// match the columns selected by columnRegexps-style queries.
func mapRowToRegexps(rows *sql.Rows) ([]*Regexp, error) {
	regexps := make([]*Regexp, 0)
	for rows.Next() {
		var r Regexp
		if err := rows.Scan(&r.ID, &r.AdminID, &r.Area, &r.Name, &r.Operation, &r.Content, &r.State, &r.CTime, &r.MTime); err != nil {
			log.Error("%v", err)
			return nil, err
		}
		regexps = append(regexps, &r)
	}
	// Surface any iteration error (e.g. connection dropped mid-scan).
	if err := rows.Err(); err != nil {
		log.Error("%v", err)
		return nil, err
	}
	return regexps, nil
}

View File

@@ -0,0 +1,107 @@
package dao
import (
"context"
"testing"
"github.com/smartystreets/goconvey/convey"
)
// NOTE(review): these functions start with a lowercase "test", so `go test`
// does not run them directly — presumably a shared TestMain/driver invokes
// them; confirm, otherwise they are dead code.

// testRegexpDaoImplGetByCond exercises listing regexps filtered by state.
func testRegexpDaoImplGetByCond(t *testing.T) {
	var (
		c    = context.TODO()
		cond = &Condition{State: "0"}
	)
	convey.Convey("GetByCond", t, func(ctx convey.C) {
		regexps, totalCounts, err := regdi.GetByCond(c, cond)
		ctx.Convey("Then err should be nil.regexps,totalCounts should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(totalCounts, convey.ShouldNotBeNil)
			ctx.So(regexps, convey.ShouldNotBeNil)
		})
	})
}

// testRegexpDaoImplDaoUpdate exercises updating an existing regexp row.
func testRegexpDaoImplDaoUpdate(t *testing.T) {
	var (
		c = context.TODO()
		r = &Regexp{ID: 1, Name: "name", Area: 1, Content: "test"}
	)
	convey.Convey("Update", t, func(ctx convey.C) {
		p1, err := regdi.Update(c, r)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// testRegexpDaoImplInsert exercises inserting a regexp row.
func testRegexpDaoImplInsert(t *testing.T) {
	var (
		c = context.TODO()
		r = &Regexp{ID: 1, Name: "name", Area: 1, Content: "test"}
	)
	convey.Convey("Insert", t, func(ctx convey.C) {
		p1, err := regdi.Insert(c, r)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// testRegexpDaoImplGetByID exercises the single-id lookup.
func testRegexpDaoImplGetByID(t *testing.T) {
	var (
		c  = context.TODO()
		id = int64(1)
	)
	convey.Convey("GetByID", t, func(ctx convey.C) {
		p1, err := regdi.GetByID(c, id)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// testRegexpDaoImplGetByIDs exercises the batched id lookup.
func testRegexpDaoImplGetByIDs(t *testing.T) {
	var (
		c   = context.TODO()
		ids = []int64{1}
	)
	convey.Convey("RegexpDaoImplGetByIDs", t, func(ctx convey.C) {
		p1, err := regdi.GetByIDs(c, ids)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// testRegexpDaoImplGetByContents exercises the case-insensitive content lookup.
func testRegexpDaoImplGetByContents(t *testing.T) {
	var (
		c        = context.TODO()
		contents = []string{"test"}
	)
	convey.Convey("GetByContents", t, func(ctx convey.C) {
		p1, err := regdi.GetByContents(c, contents)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// testRegexpDaoImplGetByAreaAndContent exercises the area+content lookup.
func testRegexpDaoImplGetByAreaAndContent(t *testing.T) {
	var (
		c    = context.TODO()
		cond = &Condition{State: "0"}
	)
	convey.Convey("GetByAreaAndContent", t, func(ctx convey.C) {
		p1, err := regdi.GetByAreaAndContent(c, cond)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

View File

@@ -0,0 +1,270 @@
package dao
import (
"context"
"fmt"
"strings"
"time"
"go-common/app/service/main/antispam/util"
"go-common/library/database/sql"
"go-common/library/log"
)
const (
	// columnRules lists the selected columns; the order must match the
	// Scan order in mapRowToRules.
	columnRules = "id, area, limit_type, limit_scope, dur_sec, allowed_counts, ctime, mtime"
	// NOTE(review): the SELECT templates below are filled via fmt.Sprintf
	// with caller-supplied values rather than bound parameters. This is
	// only safe while those values are numeric or validated upstream —
	// confirm sanitization at the call sites.
	selectRuleCountsSQL                 = `SELECT COUNT(1) FROM rate_limit_rules %s`
	selectRulesByCondSQL                = `SELECT ` + columnRules + ` FROM rate_limit_rules %s`
	selectRuleByIDsSQL                  = `SELECT ` + columnRules + ` FROM rate_limit_rules WHERE id IN(%s)`
	selectRulesByAreaSQL                = `SELECT ` + columnRules + ` FROM rate_limit_rules WHERE area = %s`
	selectRulesByAreaAndTypeSQL         = `SELECT ` + columnRules + ` FROM rate_limit_rules WHERE area = %s AND limit_type = %s`
	selectRulesByAreaAndTypeAndScopeSQL = `SELECT ` + columnRules + ` FROM rate_limit_rules WHERE area = %s AND limit_type = %s AND limit_scope = %s`
	// The write statements use bound parameters.
	insertRuleSQL = `INSERT INTO rate_limit_rules(area, limit_type, limit_scope, dur_sec, allowed_counts) VALUES(?, ?, ?, ?, ?)`
	updateRuleSQL = `UPDATE rate_limit_rules SET dur_sec = ?, allowed_counts = ?, mtime = ? WHERE area = ? AND limit_type = ? AND limit_scope = ?`
)
// Rule is one row of the rate_limit_rules table: a rate-limit setting
// (allowed_counts per dur_sec seconds) keyed by area, limit type and scope.
type Rule struct {
	ID            int64     `db:"id"`
	Area          int       `db:"area"`           // business area the rule applies to
	LimitType     int       `db:"limit_type"`     // see LimitType* constants
	LimitScope    int       `db:"limit_scope"`    // see LimitScope* constants
	DurationSec   int64     `db:"dur_sec"`        // window length in seconds
	AllowedCounts int64     `db:"allowed_counts"` // allowed hits within the window
	CTime         time.Time `db:"ctime"`
	MTime         time.Time `db:"mtime"`
}
// RuleDaoImpl is the MySQL-backed implementation of the rate-limit-rule
// DAO. It is stateless and relies on the package-level db handle.
type RuleDaoImpl struct{}

// Values stored in rate_limit_rules.limit_type. Semantics are inferred
// from the names — confirm against the service layer:
const (
	// LimitTypeDefaultLimit is the ordinary rate limit (0).
	LimitTypeDefaultLimit int = iota
	// LimitTypeRestrictLimit is presumably a tightened limit (1).
	LimitTypeRestrictLimit
	// LimitTypeWhite presumably exempts matches from limiting (2).
	LimitTypeWhite
	// LimitTypeBlack presumably blocks matches outright (3).
	LimitTypeBlack
)

// Values stored in rate_limit_rules.limit_scope.
const (
	// LimitScopeGlobal applies across the whole area (0).
	LimitScopeGlobal int = iota
	// LimitScopeLocal applies per object/sender — confirm (1).
	LimitScopeLocal
)

// NewRuleDao returns a new stateless rule DAO.
func NewRuleDao() *RuleDaoImpl {
	return &RuleDaoImpl{}
}
// updateRule overwrites dur_sec/allowed_counts (and mtime) of the rule row
// identified by r's area, limit type and scope.
func updateRule(ctx context.Context, executer Executer, r *Rule) error {
	args := []interface{}{r.DurationSec, r.AllowedCounts, time.Now(), r.Area, r.LimitType, r.LimitScope}
	if _, err := executer.Exec(ctx, updateRuleSQL, args...); err != nil {
		log.Error("%v", err)
		return err
	}
	return nil
}
// insertRule persists r and writes the auto-generated row id back into r.ID.
func insertRule(ctx context.Context, executer Executer, r *Rule) error {
	res, err := executer.Exec(ctx, insertRuleSQL, r.Area, r.LimitType, r.LimitScope, r.DurationSec, r.AllowedCounts)
	if err != nil {
		log.Error("%v", err)
		return err
	}
	id, err := res.LastInsertId()
	if err != nil {
		log.Error("%v", err)
		return err
	}
	r.ID = id
	return nil
}
// GetByCond loads rules matching cond, optionally paginated and ordered.
// When cond.Pagination is set, a COUNT query runs first so the offset and
// limit can be computed; otherwise totalCounts stays 0 and no LIMIT is
// applied.
func (*RuleDaoImpl) GetByCond(ctx context.Context, cond *Condition) (rules []*Rule, totalCounts int64, err error) {
	sqlConds := make([]string, 0)
	if cond.Area != "" {
		sqlConds = append(sqlConds, fmt.Sprintf("area = %s", cond.Area))
	}
	// NOTE(review): columnRules does not include a state column for this
	// table — confirm the "state" filter matches the actual schema.
	if cond.State != "" {
		sqlConds = append(sqlConds, fmt.Sprintf("state = %s", cond.State))
	}
	var optionSQL string
	if len(sqlConds) > 0 {
		optionSQL = fmt.Sprintf("WHERE %s", strings.Join(sqlConds, " AND "))
	}
	var limitSQL string
	if cond.Pagination != nil {
		queryCountsSQL := fmt.Sprintf(selectRuleCountsSQL, optionSQL)
		totalCounts, err = GetTotalCounts(ctx, db, queryCountsSQL)
		if err != nil {
			return nil, 0, err
		}
		offset, limit := cond.OffsetLimit(totalCounts)
		if limit == 0 {
			// The requested page lies past the last row.
			return nil, 0, ErrResourceNotExist
		}
		limitSQL = fmt.Sprintf("LIMIT %d, %d", offset, limit)
	}
	if cond.OrderBy != "" {
		optionSQL = fmt.Sprintf("%s ORDER BY %s %s", optionSQL, cond.OrderBy, cond.Order)
	}
	// LIMIT must come after ORDER BY, hence the two-step assembly.
	if limitSQL != "" {
		optionSQL = fmt.Sprintf("%s %s", optionSQL, limitSQL)
	}
	querySQL := fmt.Sprintf(selectRulesByCondSQL, optionSQL)
	log.Info("OptionSQL(%s), GetByCondSQL(%s)", optionSQL, querySQL)
	rules, err = queryRules(ctx, db, querySQL)
	if err != nil {
		return nil, totalCounts, err
	}
	return rules, totalCounts, nil
}
// Update persists r's rate-limit settings and then re-reads the row keyed
// by area/type/scope so callers see DB-maintained fields such as mtime.
func (rdi *RuleDaoImpl) Update(ctx context.Context, r *Rule) (*Rule, error) {
	if err := updateRule(ctx, db, r); err != nil {
		return nil, err
	}
	cond := &Condition{
		Area:       fmt.Sprintf("%d", r.Area),
		LimitType:  fmt.Sprintf("%d", r.LimitType),
		LimitScope: fmt.Sprintf("%d", r.LimitScope),
	}
	return rdi.GetByAreaAndTypeAndScope(ctx, cond)
}
// Insert stores a new rule and returns the freshly re-read row.
func (rdi *RuleDaoImpl) Insert(ctx context.Context, r *Rule) (*Rule, error) {
	err := insertRule(ctx, db, r)
	if err != nil {
		return nil, err
	}
	// insertRule filled in r.ID from the auto-increment column.
	return rdi.GetByID(ctx, r.ID)
}
// GetByID fetches a single rule by primary key; a nil slot from GetByIDs
// means the row does not exist.
func (rdi *RuleDaoImpl) GetByID(ctx context.Context, id int64) (*Rule, error) {
	rules, err := rdi.GetByIDs(ctx, []int64{id})
	if err != nil {
		return nil, err
	}
	if got := rules[0]; got != nil {
		return got, nil
	}
	return nil, ErrResourceNotExist
}
// GetByIDs loads rules for the given ids in one query. The result is
// parallel to ids; a slot is nil when that id has no matching row.
func (*RuleDaoImpl) GetByIDs(ctx context.Context, ids []int64) ([]*Rule, error) {
	rows, err := queryRules(ctx, db, fmt.Sprintf(selectRuleByIDsSQL, util.IntSliToSQLVarchars(ids)))
	if err != nil {
		return nil, err
	}
	byID := make(map[int64]*Rule, len(rows))
	for _, r := range rows {
		byID[r.ID] = r
	}
	res := make([]*Rule, len(ids))
	for i, id := range ids {
		res[i] = byID[id]
	}
	return res, nil
}
// GetByAreaAndLimitType returns all rules for cond.Area with the given
// limit type (both passed as pre-validated strings).
func (*RuleDaoImpl) GetByAreaAndLimitType(ctx context.Context, cond *Condition) ([]*Rule, error) {
	return queryRules(ctx, db, fmt.Sprintf(selectRulesByAreaAndTypeSQL, cond.Area, cond.LimitType))
}
// GetByAreaAndTypeAndScope returns the unique rule identified by area,
// limit type and limit scope.
func (*RuleDaoImpl) GetByAreaAndTypeAndScope(ctx context.Context, cond *Condition) (*Rule, error) {
	querySQL := fmt.Sprintf(selectRulesByAreaAndTypeAndScopeSQL, cond.Area, cond.LimitType, cond.LimitScope)
	rules, err := queryRules(ctx, db, querySQL)
	if err != nil {
		return nil, err
	}
	// queryRules never returns an empty slice with a nil error.
	return rules[0], nil
}
// GetByArea returns every rule configured for cond.Area.
func (*RuleDaoImpl) GetByArea(ctx context.Context, cond *Condition) ([]*Rule, error) {
	return queryRules(ctx, db, fmt.Sprintf(selectRulesByAreaSQL, cond.Area))
}
// queryRules runs rawSQL through q and maps the result set. Both driver
// "no rows" and an empty mapped result are reported as ErrResourceNotExist.
func queryRules(ctx context.Context, q Querier, rawSQL string) ([]*Rule, error) {
	log.Info("Query sql: %q", rawSQL)
	rows, err := q.Query(ctx, rawSQL)
	if err != nil {
		if err == sql.ErrNoRows {
			err = ErrResourceNotExist
		}
		log.Error("Error: %v, RawSQL: %s", err, rawSQL)
		return nil, err
	}
	defer rows.Close()
	rules, err := mapRowToRules(rows)
	if err != nil {
		return nil, err
	}
	if len(rules) == 0 {
		return nil, ErrResourceNotExist
	}
	return rules, nil
}
// mapRowToRules scans every row into a *Rule; the scan order must match
// the columnRules column list.
func mapRowToRules(rows *sql.Rows) ([]*Rule, error) {
	var rules []*Rule
	for rows.Next() {
		r := new(Rule)
		if err := rows.Scan(&r.ID, &r.Area, &r.LimitType, &r.LimitScope, &r.DurationSec, &r.AllowedCounts, &r.CTime, &r.MTime); err != nil {
			log.Error("%v", err)
			return nil, err
		}
		rules = append(rules, r)
	}
	// Surface any iteration error (e.g. connection dropped mid-scan).
	if err := rows.Err(); err != nil {
		log.Error("%v", err)
		return nil, err
	}
	return rules, nil
}

View File

@@ -0,0 +1,121 @@
package dao
import (
"context"
"testing"
"github.com/smartystreets/goconvey/convey"
)
// NOTE(review): these functions start with a lowercase "test", so `go test`
// does not run them directly — presumably a shared TestMain/driver invokes
// them; confirm, otherwise they are dead code.

// testRuleDaoImplGetByCond exercises unfiltered rule listing.
func testRuleDaoImplGetByCond(t *testing.T) {
	var (
		c    = context.TODO()
		cond = &Condition{}
	)
	convey.Convey("GetByCond", t, func(ctx convey.C) {
		rules, totalCounts, err := rdi.GetByCond(c, cond)
		ctx.Convey("Then err should be nil.rules,totalCounts should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(totalCounts, convey.ShouldNotBeNil)
			ctx.So(rules, convey.ShouldNotBeNil)
		})
	})
}

// testRuleDaoImplUpdate exercises the update-then-reread path.
func testRuleDaoImplUpdate(t *testing.T) {
	var (
		c = context.TODO()
		r = &Rule{}
	)
	convey.Convey("Update", t, func(ctx convey.C) {
		p1, err := rdi.Update(c, r)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// testRuleDaoImplInsert exercises the insert-then-reread path.
func testRuleDaoImplInsert(t *testing.T) {
	var (
		c = context.TODO()
		r = &Rule{}
	)
	convey.Convey("Insert", t, func(ctx convey.C) {
		p1, err := rdi.Insert(c, r)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// testRuleDaoImplGetByID exercises the single-id lookup.
func testRuleDaoImplGetByID(t *testing.T) {
	var (
		c  = context.TODO()
		id = int64(0)
	)
	convey.Convey("GetByID", t, func(ctx convey.C) {
		p1, err := rdi.GetByID(c, id)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// testRuleDaoImplDaoGetByIDs exercises the batched id lookup
// (note: an empty id slice is passed).
func testRuleDaoImplDaoGetByIDs(t *testing.T) {
	var (
		c   = context.TODO()
		ids = []int64{}
	)
	convey.Convey("GetByIDs", t, func(ctx convey.C) {
		p1, err := rdi.GetByIDs(c, ids)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// testRuleDaoImplGetByAreaAndLimitType exercises the area+type lookup.
func testRuleDaoImplGetByAreaAndLimitType(t *testing.T) {
	var (
		c    = context.TODO()
		cond = &Condition{}
	)
	convey.Convey("GetByAreaAndLimitType", t, func(ctx convey.C) {
		p1, err := rdi.GetByAreaAndLimitType(c, cond)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// testRuleDaoImplGetByAreaAndTypeAndScope exercises the unique-key lookup.
func testRuleDaoImplGetByAreaAndTypeAndScope(t *testing.T) {
	var (
		c    = context.TODO()
		cond = &Condition{}
	)
	convey.Convey("GetByAreaAndTypeAndScope", t, func(ctx convey.C) {
		p1, err := rdi.GetByAreaAndTypeAndScope(c, cond)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

// testRuleDaoImplGetByArea exercises the per-area listing.
func testRuleDaoImplGetByArea(t *testing.T) {
	var (
		c    = context.TODO()
		cond = &Condition{}
	)
	convey.Convey("GetByArea", t, func(ctx convey.C) {
		p1, err := rdi.GetByArea(c, cond)
		ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
			ctx.So(err, convey.ShouldBeNil)
			ctx.So(p1, convey.ShouldNotBeNil)
		})
	})
}

View File

@@ -0,0 +1,48 @@
package dao
/*
import (
"context"
xsql "go-common/database/sql"
)
type TxImpl struct {
*xsql.Tx
}
func NewTx(ctx context.Context) (Tx, error) {
t, err := db.Begin(ctx)
if err != nil {
return nil, err
}
return &TxImpl{t}, nil
}
func (tx *TxImpl) UpdateKeyword(k *Keyword) error {
return updateKeyword(tx.Ctx, tx, k)
}
func (tx *TxImpl) InsertKeyword(k *Keyword) error {
return insertKeyword(tx.Ctx, tx, k)
}
func (tx *TxImpl) InsertRule(r *Rule) error {
return insertRule(tx.Ctx, tx, r)
}
func (tx *TxImpl) UpdateRegexp(r *Regexp) error {
return updateRegexp(tx.Ctx, tx, r)
}
func (tx *TxImpl) InsertRegexp(r *Regexp) error {
return insertRegexp(tx.Ctx, tx, r)
}
func (tx *TxImpl) QueryRow(_ context.Context, sql string, args ...interface{}) *xsql.Row {
return tx.Tx.QueryRow(sql, args)
}
func (tx *TxImpl) Query(_ context.Context, sql string, args ...interface{}) (*xsql.Rows, error) {
return tx.Tx.Query(sql, args)
}*/

View File

@@ -0,0 +1 @@
package dao

38
app/service/main/antispam/extern/BUILD vendored Normal file
View File

@@ -0,0 +1,38 @@
# Bazel build rules for the extern package (auto-managed by gazelle;
# "automanaged" tags mean regeneration may overwrite manual edits).
package(default_visibility = ["//visibility:public"])

load(
    "@io_bazel_rules_go//go:def.bzl",
    "go_library",
)

go_library(
    name = "go_default_library",
    srcs = [
        "client.go",
        "handler.go",
        "mock.go",
        "reply_service.go",
    ],
    importpath = "go-common/app/service/main/antispam/extern",
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
    deps = [
        "//app/service/main/antispam/conf:go_default_library",
        "//library/log:go_default_library",
        "//library/net/http/blademaster:go_default_library",
    ],
)

filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)

filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,31 @@
package extern
import (
"go-common/app/service/main/antispam/conf"
bm "go-common/library/net/http/blademaster"
)
// Client bundles the HTTP clients for all external services the antispam
// service calls; currently only the reply service.
type Client struct {
	*ReplyServiceClient
}

// NewClient builds a Client from configuration: one shared blademaster
// HTTP client signed with the app key/secret, pointed at c.ReplyURL.
func NewClient(c *conf.Config) *Client {
	httpCli := bm.NewClient(c.HTTPClient)
	return &Client{
		ReplyServiceClient: &ReplyServiceClient{
			host: c.ReplyURL,
			commonClient: &commonClient{
				httpCli: httpCli,
				key:     c.App.Key,
				secret:  c.App.Secret,
			},
		},
	}
}

// commonClient holds the HTTP client and the request-signing credentials
// shared by every per-service client.
type commonClient struct {
	httpCli     *bm.Client
	key, secret string
}

View File

@@ -0,0 +1,11 @@
package extern
import "context"
// Handler aggregates every external-service operation the antispam
// service needs; currently only reply handling.
type Handler interface {
	ReplyHandler
}

// ReplyHandler abstracts the reply-service delete operation so it can be
// mocked in tests (see MockExternHandler).
type ReplyHandler interface {
	DeleteReply(ctx context.Context, adminId int64, rs []*Reply) error
}

View File

@@ -0,0 +1,11 @@
package extern
import "context"
// MockExternHandler is a test double for Handler; each field holds the
// canned error the corresponding method returns.
type MockExternHandler struct {
	ErrDeleteReply error
}

// DeleteReply returns the preconfigured ErrDeleteReply without doing any I/O.
func (mf *MockExternHandler) DeleteReply(ctx context.Context, adminID int64, ks []*Reply) error {
	return mf.ErrDeleteReply
}

View File

@@ -0,0 +1,98 @@
package extern
import (
"context"
"fmt"
"net/url"
"path"
"time"
"go-common/library/log"
)
const (
	// PathDeleteReplyByIds is the reply-service endpoint for batch deletion.
	PathDeleteReplyByIds = "/x/internal/v2/reply/del"
)

// Reply identifies one reply to delete: its own id plus the subject
// (oid) and subject type it belongs to.
type Reply struct {
	Id    int64 `json:"id"`
	OId   int64 `json:"oid"`
	OType int64 `json:"typ"`
}

// NOTE(review): replySvrCli is declared but never assigned anywhere in
// this file — any dereference of it panics; see DeleteReply.
var replySvrCli *ReplyServiceClient

// ReplyServiceClient calls the reply service at host using the shared
// signing credentials from commonClient.
type ReplyServiceClient struct {
	*commonClient
	host string
}
type ReplyServiceResp struct {
Code int `json:"code"`
Message string `json:"messge"`
Data interface{} `json:"data"`
}
// Replys is a helper slice for building the comma-separated id lists the
// reply-service API expects.
type Replys []*Reply

// joinIDs renders the extracted int64s as "a,b,c" with no trailing comma;
// an empty slice yields "" instead of panicking (the old Ids/OTypes
// sliced s[:len(s)-1] on an empty string).
func (rs Replys) joinIDs(get func(*Reply) int64) string {
	var s string
	for _, r := range rs {
		s += fmt.Sprintf("%d,", get(r))
	}
	if s == "" {
		return s
	}
	return s[:len(s)-1]
}

// OIds returns the subject ids. BUG FIX: previously kept a trailing
// comma, unlike Ids/OTypes.
func (rs Replys) OIds() string {
	return rs.joinIDs(func(r *Reply) int64 { return r.OId })
}

// Ids returns the reply ids.
func (rs Replys) Ids() string {
	return rs.joinIDs(func(r *Reply) int64 { return r.Id })
}

// OTypes returns the subject types.
func (rs Replys) OTypes() string {
	return rs.joinIDs(func(r *Reply) int64 { return r.OType })
}
// DeleteReply asks the reply service to delete the given replies on
// behalf of adminID, with a fixed "delete by antispam" reason.
// BUG FIX: the request was sent through the package-level replySvrCli,
// which is never assigned — dereferencing it panics. Use the receiver's
// own HTTP client instead. (Receiver renamed from non-idiomatic "self".)
func (c *ReplyServiceClient) DeleteReply(ctx context.Context, adminID int64, rs []*Reply) error {
	val := url.Values{}
	val.Add("adid", fmt.Sprintf("%d", adminID))
	val.Add("adname", "antispam")
	val.Add("oid", Replys(rs).OIds())
	val.Add("rpid", Replys(rs).Ids())
	val.Add("type", Replys(rs).OTypes())
	val.Add("moral", "0")
	val.Add("notify", "false")
	val.Add("remark", "")
	val.Add("ftime", "")
	val.Add("reason", "delete by antispam")
	return c.do(ctx, PathDeleteReplyByIds, val, &ReplyServiceResp{}, c.httpCli.Post)
}
// do signs params with the app key/secret and a slightly future timestamp
// (+10s, presumably to survive clock skew — confirm), then issues the
// request via fn and validates the envelope code.
// BUG FIX: the URL was built with path.Join(rs.host + urlPath); path.Join
// runs path.Clean, which collapses the "//" in "http://host" to
// "http:/host" and breaks the request URL. Plain concatenation is correct
// here because urlPath constants already start with "/".
func (rs *ReplyServiceClient) do(ctx context.Context,
	urlPath string, params url.Values, resp *ReplyServiceResp,
	fn func(ctx context.Context, uri string, ip string, params url.Values, resp interface{}) error,
) error {
	params.Set("appkey", rs.key)
	params.Set("appsecret", rs.secret)
	params.Set("ts", fmt.Sprintf("%d", time.Now().Unix()+int64(10)))
	urlAddr := rs.host + urlPath
	err := fn(ctx, urlAddr, "", params, resp)
	if err != nil {
		return err
	}
	if resp.Code != 0 {
		err = fmt.Errorf("Call reply service(%s), response code is not 0, resp:%v", urlAddr+"?"+params.Encode(), resp)
		log.Error("%v", err)
		return err
	}
	log.Info("Call reply service(%s) successful, resp: %v", urlAddr+"?"+params.Encode(), resp)
	return nil
}

View File

@@ -0,0 +1,59 @@
# Bazel build rules for the http package (auto-managed by gazelle;
# "automanaged" tags mean regeneration may overwrite manual edits).
package(default_visibility = ["//visibility:public"])

load(
    "@io_bazel_rules_go//go:def.bzl",
    "go_library",
    "go_test",
)

go_library(
    name = "go_default_library",
    srcs = [
        "cond.go",
        "filter.go",
        "http.go",
        "keyword.go",
        "protocol.go",
        "regexp.go",
        "rule.go",
    ],
    importpath = "go-common/app/service/main/antispam/http",
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
    deps = [
        "//app/service/main/antispam/conf:go_default_library",
        "//app/service/main/antispam/dao:go_default_library",
        "//app/service/main/antispam/model:go_default_library",
        "//app/service/main/antispam/service:go_default_library",
        "//app/service/main/antispam/util:go_default_library",
        "//library/ecode:go_default_library",
        "//library/log:go_default_library",
        "//library/net/http/blademaster:go_default_library",
        "//library/net/http/blademaster/middleware/auth:go_default_library",
        "//library/net/http/blademaster/middleware/verify:go_default_library",
    ],
)

# External (black-box) test package: http_test.
go_test(
    name = "go_default_xtest",
    srcs = ["cond_test.go"],
    tags = ["automanaged"],
    deps = [
        "//app/service/main/antispam/http:go_default_library",
        "//vendor/github.com/stretchr/testify/assert:go_default_library",
    ],
)

filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)

filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,91 @@
package http
import (
"fmt"
"strings"
"time"
"go-common/app/service/main/antispam/model"
"go-common/app/service/main/antispam/service"
"go-common/app/service/main/antispam/util"
)
// Condition is the HTTP-layer query filter assembled from request
// parameters; ToServiceCond converts it for the service layer.
type Condition struct {
	*util.Pagination
	Tag                   string   // single-value convenience form of Tags
	Tags                  []string
	Content               string   // single-value convenience form of Contents
	Contents              []string
	Area                  string
	Search                string
	State                 string
	HitCounts             string
	Order, OrderBy        string // sort direction ("ASC"/"DESC") and column
	LimitType, LimitScope string
	StartTime, EndTime    *time.Time // optional ctime window
}
// ToServiceCond converts an HTTP-layer Condition into its service-layer
// equivalent. A nil cond maps to nil.
func ToServiceCond(cond *Condition) *service.Condition {
	if cond == nil {
		return nil
	}
	out := &service.Condition{
		Pagination: cond.Pagination,
		Area:       cond.Area,
		Order:      cond.Order,
		OrderBy:    cond.OrderBy,
		Tags:       cond.Tags,
		Contents:   cond.Contents,
		Search:     cond.Search,
		State:      cond.State,
		HitCounts:  cond.HitCounts,
		LimitType:  cond.LimitType,
		LimitScope: cond.LimitScope,
		StartTime:  cond.StartTime,
		EndTime:    cond.EndTime,
	}
	// The single-value convenience fields take precedence over the slices.
	if cond.Tag != "" {
		out.Tags = []string{cond.Tag}
	}
	if cond.Content != "" {
		out.Contents = []string{cond.Content}
	}
	// Legacy clients still sort by "show_up_counts"; map it to the
	// current column name.
	if out.OrderBy == "show_up_counts" {
		out.OrderBy = "hit_counts"
	}
	return out
}
// Valid normalizes pagination defaults, trims Search/Order, uppercases
// Order (defaulting to ASC), and rejects any other sort direction.
func (c *Condition) Valid() error {
	if c.Pagination != nil {
		if c.CurPage == 0 {
			c.CurPage = 1
		}
		if c.PerPage == 0 {
			c.PerPage = 20
		}
	}
	c.Search = strings.TrimSpace(c.Search)
	switch order := strings.ToUpper(strings.TrimSpace(c.Order)); order {
	case "":
		c.Order = model.OrderASC
	case model.OrderASC, model.OrderDESC:
		c.Order = order
	default:
		c.Order = order
		return fmt.Errorf("Order by should be 'ASC' or 'DESC' but got(%s)", c.Order)
	}
	return nil
}

View File

@@ -0,0 +1,32 @@
package http_test
import (
"errors"
"fmt"
"testing"
"go-common/app/service/main/antispam/http"
"github.com/stretchr/testify/assert"
)
// TestValid table-tests Condition.Valid: blank order defaults to ASC,
// asc/desc (any case) are accepted, anything else is rejected with the
// uppercased value echoed in the error.
func TestValid(t *testing.T) {
	cases := []struct {
		cond        *http.Condition
		expectedErr error
	}{
		{&http.Condition{Search: " ", Order: ""}, nil},
		{&http.Condition{Search: "foo", Order: ""}, nil},
		{&http.Condition{Search: "bar", Order: "xxx"}, errors.New("Order by should be 'ASC' or 'DESC' but got(XXX)")},
		{&http.Condition{Search: "bar", Order: "asc"}, nil},
		{&http.Condition{Search: "bar", Order: "ASC"}, nil},
		{&http.Condition{Search: "bar", Order: "DESC"}, nil},
	}
	for _, c := range cases {
		t.Run(fmt.Sprintf("Search(%q) Order(%q)", c.cond.Search, c.cond.Order), func(t *testing.T) {
			assert := assert.New(t)
			err := c.cond.Valid()
			assert.Equal(c.expectedErr, err, fmt.Sprintf("cond.Valid() = %v, want %v", err, c.expectedErr))
		})
	}
}

View File

@@ -0,0 +1,40 @@
package http
import (
"strconv"
"go-common/app/service/main/antispam/model"
"go-common/library/ecode"
"go-common/library/log"
bm "go-common/library/net/http/blademaster"
)
// Filter is the HTTP handler for /x/internal/antispam/filter. It parses
// sender_id, oid, content and area from the form, builds a Suspicious and
// returns the service's filter verdict as JSON.
func Filter(c *bm.Context) {
	params := c.Request.Form
	senderID, err := strconv.ParseInt(params.Get(ProtocolKeywordSenderID), 10, 64)
	if err != nil {
		log.Error("%v", err)
		errResp(c, ecode.RequestErr, err)
		return
	}
	oid, err := strconv.ParseInt(params.Get(ProtocolKeywordSubjectID), 10, 64)
	if err != nil {
		log.Error("%v", err)
		errResp(c, ecode.RequestErr, err)
		return
	}
	susp := &model.Suspicious{
		SenderId: senderID,
		Content:  params.Get(ProtocolKeywordContent),
		Area:     params.Get(ProtocolArea),
		OId:      oid,
	}
	result, err := Svr.Filter(c, susp)
	if err != nil {
		errResp(c, ecode.RequestErr, err)
		return
	}
	c.JSON(result, nil)
}

View File

@@ -0,0 +1,120 @@
package http
import (
"errors"
"fmt"
"net/http"
"net/url"
"strconv"
"go-common/app/service/main/antispam/conf"
"go-common/app/service/main/antispam/service"
"go-common/library/log"
bm "go-common/library/net/http/blademaster"
"go-common/library/net/http/blademaster/middleware/auth"
"go-common/library/net/http/blademaster/middleware/verify"
)
var (
	// Svr is the antispam service instance every handler delegates to;
	// it is set by Init before the server starts.
	Svr service.Service
	// verifySvc verifies signed internal requests.
	verifySvc *verify.Verify
	// authSvc authenticates end-user requests (Guest on /filter).
	authSvc *auth.Auth
)
// Init wires the service into the handlers, builds the verify/auth
// middleware, registers all routes and starts the blademaster server.
// It panics if the server fails to start (the process cannot serve).
func Init(c *conf.Config, s service.Service) {
	Svr = s
	verifySvc = verify.New(c.Verify)
	authSvc = auth.New(c.Auth)
	engine := bm.DefaultServer(c.BM)
	interRouter(engine)
	if err := engine.Start(); err != nil {
		log.Error("engine.Start() error(%v)", err)
		panic(err)
	}
}
// interRouter registers all internal routes. /filter uses guest auth;
// every admin route requires signed-request verification.
func interRouter(e *bm.Engine) {
	e.GET("/monitor/ping", ping)
	e.GET("/register", register)
	e.GET("/x/internal/antispam/filter", authSvc.Guest, Filter)
	// Regexp administration.
	regexps := e.Group("/x/internal/antispam/regexps")
	regexps.GET("", verifySvc.Verify, GetRegexps)
	regexps.GET("/one", verifySvc.Verify, GetRegexp)
	regexps.POST("/add", verifySvc.Verify, AddRegexp)
	regexps.POST("/edit", verifySvc.Verify, EditRegexp)
	regexps.POST("/del", verifySvc.Verify, DeleteRegexp)
	regexps.POST("/recover", verifySvc.Verify, RecoverRegexp)
	// Rate-limit rule administration.
	rules := e.Group("/x/internal/antispam/rules")
	rules.GET("", verifySvc.Verify, GetRules)
	rules.GET("/one", verifySvc.Verify, GetRule)
	rules.POST("/add", verifySvc.Verify, AddRule)
	// Keyword administration.
	keywords := e.Group("/x/internal/antispam/keywords")
	keywords.GET("", verifySvc.Verify, GetKeywords)
	keywords.GET("/senders", verifySvc.Verify, GetKeywordSenders)
	keywords.GET("/one", verifySvc.Verify, GetKeyword)
	keywords.POST("/dels", verifySvc.Verify, DeleteKeywords)
	keywords.POST("/action", verifySvc.Verify, UpdateKeyword)
}
// ping is the health check: 503 when the service (and its backing
// stores) cannot be reached.
func ping(c *bm.Context) {
	if err := Svr.Ping(c); err != nil {
		log.Error("antispam service ping error(%v)", err)
		c.AbortWithStatus(http.StatusServiceUnavailable)
	}
}

// register is a discovery/registration probe; it only returns an empty
// success payload.
func register(c *bm.Context) {
	c.JSON(struct{}{}, nil)
}
// getAdminIDAndArea extracts and validates both the admin id and the
// business area from the request parameters.
func getAdminIDAndArea(params url.Values) (adminID int64, area string, err error) {
	if adminID, err = getAdminID(params); err != nil {
		return 0, "", err
	}
	if area, err = parseArea(params); err != nil {
		return 0, "", err
	}
	return adminID, area, nil
}
// parseArea reads the area parameter and rejects values not declared in
// the configured area set.
func parseArea(params url.Values) (string, error) {
	area := params.Get(ProtocolArea)
	if _, known := conf.Areas[area]; known {
		return area, nil
	}
	err := fmt.Errorf("invalid area(%s)", area)
	log.Error("%v", err)
	return "", err
}
// getAdminID reads and parses the required admin_id parameter.
func getAdminID(params url.Values) (int64, error) {
	raw := params.Get(ProtocolAdminID)
	if raw == "" {
		err := errors.New("empty admin id")
		log.Error("%v", err)
		return 0, err
	}
	id, err := strconv.ParseInt(raw, 10, 64)
	if err != nil {
		log.Error("%v", err)
		return 0, err
	}
	return id, nil
}
// errResp writes an error payload: the ecode under the "data" key plus
// the error text under "msg".
// NOTE(review): the code is emitted under ProtocolData ("data"), not
// ProtocolCode ("code") — this looks like a key mix-up, but existing
// clients may depend on the current shape; confirm before changing.
func errResp(c *bm.Context, code interface{}, err error) {
	c.JSONMap(map[string]interface{}{
		ProtocolData:    code,
		ProtocolMessage: err.Error(),
	}, nil)
}

View File

@@ -0,0 +1,242 @@
package http
import (
"errors"
"strconv"
"time"
"go-common/app/service/main/antispam/conf"
"go-common/app/service/main/antispam/dao"
"go-common/app/service/main/antispam/model"
"go-common/app/service/main/antispam/util"
"go-common/library/ecode"
"go-common/library/log"
bm "go-common/library/net/http/blademaster"
)
var (
	// ErrMissID is returned when a handler requires an id parameter and
	// none was supplied.
	ErrMissID = errors.New("error: id is required")
	// ErrIllegalOp is returned when the requested keyword operation is
	// not one of the allowed op values.
	ErrIllegalOp = errors.New("error: this operation on keyword is not allowed")
)
// GetKeywordSenders returns the sender-id list of the keyword named by
// the "id" parameter. The admin id is required but only validated.
// Soft-deleted keywords are reported as missing.
func GetKeywordSenders(c *bm.Context) {
	params := c.Request.Form
	if _, err := getAdminID(params); err != nil {
		errResp(c, ecode.RequestErr, err)
		return
	}
	id, err := strconv.ParseInt(params.Get(ProtocolKeywordID), 10, 64)
	if err != nil {
		log.Error("%v", err)
		errResp(c, ecode.RequestErr, err)
		return
	}
	k, err := Svr.GetKeywordByID(c, id)
	if err != nil {
		errResp(c, ecode.ServerErr, err)
		return
	}
	// Treat soft-deleted keywords as nonexistent.
	if k.State == model.StateDeleted {
		errResp(c, ecode.ServerErr, dao.ErrResourceNotExist)
		return
	}
	l, err := Svr.GetSenderIDsByKeywordID(c, id)
	if err != nil {
		errResp(c, ecode.ServerErr, err)
		return
	}
	c.JSON(l, nil)
}
// DeleteKeywords batch-deletes the keywords whose ids are given as a
// comma-separated "ids" parameter and returns the affected keywords.
func DeleteKeywords(c *bm.Context) {
	params := c.Request.Form
	if _, err := getAdminID(params); err != nil {
		errResp(c, ecode.RequestErr, err)
		return
	}
	if params.Get(ProtocolKeywordIDs) == "" {
		errResp(c, ecode.RequestErr, errors.New("empty ids"))
		return
	}
	ids, err := util.StrToIntSli(params.Get(ProtocolKeywordIDs), ",")
	if err != nil {
		log.Error("%v", err)
		errResp(c, ecode.RequestErr, err)
		return
	}
	// The raw string was non-empty but parsed to nothing usable.
	if len(ids) == 0 {
		errResp(c, ecode.RequestErr, nil)
		return
	}
	ks, err := Svr.DeleteKeywords(c, ids)
	if err != nil {
		errResp(c, ecode.ServerErr, err)
		return
	}
	c.JSON(ks, nil)
}
// UpdateKeyword applies one of the allowed tag operations (limit,
// restrict, black, white) to the keyword named by "id". A no-op request
// (keyword already tagged with op) returns the keyword unchanged;
// soft-deleted keywords are reported as missing.
func UpdateKeyword(c *bm.Context) {
	params := c.Request.Form
	_, err := getAdminID(params)
	if err != nil {
		errResp(c, ecode.RequestErr, err)
		return
	}
	op := params.Get(ProtocolKeywordOperation)
	// Whitelist of permitted operations.
	if op != ProtocolKeywordOpDefaultLimit &&
		op != ProtocolKeywordOpRestrictLimit &&
		op != ProtocolKeywordOpBlack &&
		op != ProtocolKeywordOpWhite {
		log.Error("%v", ErrIllegalOp)
		errResp(c, ecode.RequestErr, ErrIllegalOp)
		return
	}
	id, err := strconv.ParseInt(params.Get(ProtocolKeywordID), 10, 64)
	if err != nil {
		log.Error("%v", err)
		errResp(c, ecode.RequestErr, err)
		return
	}
	k, err := Svr.GetKeywordByID(c, id)
	if err != nil {
		errResp(c, ecode.ServerErr, err)
		return
	}
	if k.State == model.StateDeleted {
		errResp(c, ecode.ServerErr, dao.ErrResourceNotExist)
		return
	}
	// Already in the requested state: nothing to do.
	if k.Tag == op {
		c.JSON(k, nil)
		return
	}
	k, err = Svr.OpKeyword(c, id, op)
	if err != nil {
		errResp(c, ecode.ServerErr, err)
		return
	}
	c.JSON(k, nil)
}
// GetKeyword returns a single keyword by the "id" parameter; admin id
// and area are required and validated first.
func GetKeyword(c *bm.Context) {
	params := c.Request.Form
	_, _, err := getAdminIDAndArea(params)
	if err != nil {
		errResp(c, ecode.RequestErr, err)
		return
	}
	if params.Get(ProtocolKeywordID) == "" {
		errResp(c, ecode.RequestErr, ErrMissID)
		return
	}
	id, err := strconv.ParseInt(params.Get(ProtocolKeywordID), 10, 64)
	if err != nil {
		log.Error("%v", err)
		errResp(c, ecode.RequestErr, err)
		return
	}
	keyword, err := Svr.GetKeywordByID(c, id)
	if err != nil {
		errResp(c, ecode.ServerErr, err)
		return
	}
	c.JSON(keyword, nil)
}
// GetKeywords lists keywords for an area with optional ctime window,
// search, ordering, tag filter and pagination. When export=true the
// page size is forced to the configured export maximum instead of the
// client-supplied paging. The response is {"total_counts": n, "data": [...]}.
func GetKeywords(c *bm.Context) {
	params := c.Request.Form
	_, area, err := getAdminIDAndArea(params)
	if err != nil {
		errResp(c, ecode.RequestErr, err)
		return
	}
	var (
		ctimeStart, ctimeEnd string
		startTime, endTime   time.Time
	)
	cond := &Condition{
		Pagination: &util.Pagination{},
		Area:       area,
		State:      model.StateDefault,
		Search:     params.Get(ProtocolSearch),
		Order:      params.Get(ProtocolOrder),
		OrderBy:    params.Get(ProtocolOrderBy),
		Tag:        params.Get(ProtocolKeywordTag),
	}
	// Optional creation-time window; both bounds are parsed with the
	// shared util.TimeFormat layout.
	if ctimeStart = params.Get(ProtocolCTimeStart); ctimeStart != "" {
		startTime, err = time.Parse(util.TimeFormat, ctimeStart)
		if err != nil {
			log.Error("%v", err)
			errResp(c, ecode.RequestErr, err)
			return
		}
		cond.StartTime = &startTime
	}
	if ctimeEnd = params.Get(ProtocolCTimeEnd); ctimeEnd != "" {
		endTime, err = time.Parse(util.TimeFormat, ctimeEnd)
		if err != nil {
			log.Error("%v", err)
			errResp(c, ecode.RequestErr, err)
			return
		}
		cond.EndTime = &endTime
	}
	if ctimeStart != "" && ctimeEnd != "" {
		if startTime.After(endTime) {
			err = errors.New("start_time cannot be bigger than end_time")
			log.Error("%v", err)
			errResp(c, ecode.RequestErr, err)
			return
		}
	}
	var export bool
	if params.Get(ProtocolKeywordExport) != "" {
		export, err = strconv.ParseBool(params.Get(ProtocolKeywordExport))
		if err != nil {
			log.Error("%v", err)
			errResp(c, ecode.RequestErr, err)
			return
		}
	}
	if export {
		// Export mode: one big page capped by configuration.
		cond.PerPage = conf.Conf.ServiceOption.MaxExportRows
	} else {
		if params.Get(ProtocolCurPage) != "" {
			cond.CurPage, err = strconv.ParseInt(params.Get(ProtocolCurPage), 10, 64)
			if err != nil {
				log.Error("%v", err)
				errResp(c, ecode.RequestErr, err)
				return
			}
		}
		if perPage := params.Get(ProtocolPerPage); perPage != "" {
			if cond.PerPage, err = strconv.ParseInt(perPage, 10, 64); err != nil {
				log.Error("%v", err)
				errResp(c, ecode.RequestErr, err)
				return
			}
		}
	}
	// Valid() fills paging defaults and normalizes the sort direction.
	if err = cond.Valid(); err != nil {
		log.Error("%v", err)
		errResp(c, ecode.RequestErr, err)
		return
	}
	keywords, total, err := Svr.GetKeywordsByCond(c, ToServiceCond(cond))
	if err != nil {
		errResp(c, ecode.ServerErr, err)
		return
	}
	res := map[string]interface{}{}
	res[ProtocolTotalCounts] = total
	res[ProtocolData] = keywords
	c.JSONMap(res, nil)
}

View File

@@ -0,0 +1,94 @@
package http
// Request/response parameter names shared by all antispam HTTP handlers.
const (
	// ProtocolArea is the business-area parameter.
	ProtocolArea = "area"
	// ProtocolState filters by record state.
	ProtocolState = "state"
	// ProtocolSearch is the free-text search parameter.
	ProtocolSearch = "q"
	// ProtocolSynced filters by sync status.
	ProtocolSynced = "synced"
	// ProtocolAdminID identifies the operating administrator.
	ProtocolAdminID = "admin_id"
	// ProtocolCurPage is the 1-based page number.
	ProtocolCurPage = "cur_page"
	// ProtocolPerPage is the page size.
	ProtocolPerPage = "per_page"
	// ProtocolCTime is the creation-time field name.
	ProtocolCTime = "ctime"
	// ProtocolCTimeStart is the lower bound of the ctime window.
	ProtocolCTimeStart = "ctime_start"
	// ProtocolCTimeEnd is the upper bound of the ctime window.
	ProtocolCTimeEnd = "ctime_end"
	// ProtocolOrder is the sort direction parameter.
	ProtocolOrder = "order"
	// ProtocolOrderBy is the sort column parameter.
	ProtocolOrderBy = "order_by"
	// ProtocolOrderASC is the ascending sort direction value.
	ProtocolOrderASC = "asc"
	// ProtocolOrderDESC is the descending sort direction value.
	ProtocolOrderDESC = "desc"
	// ProtocolCode is the response key carrying the status code.
	ProtocolCode = "code"
	// ProtocolData is the response key carrying the payload.
	ProtocolData = "data"
	// ProtocolMessage is the response key carrying the human message.
	ProtocolMessage = "msg"
	// ProtocolTotalCounts is the response key carrying the total row count.
	ProtocolTotalCounts = "total_counts"
	// ProtocolRegexpID is the regexp id parameter.
	ProtocolRegexpID = "id"
	// ProtocolRegexpName is the regexp name parameter.
	ProtocolRegexpName = "name"
	// ProtocolRegexpContent is the regexp pattern parameter.
	ProtocolRegexpContent = "content"
	// ProtocolRegexpOperation is the regexp operation parameter.
	ProtocolRegexpOperation = "op"
	// ProtocolRuleDuration is the rule window length parameter.
	ProtocolRuleDuration = "time_span"
	// ProtocolRuleLimitType is the rule limit-type parameter.
	ProtocolRuleLimitType = "limit_type"
	// ProtocolRuleLimitScope is the rule limit-scope parameter.
	ProtocolRuleLimitScope = "limit_scope"
	// ProtocolRuleAllowedCounts is the allowed-hit-count parameter.
	ProtocolRuleAllowedCounts = "allowed_counts"
	// ProtocolKeywordID is the keyword id parameter.
	ProtocolKeywordID = "id"
	// ProtocolKeywordIDs is the comma-separated keyword id list parameter.
	ProtocolKeywordIDs = "ids"
	// ProtocolKeywordTag filters keywords by tag.
	ProtocolKeywordTag = "tag"
	// ProtocolKeywordExport requests export mode (bool).
	ProtocolKeywordExport = "export"
	// ProtocolKeywordHitCounts filters keywords by hit count.
	ProtocolKeywordHitCounts = "hit_counts"
	// ProtocolKeywordOpWhite whitelists a keyword.
	ProtocolKeywordOpWhite = "white"
	// ProtocolKeywordOpBlack blacklists a keyword.
	ProtocolKeywordOpBlack = "black"
	// ProtocolKeywordOperation names the keyword operation parameter.
	ProtocolKeywordOperation = "op"
	// ProtocolKeywordOpDefaultLimit applies the default rate limit.
	ProtocolKeywordOpDefaultLimit = "limit"
	// ProtocolKeywordOpRestrictLimit applies the restricted rate limit.
	ProtocolKeywordOpRestrictLimit = "restrict"
	// ProtocolKeywordOpBlackAndDeleteReply blacklists and deletes replies.
	ProtocolKeywordOpBlackAndDeleteReply = "black_and_del"
	// ProtocolKeywordSenderID is the sender id parameter.
	ProtocolKeywordSenderID = "sender_id"
	// ProtocolKeywordSubjectID is the subject (oid) parameter.
	ProtocolKeywordSubjectID = "oid"
	// ProtocolKeywordContent is the content-to-filter parameter.
	ProtocolKeywordContent = "content"
)

View File

@@ -0,0 +1,255 @@
package http
import (
"errors"
"fmt"
"net/url"
"regexp"
"strconv"
"go-common/app/service/main/antispam/conf"
"go-common/app/service/main/antispam/dao"
"go-common/app/service/main/antispam/model"
"go-common/app/service/main/antispam/util"
"go-common/library/ecode"
"go-common/library/log"
bm "go-common/library/net/http/blademaster"
)
// GetRegexp returns a single regexp record looked up by its id.
// Expected query parameters: admin_id (validated only) and id.
func GetRegexp(c *bm.Context) {
	params := c.Request.Form
	if _, err := getAdminID(params); err != nil {
		errResp(c, ecode.RequestErr, err)
		return
	}
	id, err := strconv.ParseInt(params.Get(ProtocolRegexpID), 10, 64)
	if err != nil {
		errResp(c, ecode.RequestErr, err)
		return
	}
	// The result was previously named "regexp", shadowing the imported
	// regexp package; renamed to avoid the shadow.
	r, err := Svr.GetRegexpByID(c, id)
	if err != nil {
		errResp(c, ecode.RequestErr, err)
		return
	}
	c.JSON(r, nil)
}
// GetRegexps lists the regexps of an area, with optional pagination
// via the cur_page / per_page query parameters.
func GetRegexps(c *bm.Context) {
	params := c.Request.Form
	_, area, err := getAdminIDAndArea(params)
	if err != nil {
		errResp(c, ecode.RequestErr, err)
		return
	}
	cond := &Condition{
		Area:       area,
		Pagination: &util.Pagination{},
	}
	if cur := params.Get(ProtocolCurPage); cur != "" {
		if cond.CurPage, err = strconv.ParseInt(cur, 10, 64); err != nil {
			log.Error("%v", err)
			errResp(c, ecode.RequestErr, err)
			return
		}
	}
	if per := params.Get(ProtocolPerPage); per != "" {
		if cond.PerPage, err = strconv.ParseInt(per, 10, 64); err != nil {
			log.Error("%v", err)
			errResp(c, ecode.RequestErr, err)
			return
		}
	}
	if err = cond.Valid(); err != nil {
		log.Error("%v", err)
		errResp(c, ecode.RequestErr, err)
		return
	}
	regexps, total, err := Svr.GetRegexpsByCond(c, ToServiceCond(cond))
	if err != nil {
		errResp(c, ecode.RequestErr, err)
		return
	}
	c.JSONMap(map[string]interface{}{
		ProtocolTotalCounts: total,
		ProtocolData:        regexps,
	}, nil)
}
// EditRegexp updates an existing regexp; the record is identified by
// the "id" query parameter and the remaining fields are handled by
// addOrEditRegexp.
func EditRegexp(c *bm.Context) {
params := c.Request.Form
if _, err := getAdminID(params); err != nil {
errResp(c, ecode.RequestErr, err)
return
}
id, err := strconv.ParseInt(params.Get(ProtocolRegexpID), 10, 64)
if err != nil {
log.Error("%v", err)
errResp(c, ecode.RequestErr, err)
return
}
addOrEditRegexp(c, params, id)
}
// AddRegexp creates a new regexp for an area; id 0 signals "create"
// to addOrEditRegexp.
func AddRegexp(c *bm.Context) {
params := c.Request.Form
if _, _, err := getAdminIDAndArea(params); err != nil {
errResp(c, ecode.RequestErr, err)
return
}
// Zero id means "insert new record" downstream.
var id int64
addOrEditRegexp(c, params, id)
}
// RecoverRegexp restores a previously deleted regexp, re-checking the
// per-area quota before bringing it back. Recovering a regexp that is
// not deleted is a no-op that echoes the record.
func RecoverRegexp(c *bm.Context) {
	params := c.Request.Form
	adminID, err := getAdminID(params)
	if err != nil {
		errResp(c, ecode.RequestErr, err)
		return
	}
	regID, err := strconv.ParseInt(params.Get(ProtocolRegexpID), 10, 64)
	if err != nil {
		log.Error("%v", err)
		errResp(c, ecode.RequestErr, err)
		return
	}
	reg, err := Svr.GetRegexpByID(c, regID)
	if err != nil {
		errResp(c, ecode.RequestErr, err)
		return
	}
	if reg.State != model.StateDeleted {
		// Nothing to recover.
		c.JSON(reg, nil)
		return
	}
	_, total, err := Svr.GetRegexpsByCond(c, ToServiceCond(&Condition{State: model.StateDefault, Area: reg.Area}))
	if err != nil {
		errResp(c, ecode.ServerErr, err)
		return
	}
	if total >= conf.Conf.ServiceOption.MaxRegexpCountsPerArea {
		err = fmt.Errorf("regexp counts cannot exceed %d", conf.Conf.ServiceOption.MaxRegexpCountsPerArea)
		log.Error("%v", err)
		errResp(c, ecode.RequestErr, err)
		return
	}
	reg.State, reg.AdminID = model.StateDefault, adminID
	saved, err := Svr.UpsertRegexp(c, reg)
	if err != nil {
		errResp(c, ecode.ServerErr, err)
		return
	}
	c.JSON(saved, nil)
}
// DeleteRegexp soft-deletes a regexp by id on behalf of an admin.
func DeleteRegexp(c *bm.Context) {
	params := c.Request.Form
	adminID, err := getAdminID(params)
	if err != nil {
		errResp(c, ecode.RequestErr, err)
		return
	}
	regID, err := strconv.ParseInt(params.Get(ProtocolRegexpID), 10, 64)
	if err != nil {
		log.Error("%v", err)
		errResp(c, ecode.RequestErr, err)
		return
	}
	deleted, err := Svr.DeleteRegexp(c, regID, adminID)
	if err != nil {
		errResp(c, ecode.ServerErr, err)
		return
	}
	c.JSON(deleted, nil)
}
// addOrEditRegexp validates the request and upserts a regexp record.
// id == 0 means "create"; a non-zero id updates the existing record.
// On create it additionally enforces the per-area regexp quota and
// resurrects a previously deleted regexp with the same area+content
// instead of inserting a duplicate.
func addOrEditRegexp(c *bm.Context, params url.Values, id int64) {
	adminID, err := getAdminID(params)
	if err != nil {
		errResp(c, ecode.RequestErr, err)
		return
	}
	name := params.Get(ProtocolRegexpName)
	if name == "" {
		err = errors.New("empty regexp name")
		log.Error("%v", err)
		errResp(c, ecode.RequestErr, err)
		return
	}
	content := params.Get(ProtocolRegexpContent)
	if content == "" {
		err = errors.New("empty regexp content")
		log.Error("%v", err)
		errResp(c, ecode.RequestErr, err)
		return
	}
	// Reject patterns Go's regexp engine cannot compile.
	if _, err = regexp.Compile(content); err != nil {
		log.Error("%v", err)
		errResp(c, ecode.FilterIllegalRegexp, err)
		return
	}
	op := params.Get(ProtocolRegexpOperation)
	if op == "" {
		op = model.OperationLimit
	}
	area := params.Get(ProtocolArea)
	if id == 0 {
		_, total, err1 := Svr.GetRegexpsByCond(c, ToServiceCond(&Condition{State: model.StateDefault, Area: area}))
		if err1 != nil {
			errResp(c, ecode.ServerErr, err1)
			return
		}
		if total >= conf.Conf.ServiceOption.MaxRegexpCountsPerArea {
			err1 = fmt.Errorf("regexp counts cannot exceed %d", conf.Conf.ServiceOption.MaxRegexpCountsPerArea)
			log.Error("%v", err1)
			errResp(c, ecode.RequestErr, err1)
			return
		}
		oldR, err1 := Svr.GetRegexpByAreaAndContent(c, area, content)
		if err1 == nil {
			if oldR.State != model.StateDefault {
				// Same pattern existed but was deleted: restore it in place.
				oldR.State = model.StateDefault
				result, err2 := Svr.UpsertRegexp(c, oldR)
				if err2 != nil {
					errResp(c, ecode.ServerErr, err2)
					return
				}
				c.JSON(result, nil)
				return
			}
			err1 = fmt.Errorf("regexp(%s) already exists", content)
			log.Error("%v", err1)
			errResp(c, ecode.RequestErr, err1)
			return
		}
		// BUG FIX: this previously tested `err` (always nil at this point)
		// instead of `err1`, so an ordinary "not found" lookup was answered
		// with ServerErr and a new regexp could never be created.
		if err1 != dao.ErrResourceNotExist {
			log.Error("%v", err1)
			errResp(c, ecode.ServerErr, err1)
			return
		}
	}
	r := &model.Regexp{
		ID:        id,
		Area:      area,
		AdminID:   adminID,
		Name:      name,
		Content:   content,
		Operation: op,
	}
	result, err := Svr.UpsertRegexp(c, r)
	if err != nil {
		errResp(c, ecode.ServerErr, err)
		return
	}
	c.JSON(result, nil)
}

View File

@@ -0,0 +1,115 @@
package http
import (
"errors"
"fmt"
"strconv"
"go-common/app/service/main/antispam/conf"
"go-common/app/service/main/antispam/model"
"go-common/library/ecode"
"go-common/library/log"
bm "go-common/library/net/http/blademaster"
)
// GetRule fetches the single rule matching area + limit_type +
// limit_scope; both limit parameters are mandatory.
func GetRule(c *bm.Context) {
	params := c.Request.Form
	_, area, err := getAdminIDAndArea(params)
	if err != nil {
		log.Error("%v", err)
		errResp(c, ecode.RequestErr, err)
		return
	}
	limitType := params.Get(ProtocolRuleLimitType)
	limitScope := params.Get(ProtocolRuleLimitScope)
	if limitType == "" || limitScope == "" {
		err = errors.New("either limit_type or limit_scope is nil")
		log.Error("%v", err)
		errResp(c, ecode.RequestErr, err)
		return
	}
	rule, err := Svr.GetRuleByAreaAndLimitTypeAndScope(c, area, limitType, limitScope)
	if err != nil {
		errResp(c, ecode.ServerErr, err)
		return
	}
	c.JSON(rule, nil)
}
// GetRules lists every rule configured for the given area.
func GetRules(c *bm.Context) {
	params := c.Request.Form
	_, area, err := getAdminIDAndArea(params)
	if err != nil {
		log.Error("%v", err)
		errResp(c, ecode.RequestErr, err)
		return
	}
	rules, err := Svr.GetRuleByArea(c, area)
	if err == nil {
		c.JSON(rules, nil)
		return
	}
	errResp(c, ecode.ServerErr, err)
}
// AddRule creates (or upserts) a rate-limit rule for an area. It
// validates limit_type, limit_scope, allowed_counts and time_span, and
// rejects values outside the configured maxima.
func AddRule(c *bm.Context) {
	params := c.Request.Form
	_, area, err := getAdminIDAndArea(params)
	if err != nil {
		log.Error("%v", err)
		errResp(c, ecode.RequestErr, err)
		return
	}
	typ := params.Get(ProtocolRuleLimitType)
	if typ != model.LimitTypeDefault && typ != model.LimitTypeRestrict {
		err = fmt.Errorf("illegal limit type %q", typ)
		log.Error("%v", err)
		errResp(c, ecode.RequestErr, err)
		return
	}
	scope := params.Get(ProtocolRuleLimitScope)
	if scope != model.LimitScopeLocal && scope != model.LimitScopeGlobal {
		err = fmt.Errorf("illegal limit scope %q", scope)
		log.Error("%v", err)
		errResp(c, ecode.RequestErr, err)
		return
	}
	r := &model.Rule{
		Area:       area,
		LimitType:  typ,
		LimitScope: scope,
	}
	allowedCounts := params.Get(ProtocolRuleAllowedCounts)
	if r.AllowedCounts, err = strconv.ParseInt(allowedCounts, 10, 64); err != nil {
		log.Error("%v", err)
		errResp(c, ecode.RequestErr, err)
		return
	}
	if r.DurationSec, err = strconv.ParseInt(params.Get(ProtocolRuleDuration), 10, 64); err != nil {
		log.Error("%v", err)
		errResp(c, ecode.RequestErr, err)
		return
	}
	if r.DurationSec <= 0 || r.AllowedCounts <= 0 {
		// BUG FIX: the format arguments were swapped relative to the
		// message (durationSec printed AllowedCounts and vice versa).
		err = fmt.Errorf("both durationSec(%d) and allowedCounts(%d) must be greater than 0", r.DurationSec, r.AllowedCounts)
		log.Error("%v", err)
		errResp(c, ecode.RequestErr, err)
		return
	}
	if r.DurationSec > conf.Conf.MaxDurationSec || r.AllowedCounts > conf.Conf.MaxAllowedCounts {
		// BUG FIX: same swapped-argument problem as above.
		err = fmt.Errorf("either durationSec(%d) or allowedCounts(%d) exceed maxDurationSec(%d), maxAllowedCounts(%d)",
			r.DurationSec, r.AllowedCounts, conf.Conf.MaxDurationSec, conf.Conf.MaxAllowedCounts)
		log.Error("%v", err)
		errResp(c, ecode.RequestErr, err)
		return
	}
	r, err = Svr.UpsertRule(c, r)
	if err != nil {
		errResp(c, ecode.ServerErr, err)
		return
	}
	c.JSON(r, nil)
}

View File

@@ -0,0 +1,35 @@
# Bazel build rules for the antispam model package.
# Tagged "automanaged": regenerated by tooling (gazelle); manual edits,
# including these comments, may be overwritten.
package(default_visibility = ["//visibility:public"])
load(
"@io_bazel_rules_go//go:def.bzl",
"go_library",
)
go_library(
name = "go_default_library",
srcs = [
"common.go",
"keyword.go",
"regexp.go",
"rule.go",
"suspicious.go",
],
importpath = "go-common/app/service/main/antispam/model",
tags = ["automanaged"],
visibility = ["//visibility:public"],
deps = ["//app/service/main/antispam/util:go_default_library"],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,33 @@
package model
// Shared column/field names, business areas and record states used
// across the antispam model types.
const (
// ID is the primary-key column name.
ID = "id"
// Area is the business-area column name.
Area = "area"
// AreaReply is the comment/reply area.
AreaReply = "reply"
// AreaIMessage is the instant-message area.
AreaIMessage = "im"
// AreaLiveDM is the live-streaming danmaku area.
AreaLiveDM = "live_dm"
// AreaMainSiteDM is the main-site danmaku area.
AreaMainSiteDM = "danmu"
// State is the lifecycle-state column name.
State = "state"
// StateDefault marks a live (non-deleted) record.
StateDefault = "default"
// StateDeleted marks a soft-deleted record.
StateDeleted = "deleted"
// OrderASC sorts ascending.
OrderASC = "ASC"
// OrderDESC sorts descending.
OrderDESC = "DESC"
// CTime is the creation-time column name.
CTime = "ctime"
// MTime is the modification-time column name.
MTime = "mtime"
)

View File

@@ -0,0 +1,54 @@
package model
import (
"fmt"
"go-common/app/service/main/antispam/util"
)
// Keyword field names and tag values.
const (
// ParamKeywordHitCounts is the JSON/query name of the hit counter.
ParamKeywordHitCounts = "show_up_counts"
// KeywordTag is the tag column name.
KeywordTag = "tag"
// KeywordTagBlack marks a blacklisted keyword.
KeywordTagBlack = "black"
// KeywordTagWhite marks a whitelisted keyword.
KeywordTagWhite = "white"
// KeywordTagDefaultLimit marks a keyword under the default rate limit.
KeywordTagDefaultLimit = "limit"
// KeywordTagRestrictLimit marks a keyword under the restricted rate limit.
KeywordTagRestrictLimit = "restrict"
// KeywordContent is the content column name.
KeywordContent = "content"
// KeywordHitCounts is the hit-counter column name.
KeywordHitCounts = "hit_counts"
)
// SenderList is the set of sender ids recorded for one keyword,
// together with its size.
type SenderList struct {
SenderIDs []int64 `json:"sender_ids"`
Counts int `json:"counts"`
}
// Keyword is a spam keyword extracted from user content. Area and
// SenderID are internal (excluded from JSON); HitCounts is serialized
// as "show_up_counts".
type Keyword struct {
ID int64 `json:"id"`
Area string `json:"-"`
Content string `json:"content"`
SenderID int64 `json:"-"`
OriginContent string `json:"origin_content"`
SenderCounts int64 `json:"sender_counts"`
RegexpName string `json:"reg_name"`
Tag string `json:"tag"`
State string `json:"state"`
HitCounts int64 `json:"show_up_counts"`
CTime util.JSONTime `json:"ctime"`
MTime util.JSONTime `json:"mtime"`
}
// String renders the keyword's identifying fields for logging.
// Note the trailing newline in the format.
func (k *Keyword) String() string {
return fmt.Sprintf("id: %d, area: %s, content: %s, tag: %s, state: %s, hitCounts %d\n",
k.ID, k.Area, k.Content, k.Tag, k.State, k.HitCounts)
}

View File

@@ -0,0 +1,48 @@
package model
import (
"fmt"
"regexp"
"strings"
"go-common/app/service/main/antispam/util"
)
// Operations a regexp can bind to a matched keyword.
const (
// OperationLimit applies the default rate limit.
OperationLimit = "limit"
// OperationRestrictLimit applies the restricted rate limit.
OperationRestrictLimit = "restrict"
// OperationPutToWhiteList whitelists the matched keyword.
OperationPutToWhiteList = "white"
// OperationIgnore takes no action on the match.
OperationIgnore = "ignore"
)
// Regexp is an admin-managed extraction pattern for one area.
// Reg holds the compiled form of Content and is not serialized; it is
// populated outside this type (nil until compiled).
type Regexp struct {
ID int64 `json:"id"`
Area string `json:"area"`
AdminID int64 `json:"admin_id"`
AdminName string `json:"-"`
Reg *regexp.Regexp `json:"-"`
Name string `json:"name"`
Operation string `json:"op"`
Content string `json:"content"`
State string `json:"state"`
CTime util.JSONTime `json:"-"`
MTime util.JSONTime `json:"mtime"`
}
// FindString runs the compiled pattern against content and returns the
// first capture group, trimmed of surrounding whitespace. It returns
// "" when the pattern does not match or captures nothing.
// A nil receiver or a nil Reg (not yet compiled) also yields "" — the
// guard prevents the nil-pointer panic the original code could hit,
// since Reg is populated lazily elsewhere.
func (r *Regexp) FindString(content string) string {
	if r == nil || r.Reg == nil {
		return ""
	}
	hits := r.Reg.FindStringSubmatch(content)
	if len(hits) < 2 {
		return ""
	}
	return strings.TrimSpace(hits[1])
}
// String renders the regexp's identifying fields for logging.
// Note the trailing newline in the format.
func (r *Regexp) String() string {
return fmt.Sprintf("name:%s, operation:%s, regexp:%s\n",
r.Name, r.Operation, r.Content)
}

View File

@@ -0,0 +1,38 @@
package model
// Limit types and scopes a rule can carry.
const (
// LimitTypeDefault is the default rate limit.
LimitTypeDefault = "limit"
// LimitTypeRestrict is the stricter rate limit.
LimitTypeRestrict = "restrict"
// LimitTypeBlack rejects the content outright.
LimitTypeBlack = "black"
// LimitTypeWhite exempts the content from limiting.
LimitTypeWhite = "white"
// LimitScopeLocal limits per subject (oid).
LimitScopeLocal = "local"
// LimitScopeGlobal limits across all subjects.
LimitScopeGlobal = "global"
)
// AggregateRule merges the local and global rules of one (area,
// limit_type) pair into a single view with both windows and quotas.
type AggregateRule struct {
Area string `json:"area"`
LimitType string `json:"limit_type"`
GlobalAllowedCounts int64 `json:"global_allowed_counts"`
LocalAllowedCounts int64 `json:"local_allowed_counts"`
GlobalDurationSec int64 `json:"global_dur"`
LocalDurationSec int64 `json:"local_dur"`
}
// Rule allows AllowedCounts hits per DurationSec window for one
// (area, limit_type, limit_scope) combination.
type Rule struct {
ID int64 `json:"id"`
Area string `json:"area"`
AllowedCounts int64 `json:"allowed_counts"`
LimitType string `json:"limit_type"`
LimitScope string `json:"limit_scope"`
DurationSec int64 `json:"dur"`
}

View File

@@ -0,0 +1,42 @@
package model
// Suspicious is the RPC request payload: one piece of user-generated
// content to check. The getters below satisfy the service layer's
// content interface (presumably UserGeneratedContent — see
// service.Filter).
// NOTE(review): Id/SenderId/OId predate the Go initialism convention
// (ID/SenderID/OID) but are exported and used by callers, so renaming
// would be a breaking change.
type Suspicious struct {
Id int64 `json:"id"`
SenderId int64 `json:"sender_id"`
Content string `json:"content"`
Area string `json:"area"`
OId int64 `json:"oid"`
}
// GetArea returns the business area of the content.
func (susp *Suspicious) GetArea() string {
return susp.Area
}
// GetSenderID returns the id of the user who sent the content.
func (susp *Suspicious) GetSenderID() int64 {
return susp.SenderId
}
// GetID returns the content's own id.
func (susp *Suspicious) GetID() int64 {
return susp.Id
}
// GetOID returns the id of the subject the content belongs to.
func (susp *Suspicious) GetOID() int64 {
return susp.OId
}
// GetContent returns the raw content text.
func (susp *Suspicious) GetContent() string {
return susp.Content
}
// SuspiciousResp is the RPC response: the matched content (if any) and
// the verdict in LimitType (see the Susp* constants in the service).
type SuspiciousResp struct {
Area string `json:"-"`
Content string `json:"content"`
LimitType string `json:"susp_type"`
}

View File

@@ -0,0 +1,32 @@
# Bazel build rules for the antispam RPC client package.
# Tagged "automanaged": regenerated by tooling; manual edits, including
# these comments, may be overwritten.
package(default_visibility = ["//visibility:public"])
load(
"@io_bazel_rules_go//go:def.bzl",
"go_library",
)
go_library(
name = "go_default_library",
srcs = ["client.go"],
importpath = "go-common/app/service/main/antispam/rpc/client",
tags = ["automanaged"],
visibility = ["//visibility:public"],
deps = [
"//app/service/main/antispam/model:go_default_library",
"//library/net/rpc:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,30 @@
package rpc
import (
"context"
"go-common/app/service/main/antispam/model"
"go-common/library/net/rpc"
)
const (
// _appid is the discovery id used to locate the antispam service.
_appid = "antispam.service"
)
// Client is the RPC client of the antispam service; it embeds the
// framework client and adds typed call wrappers.
type Client struct {
*rpc.Client2
}
// NewClient builds an antispam RPC client whose endpoints are resolved
// through service discovery under the _appid application id.
func NewClient(c *rpc.ClientConfig) *Client {
	return &Client{
		Client2: rpc.NewDiscoveryCli(_appid, c),
	}
}
// Filter asks the antispam service to check one piece of suspicious
// content by invoking the remote "Filter.Check" method.
func (cli *Client) Filter(ctx context.Context, arg *model.Suspicious) (res *model.SuspiciousResp, err error) {
err = cli.Call(ctx, "Filter.Check", arg, &res)
return
}

View File

@@ -0,0 +1,39 @@
# Bazel build rules for the antispam RPC server package.
# Tagged "automanaged": regenerated by tooling; manual edits, including
# these comments, may be overwritten.
package(default_visibility = ["//visibility:public"])
load(
"@io_bazel_rules_go//go:def.bzl",
"go_library",
)
go_library(
name = "go_default_library",
srcs = [
"filter.go",
"rpc.go",
],
importpath = "go-common/app/service/main/antispam/rpc/server",
tags = ["automanaged"],
visibility = ["//visibility:public"],
deps = [
"//app/service/main/antispam/conf:go_default_library",
"//app/service/main/antispam/model:go_default_library",
"//app/service/main/antispam/service:go_default_library",
"//library/log:go_default_library",
"//library/net/rpc:go_default_library",
"//library/net/rpc/context:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,64 @@
package server
import (
"fmt"
"go-common/app/service/main/antispam/conf"
"go-common/app/service/main/antispam/model"
"go-common/app/service/main/antispam/service"
"go-common/library/log"
"go-common/library/net/rpc/context"
)
// Filter is the RPC service object; it delegates spam checks to the
// underlying antispam service.
type Filter struct {
svr service.Service
}
// valid sanity-checks an incoming Suspicious request: it must be
// non-nil, carry a positive OId, and belong to a configured area.
// The legacy area name "message" is rewritten to model.AreaIMessage in
// place (with an additional sender-id check), for backward
// compatibility.
func valid(susp *model.Suspicious) error {
	if susp == nil {
		err := fmt.Errorf("nil request params susp(%v)", susp)
		log.Error("%v", err)
		return err
	}
	if susp.OId <= 0 {
		// BUG FIX: the message previously printed susp.SenderId even
		// though it describes OId.
		err := fmt.Errorf("OId(%d) must be greater than 0", susp.OId)
		log.Error("%v", err)
		return err
	}
	if susp.Area == "message" {
		// for backward compatibility(history reason)
		susp.Area = model.AreaIMessage
		if susp.SenderId <= 0 {
			err := fmt.Errorf("senderId(%d) must be greater than 0", susp.SenderId)
			log.Error("%v", err)
			return err
		}
		return nil
	}
	if _, ok := conf.Areas[susp.Area]; !ok {
		err := fmt.Errorf("invalid area(%s)", susp.Area)
		log.Error("%v", err)
		return err
	}
	return nil
}
// Check is the "Filter.Check" RPC endpoint: it validates the request,
// runs the spam filter, and copies the verdict into resp.
func (f *Filter) Check(ctx context.Context, susp *model.Suspicious, resp *model.SuspiciousResp) error {
	if err := valid(susp); err != nil {
		return err
	}
	ret, err := f.svr.Filter(ctx, susp)
	if err == nil {
		*resp = *ret
	}
	return err
}
// Ping is the RPC health-check endpoint; it always succeeds.
func (f *Filter) Ping(ctx context.Context, arg *struct{}, res *struct{}) error {
return nil
}

View File

@@ -0,0 +1,19 @@
package server
import (
"log"
"go-common/app/service/main/antispam/conf"
"go-common/app/service/main/antispam/service"
"go-common/library/net/rpc"
)
// New builds the RPC server and registers the Filter service on it.
// Registration failure is fatal: the process cannot serve without it.
func New(config *conf.Config, s service.Service) *rpc.Server {
	svr := rpc.NewServer(config.RPC)
	err := svr.Register(&Filter{svr: s})
	if err != nil {
		log.Fatalf("%+v", err)
	}
	return svr
}

View File

@@ -0,0 +1,79 @@
# Bazel build rules for the antispam service package (library plus
# internal and external test targets).
# Tagged "automanaged": regenerated by tooling; manual edits, including
# these comments, may be overwritten.
package(default_visibility = ["//visibility:public"])
load(
"@io_bazel_rules_go//go:def.bzl",
"go_test",
"go_library",
)
go_test(
name = "go_default_test",
srcs = [
"service_test.go",
"transform_test.go",
"trie_mgr_test.go",
],
embed = [":go_default_library"],
rundir = ".",
tags = ["automanaged"],
deps = [
"//app/service/main/antispam/conf:go_default_library",
"//app/service/main/antispam/util:go_default_library",
"//app/service/main/antispam/util/trie:go_default_library",
],
)
go_library(
name = "go_default_library",
srcs = [
"cond.go",
"filter.go",
"keyword.go",
"option.go",
"pipeline.go",
"regexp.go",
"rule.go",
"scheduler.go",
"service.go",
"service_impl.go",
"transform.go",
"trie_mgr.go",
],
importpath = "go-common/app/service/main/antispam/service",
tags = ["automanaged"],
visibility = ["//visibility:public"],
deps = [
"//app/service/main/antispam/conf:go_default_library",
"//app/service/main/antispam/dao:go_default_library",
"//app/service/main/antispam/extern:go_default_library",
"//app/service/main/antispam/model:go_default_library",
"//app/service/main/antispam/util:go_default_library",
"//app/service/main/antispam/util/trie:go_default_library",
"//library/cache:go_default_library",
"//library/log:go_default_library",
],
)
go_test(
name = "go_default_xtest",
srcs = [
"keyword_test.go",
"regexp_content_test.go",
],
tags = ["automanaged"],
deps = ["//vendor/github.com/stretchr/testify/assert:go_default_library"],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,34 @@
package service
import (
"time"
"go-common/app/service/main/antispam/util"
)
// Condition aggregates all query parameters used by the service-layer
// lookups. If pagination is not needed, leave Condition.Pagination nil.
type Condition struct {
// set it to nil if you don't want pagination
// otherwise, page num 1 and page size 20 will be the default
*util.Pagination
// Offset/Limit select a raw id range (see GetKeywordsByOffsetLimit).
Offset string
Limit string
// Tag/Tags filter keywords by tag; Content/Contents by exact content.
Tag string
Tags []string
Content string
Contents []string
Area string
State string
// Search is a free-text search term.
Search string
// HitCounts is a hit-counter bound; interpretation is up to the DAO.
HitCounts string
Order, OrderBy string
LimitType, LimitScope string
// StartTime/EndTime bound the creation-time range.
StartTime, EndTime *time.Time
LastModifiedTime *time.Time
}

View File

@@ -0,0 +1,103 @@
package service
import (
"context"
"go-common/app/service/main/antispam/conf"
"go-common/app/service/main/antispam/model"
"go-common/app/service/main/antispam/util"
"go-common/library/log"
)
// Verdicts returned in SuspiciousResp.LimitType.
const (
// SuspOK means the content passed (or is within its limits).
SuspOK = "ok"
// SuspHitBlack means the content matched a blacklisted keyword.
SuspHitBlack = "hit_black"
// SuspHitRestrict means it matched a restrict-limited keyword (still within limits).
SuspHitRestrict = "hit_restrict"
// SuspExceedAllowedCounts means a rate-limit window was exhausted.
SuspExceedAllowedCounts = "exceed_allowed_counts"
)
// Filter detects spam info based on the per-area rules.
//
// Flow:
//  1. Look the content up in the area's trie. Unknown content is pushed
//     to the async extraction pipeline and passes through as SuspOK.
//  2. Schedule an async task that bumps the keyword's hit counter,
//     records the sender, and may auto-whitelist the keyword.
//  3. Apply black / rate-limit rules and report the verdict.
func (s *SvcImpl) Filter(ctx context.Context, ugc UserGeneratedContent) (*model.SuspiciousResp, error) {
	key, val, err := s.TrieMgr.Get(ugc.GetArea(), ugc.GetContent())
	if err != nil && err != ErrTrieNotFound {
		log.Error("%v", err)
		return nil, err
	}
	resp := &model.SuspiciousResp{
		Area:      ugc.GetArea(),
		Content:   key,
		LimitType: SuspOK,
	}
	if err == ErrTrieNotFound {
		s.pushToChan(ugc)
		return resp, nil
	}
	// BUG FIX: this task runs asynchronously via AddTask. The original
	// closure assigned to the captured outer `err` while this goroutine
	// kept reading/writing it below — a data race. Every error in the
	// task is now a local variable.
	updateCountFn := func() {
		k, kerr := s.GetKeywordByID(ctx, val.KeywordID)
		if kerr != nil {
			log.Error("%v", kerr)
			return
		}
		if k.State == model.StateDeleted {
			// Previously this path logged a nil error; a deleted keyword
			// simply has nothing to count.
			return
		}
		if _, ierr := s.IncrKeywordHitCount(ctx, k); ierr != nil {
			log.Warn("incr keyword(id:%d) fail, error(%v)", val.KeywordID, ierr)
		}
		if ugc.GetSenderID() > 0 {
			if perr := s.persistSenderIDs(ctx, val.KeywordID, ugc.GetSenderID()); perr != nil {
				log.Warn("persistSenderIDs(sender_id: %d) fail, error(%v)", ugc.GetSenderID(), perr)
			}
		}
		log.Info("before running autoWhite on keyword(%+v), limitInfo(%+v), autoWhiteConf(%+v)", k, val, conf.Conf.AutoWhite)
		// Auto-white strategy: a non-black keyword hit often enough, by
		// enough distinct senders, with sufficiently spread sender ids,
		// is assumed to be legitimate and is whitelisted.
		if val.LimitType == model.LimitTypeBlack || k.HitCounts <= conf.Conf.AutoWhite.KeywordHitCounts {
			return
		}
		senderCounts, _ := s.antiDao.CntSendersCache(ctx, k.ID)
		if senderCounts <= conf.Conf.AutoWhite.NumOfSenders {
			return
		}
		senderList, serr := s.GetSenderIDsByKeywordID(ctx, k.ID)
		if serr != nil {
			return
		}
		if util.StdDeviation(util.Normallization(senderList.SenderIDs)) > conf.Conf.AutoWhite.Derivation {
			log.Warn("start running autoWhite on keyword(%+v), senderList(%v)", k, senderList.SenderIDs)
			if _, werr := s.OpKeyword(ctx, k.ID, model.KeywordTagWhite); werr != nil {
				log.Warn("auto white fail %+v", k)
			}
		}
	}
	s.AddTask(updateCountFn)
	if val.LimitType == model.LimitTypeBlack {
		resp.LimitType = SuspHitBlack
		return resp, nil
	}
	r, err := s.GetAggregateRuleByAreaAndLimitType(ctx, ugc.GetArea(), val.LimitType)
	if err != nil {
		return nil, err
	}
	counts, err := s.antiDao.GlobalLocalLimitCache(ctx, val.KeywordID, ugc.GetOID())
	if err != nil {
		log.Error("GlobalLocalLimitCache(%d,%d) error(%v)", val.KeywordID, ugc.GetOID(), err)
		return nil, err
	}
	globalCounts, localCounts := counts[0], counts[1]
	if globalCounts >= r.GlobalAllowedCounts || localCounts >= r.LocalAllowedCounts {
		resp.LimitType = SuspExceedAllowedCounts
		return resp, nil
	}
	// Within limits: count this hit. The first increment of a window
	// (return value 1) also arms that window's expiry.
	if ret, _ := s.antiDao.IncrGlobalLimitCache(ctx, val.KeywordID); ret == 1 {
		s.antiDao.GlobalLimitExpire(ctx, val.KeywordID, r.GlobalDurationSec)
	}
	if ret, _ := s.antiDao.IncrLocalLimitCache(ctx, val.KeywordID, ugc.GetOID()); ret == 1 {
		s.antiDao.LocalLimitExpire(ctx, val.KeywordID, ugc.GetOID(), r.LocalDurationSec)
	}
	if val.LimitType == model.LimitTypeRestrict {
		resp.LimitType = SuspHitRestrict
	}
	return resp, nil
}

View File

@@ -0,0 +1,274 @@
package service
import (
"context"
"errors"
"strconv"
"time"
"go-common/app/service/main/antispam/dao"
"go-common/app/service/main/antispam/model"
"go-common/app/service/main/antispam/util"
"go-common/library/log"
)
const (
// ThreeMonths is the expiry, in seconds, applied to a keyword's total
// hit-counter cache.
// NOTE(review): despite the name, 60*60*24*120 is 120 days (~4 months),
// not three; confirm which value is intended before touching either.
ThreeMonths = 60 * 60 * 24 * 120
)
// GetKeywordsByCond queries keywords matching cond and decorates each
// result with its cached sender count. A "resource not exist" lookup
// is reported as an empty page rather than an error.
func (s *SvcImpl) GetKeywordsByCond(ctx context.Context, cond *Condition) ([]*model.Keyword, int64, error) {
	daoKs, total, err := s.KeywordDao.GetByCond(ctx, ToDaoCond(cond))
	switch err {
	case nil:
	case dao.ErrResourceNotExist:
		return []*model.Keyword{}, 0, nil
	default:
		return nil, 0, err
	}
	keywords := ToModelKeywords(daoKs)
	for _, kw := range keywords {
		// Sender count lives only in cache; a cache miss just leaves 0.
		kw.SenderCounts, _ = s.antiDao.CntSendersCache(ctx, kw.ID)
	}
	return keywords, total, nil
}
// DeleteKeywords soft-deletes the given keyword ids and drops every
// cache entry related to them. Keywords already in the deleted state
// are skipped; if nothing is left to delete, an empty slice is
// returned without touching cache or database.
func (s *SvcImpl) DeleteKeywords(ctx context.Context, ids []int64) ([]*model.Keyword, error) {
	ks, err := s.getKeywordByIDs(ctx, ids)
	if err != nil {
		return nil, err
	}
	var (
		pendingIDs []int64
		pending    []*model.Keyword
	)
	for _, k := range ks {
		if k.State == model.StateDeleted {
			continue
		}
		k.State = model.StateDeleted
		pendingIDs = append(pendingIDs, k.ID)
		pending = append(pending, k)
	}
	if len(pendingIDs) == 0 {
		return []*model.Keyword{}, nil
	}
	// Caches are cleared before the DB delete (original order preserved).
	if err := s.antiDao.DelKeywordRelatedCache(ctx, pending); err != nil {
		log.Error("s.antiDao.DelKeywordRelatedCache(%+v) error(%v)", pending, err)
		return nil, err
	}
	daoKs, err := s.KeywordDao.DeleteByIDs(ctx, pendingIDs)
	if err != nil {
		return nil, err
	}
	return ToModelKeywords(daoKs), nil
}
// GetSenderIDsByKeywordID returns the keyword's cached sender-id list.
// Sender ids are stored as strings in cache and parsed here; a
// malformed entry aborts the whole call.
func (s *SvcImpl) GetSenderIDsByKeywordID(ctx context.Context, id int64) (*model.SenderList, error) {
	raw, err := s.antiDao.AllSendersCache(ctx, id)
	if err != nil {
		log.Error("%v", err)
		return nil, err
	}
	ids := make([]int64, 0, len(raw))
	for _, sender := range raw {
		sid, perr := strconv.ParseInt(sender, 10, 64)
		if perr != nil {
			return nil, perr
		}
		ids = append(ids, sid)
	}
	return &model.SenderList{SenderIDs: ids, Counts: len(raw)}, nil
}
// GetKeywordsByOffsetLimit queries keywords by an id range expressed
// through cond's Offset/Limit fields.
func (s *SvcImpl) GetKeywordsByOffsetLimit(ctx context.Context, cond *Condition) ([]*model.Keyword, error) {
	daoKs, err := s.KeywordDao.GetByOffsetLimit(ctx, ToDaoCond(cond))
	if err != nil {
		return nil, err
	}
	return ToModelKeywords(daoKs), nil
}
// GetKeywordByID loads a single keyword by primary key.
func (s *SvcImpl) GetKeywordByID(ctx context.Context, id int64) (*model.Keyword, error) {
	daoK, err := s.KeywordDao.GetByID(ctx, id)
	if err != nil {
		return nil, err
	}
	return ToModelKeyword(daoK), nil
}
// OpKeyword changes a keyword's tag. Deleted keywords cannot be
// operated on; re-applying the current tag is a no-op. Moving a
// keyword to the white list also clears its counting caches first so
// stale hit statistics don't linger.
func (s *SvcImpl) OpKeyword(ctx context.Context, id int64, newTag string) (*model.Keyword, error) {
	kw, err := s.GetKeywordByID(ctx, id)
	if err != nil {
		return nil, err
	}
	switch {
	case kw.State == model.StateDeleted:
		return nil, ErrResourceNotExist
	case kw.Tag == newTag:
		return kw, nil
	}
	kw.Tag = newTag
	if newTag == model.KeywordTagWhite {
		if cerr := s.antiDao.DelCountRelatedCache(ctx, kw); cerr != nil {
			log.Error("s.antiDao.DelCountRelatedCache(%+v), error(%v)", kw, cerr)
			return nil, cerr
		}
	}
	updated, err := s.KeywordDao.Update(ctx, ToDaoKeyword(kw))
	if err != nil {
		return nil, err
	}
	return ToModelKeyword(updated), nil
}
// IncrKeywordHitCount increases the keyword's hit counter in cache and
// persists it to the database only on every second hit (when the cache
// counter is even), halving write traffic. The in-memory HitCounts is
// advanced either way and the new value is returned.
func (s *SvcImpl) IncrKeywordHitCount(ctx context.Context, k *model.Keyword) (int64, error) {
	totalCounts, err := s.antiDao.IncrTotalLimitCache(ctx, k.ID)
	if err != nil {
		log.Error("%v", err)
		return 0, err
	}
	if err := s.antiDao.TotalLimitExpire(ctx, k.ID, ThreeMonths); err != nil {
		return 0, err
	}
	if totalCounts%2 == 0 {
		k.HitCounts += 2
		// Persistence is best-effort: the original silently discarded the
		// Update error; it is now at least logged.
		if _, uerr := s.KeywordDao.Update(ctx, ToDaoKeyword(k)); uerr != nil {
			log.Warn("persist hit counts of keyword(id:%d) fail, error(%v)", k.ID, uerr)
		}
	} else {
		k.HitCounts++
	}
	return k.HitCounts, nil
}
// ExpireKeyword garbage-collects stale keywords: still-live records
// tagged limit/restrict/white, created between "one month + 5 days
// ago" and "one month ago", whose hit counter satisfies the "3" bound
// (interpretation of HitCounts is up to KeywordDao.GetRubbish —
// presumably "<= 3"; confirm against the DAO). At most dbLimit rows
// are scanned per run; matches are soft-deleted via DeleteKeywords.
func (s *SvcImpl) ExpireKeyword(ctx context.Context, dbLimit int64) error {
until := time.Now().AddDate(0, -1, 0)
start := until.AddDate(0, 0, -5)
cond := &Condition{
State: model.StateDefault,
Tags: []string{
model.KeywordTagDefaultLimit,
model.KeywordTagRestrictLimit,
model.KeywordTagWhite,
},
StartTime: &start,
EndTime: &until,
HitCounts: "3",
Pagination: &util.Pagination{
CurPage: 1,
PerPage: dbLimit,
},
}
ks, err := s.KeywordDao.GetRubbish(ctx, ToDaoCond(cond))
if err != nil {
return err
}
needExpireIDs := make([]int64, 0)
for _, k := range ks {
needExpireIDs = append(needExpireIDs, k.ID)
}
_, err = s.DeleteKeywords(ctx, needExpireIDs)
return err
}
// persistSenderIDs records senderID in the keyword's sender set
// (scored by the current nanosecond timestamp) and then trims the set
// back to MaxSenderNum by removing the oldest surplus entries.
func (s *SvcImpl) persistSenderIDs(ctx context.Context, keywordID, senderID int64) error {
	totalCounts, err := s.antiDao.ZaddSendersCache(ctx, keywordID, time.Now().UnixNano(), senderID)
	if err != nil {
		return err
	}
	if totalCounts <= s.Option.MaxSenderNum {
		return nil
	}
	// Over capacity: fetch the oldest extraCounts entries for eviction.
	extraCounts := totalCounts - s.Option.MaxSenderNum
	senderIDs, err := s.antiDao.SendersCache(ctx, keywordID, 0, extraCounts)
	if err != nil {
		log.Error("s.antiDao.SendersCache(%d,%d,%d)%v", keywordID, 0, extraCounts, err)
		return err
	}
	if len(senderIDs) != int(extraCounts) {
		// BUG FIX: the want/got arguments were inverted in the original
		// log message (it printed the actual length as "want").
		log.Warn("got wrong number of senderIDs:keywordID(%d), senderIDs(%v), want(%d), got(%d)",
			keywordID, senderIDs, extraCounts, len(senderIDs))
	}
	for _, sid := range senderIDs {
		ret, err := s.antiDao.ZremSendersCache(ctx, keywordID, sid)
		if err != nil {
			log.Error("%v", err)
			return err
		}
		if ret != 1 {
			// BUG FIX: "senederID" typo corrected in the error message.
			err = errors.New("fail to remove senderID from senderID list")
			log.Error("%v", err)
			return err
		}
	}
	return nil
}
// PersistKeyword persists a caught keyword:
//   - no existing row for (area, content): insert a new one.
//   - existing but soft-deleted: restore it with the fresh catch's
//     metadata and reset HitCounts to 1.
//   - existing and live: bump its hit counter (best-effort; the
//     increment error is intentionally ignored).
//
// NOTE(review): any lookup error — not just "not found" — falls into
// the insert path; a transient DB error could trigger a duplicate
// insert attempt. Confirm getKeywordByAreaAndContent's error contract.
func (s *SvcImpl) PersistKeyword(ctx context.Context, catchedKeyword *model.Keyword) (*model.Keyword, error) {
keyword, err := s.getKeywordByAreaAndContent(ctx, catchedKeyword.Area, catchedKeyword.Content)
if err != nil {
insertedKeyword, err := s.insertKeyword(ctx, catchedKeyword)
if err != nil {
return nil, err
}
return insertedKeyword, nil
}
if keyword.State == model.StateDeleted {
// the keyword was deleted before,
// now it's hit again, restore to init state
keyword.State = model.StateDefault
keyword.OriginContent = catchedKeyword.OriginContent
keyword.RegexpName = catchedKeyword.RegexpName
keyword.CTime = catchedKeyword.CTime
keyword.Tag = catchedKeyword.Tag
keyword.HitCounts = 1
k, err := s.KeywordDao.Update(ctx, ToDaoKeyword(keyword))
if err != nil {
return nil, err
}
return ToModelKeyword(k), nil
}
// Best-effort counter bump; errors are logged inside the callee.
s.IncrKeywordHitCount(ctx, keyword)
return keyword, nil
}
// insertKeyword persists a brand-new keyword row and returns its model view.
func (s *SvcImpl) insertKeyword(ctx context.Context, k *model.Keyword) (*model.Keyword, error) {
	row, err := s.KeywordDao.Insert(ctx, ToDaoKeyword(k))
	if err != nil {
		return nil, err
	}
	return ToModelKeyword(row), nil
}
// getKeywordByAreaAndContent looks a keyword up by its (area, content) pair.
func (s *SvcImpl) getKeywordByAreaAndContent(ctx context.Context,
	area, content string) (*model.Keyword, error) {
	cond := &Condition{
		Area:     area,
		Contents: []string{content},
	}
	row, err := s.KeywordDao.GetByAreaAndContent(ctx, ToDaoCond(cond))
	if err != nil {
		return nil, err
	}
	return ToModelKeyword(row), nil
}
// getKeywordByIDs batch-loads keywords by their primary keys.
func (s *SvcImpl) getKeywordByIDs(ctx context.Context,
	ids []int64) ([]*model.Keyword, error) {
	rows, err := s.KeywordDao.GetByIDs(ctx, ids)
	if err != nil {
		return nil, err
	}
	return ToModelKeywords(rows), nil
}

View File

@@ -0,0 +1,5 @@
package service_test
import "errors"
// ErrArbitrary is a sentinel used by tests that only need "some non-nil error".
var ErrArbitrary = errors.New("arbitrary error")

View File

@@ -0,0 +1,85 @@
package service
import (
"time"
"go-common/app/service/main/antispam/conf"
)
// NewOption builds the service Option from the loaded configuration,
// defaulting AsyncTaskChanSize to 500 when unset.
// NOTE(review): the scheduler intervals are built with a bare
// time.Duration(intValue) cast and no unit multiplier, so despite the
// *Sec/*Minute names the Durations are in nanoseconds unless callers
// multiply by time.Second/time.Minute at the point of use — confirm
// against the scheduler before changing.
func NewOption(config *conf.Config) *Option {
opt := &Option{
MaxSenderNum: config.ServiceOption.MaxSenderNum,
MinKeywordLen: config.ServiceOption.MinKeywordLen,
MaxExportRows: config.ServiceOption.MaxExportRows,
MaxRegexpCountsPerArea: config.ServiceOption.MaxRegexpCountsPerArea,
MaxSpawnGoroutines: config.ServiceOption.MaxSpawnGoroutines,
DefaultChanSize: config.ServiceOption.DefaultChanSize,
AsyncTaskChanSize: config.ServiceOption.AsyncTaskChanSize,
DefaultExpireSec: config.ServiceOption.DefaultExpireSec,
RuleDefaultExpireSec: config.ServiceOption.RuleDefaultExpireSec,
RegexpDefaultExpireSec: config.ServiceOption.RegexpDefaultExpireSec,
}
opt.Scheduler = &SchedulerOption{
GcInterval: time.Duration(config.ServiceOption.GcOpt.IntervalSec),
GcMaxRowsPerQuery: config.ServiceOption.GcOpt.MaxRowsPerQuery,
RefreshTrieIntervalSec: time.Duration(config.ServiceOption.RefreshTrieIntervalSec),
RefreshRulesIntervalSec: time.Duration(config.ServiceOption.RefreshRulesIntervalSec),
RefreshRegexpsIntervalSec: time.Duration(config.ServiceOption.RefreshRegexpsIntervalSec),
BuildTrieIntervalMinute: time.Duration(config.ServiceOption.BuildTrieIntervalMinute),
BuildTrieMaxRowsPerQuery: config.ServiceOption.BuildTrieMaxRowsPerQuery,
}
// Fallback for configs that leave the async task queue size unset.
if opt.AsyncTaskChanSize == 0 {
opt.AsyncTaskChanSize = 500
}
return opt
}
// Option aggregates all tunable parameters of the antispam service.
type Option struct {
	// MinKeywordLen specify the minimum length
	// a keyword should satisfy
	MinKeywordLen int
	// MaxSenderNum limit the length of
	// keyword's sender list
	MaxSenderNum int64
	// MaxExportRows specify the max rows
	// when export keywords as excel
	MaxExportRows int64
	// MaxRegexpCountsPerArea specify the max counts
	// of regexps inside the extract pipeline
	MaxRegexpCountsPerArea int64
	// Expiry values, in seconds.
	DefaultExpireSec       int64
	RuleDefaultExpireSec   int64
	RegexpDefaultExpireSec int64
	// DefaultChanSize is the buffer size of the pipeline channels.
	DefaultChanSize int64
	// MaxSpawnGoroutines caps concurrent regexp-matching goroutines.
	MaxSpawnGoroutines int64
	// AsyncTaskChanSize is the async task queue length (defaults to 500).
	AsyncTaskChanSize int64
	// Scheduler holds the background-job options.
	Scheduler *SchedulerOption
}
// SchedulerOption configures the periodic background jobs. The *Sec and
// *Minute duration fields carry raw counts from the config; NewScheduler
// multiplies them by time.Second / time.Minute respectively.
type SchedulerOption struct {
	BuildTrieIntervalMinute  time.Duration
	BuildTrieMaxRowsPerQuery int64
	RefreshTrieIntervalSec   time.Duration
	// RefreshTrieMaxRowsPerQuery is not populated by NewOption.
	RefreshTrieMaxRowsPerQuery int64
	RefreshRulesIntervalSec    time.Duration
	RefreshRegexpsIntervalSec  time.Duration
	// GcInterval specify how often to
	// expire the useless keywords
	GcInterval        time.Duration
	GcMaxRowsPerQuery int64
}

View File

@@ -0,0 +1,214 @@
package service
import (
"context"
"strings"
"time"
"go-common/app/service/main/antispam/model"
"go-common/app/service/main/antispam/util"
"go-common/library/log"
)
// UserGeneratedContent abstracts any user-submitted item (reply, danmu,
// imessage, ...) that the antispam pipeline can inspect.
type UserGeneratedContent interface {
	// GetID returns the content's own identifier.
	GetID() int64
	// GetOID returns the identifier of the object the content belongs to.
	GetOID() int64
	// GetSenderID returns the id of the user who produced the content.
	GetSenderID() int64
	// GetArea returns the business area the content originates from.
	GetArea() string
	// GetContent returns the raw text to be matched against regexps.
	GetContent() string
}
// Digest drains the UGC channel through the keyword-extraction pipeline;
// it blocks until the channel is closed during shutdown.
func (s *SvcImpl) Digest() {
	s.digest(s.UserGeneratedContentChan)
}
// pushToChan enqueues a UGC item for asynchronous keyword extraction.
// The send never blocks: when the service is shutting down (done closed)
// the item is dropped silently, and when the channel is full it is
// dropped with a warning, so callers are never stalled by the pipeline.
func (s *SvcImpl) pushToChan(ugc UserGeneratedContent) {
	select {
	case <-done:
	case s.UserGeneratedContentChan <- ugc:
	default:
		log.Warn("regexp extract chan full, abandon ugc(%v)", ugc)
	}
}
// InWhiteList reports whether the keyword's content matches any whitelist
// regexp of its area; on a match it also returns that regexp's name.
func (s *SvcImpl) InWhiteList(k *model.Keyword) (name string, isWhite bool) {
	whites := s.GetRegexpsByAreaAndCondFunc(context.TODO(), k.Area, whiteRegexpsCondFn)
	for _, w := range whites {
		log.Info("inside white regexp loop: %s", w)
		if w.FindString(k.Content) == "" {
			continue
		}
		log.Info("Keyword match whitelist, content: %s", k.Content)
		return w.Name, true
	}
	return "", false
}
// ExcludeWhitelist is a pipeline stage that tags whitelist-matching
// keywords (rather than discarding them); every keyword is forwarded.
func (s *SvcImpl) ExcludeWhitelist(in <-chan *model.Keyword) <-chan *model.Keyword {
	out := make(chan *model.Keyword, s.Option.DefaultChanSize)
	s.wg.Add(1)
	go func() {
		defer s.wg.Done()
		defer close(out)
		for k := range in {
			name, isWhite := s.InWhiteList(k)
			if isWhite {
				k.Tag = model.KeywordTagWhite
				k.RegexpName = name
			}
			out <- k
		}
		log.Info("exclude whitelist chan receive cancel signal")
	}()
	return out
}
// Aggregate persist only one keyword captured by regexps chain:
// an (area, sender) counter in the cache deduplicates, so only the first
// keyword produced by the same owner within the expiry window moves on.
//
// Fix: removed a dead `continue` at the end of the loop body (it was the
// last statement, so it had no effect).
func (s *SvcImpl) Aggregate(in <-chan *model.Keyword) <-chan *model.Keyword {
	out := make(chan *model.Keyword, s.Option.DefaultChanSize)
	s.wg.Add(1)
	go func() {
		defer s.wg.Done()
		defer close(out)
		for keyword := range in {
			count, err := s.antiDao.IncrAreaSendersCache(context.TODO(), keyword.Area, keyword.SenderID)
			if err != nil {
				log.Error("%v", err)
				continue
			}
			// Extend the sender marker's expiry (unit per the dao contract).
			s.antiDao.AreaSendersExpire(context.TODO(), keyword.Area, keyword.SenderID, 1)
			// Only the first occurrence within the window is forwarded.
			if count == 1 {
				out <- keyword
			}
		}
		log.Info("aggregate chan receive cancel signal")
	}()
	return out
}
// ExtractKeyword is a pipeline stage that matches incoming UGC against
// the limit/restrict regexps of its area and emits captured keywords.
func (s *SvcImpl) ExtractKeyword(in <-chan UserGeneratedContent) <-chan *model.Keyword {
	out := make(chan *model.Keyword, s.Option.DefaultChanSize)
	s.wg.Add(1)
	go func() {
		defer s.wg.Done()
		defer close(out)
		for ugc := range in {
			s.extractKeyword(ugc, out)
		}
		log.Info("Extract keyword chan receive cancel")
	}()
	return out
}
// Ignore is a pipeline stage that drops keywords which are impossible to
// be spam (too short, single repeated char, or matching an ignore regexp).
func (s *SvcImpl) Ignore(in <-chan *model.Keyword) <-chan *model.Keyword {
	out := make(chan *model.Keyword, s.Option.DefaultChanSize)
	s.wg.Add(1)
	go func() {
		defer s.wg.Done()
		defer close(out)
		for k := range in {
			s.ignore(k, out)
		}
		log.Info("ignore chan receive cancel signal")
	}()
	return out
}
// extractKeyword runs every limit/restrict regexp of the UGC's area
// against its content, one goroutine per regexp, and sends each captured
// keyword to out. The s.tokens channel is a semaphore bounding the number
// of in-flight matching goroutines.
//
// NOTE(review): the spawned goroutines may write to out after this call
// returns, while ExtractKeyword's defer close(out) only waits for the
// input channel to drain — confirm the WaitGroup ordering prevents a
// send-on-closed-channel panic here.
func (s *SvcImpl) extractKeyword(ugc UserGeneratedContent, out chan<- *model.Keyword) {
	for _, reg := range s.GetRegexpsByAreaAndCondFunc(context.TODO(), ugc.GetArea(), limitRegexpsCondFn) {
		log.Info("inside limit/restrict regexp loop, regexp:%s", reg)
		// Acquire a concurrency token; released in the goroutine's defer.
		<-s.tokens
		s.wg.Add(1)
		go func(regex *model.Regexp) {
			defer s.wg.Done()
			defer func() {
				s.tokens <- struct{}{}
			}()
			if hit := regex.FindString(ugc.GetContent()); hit != "" {
				hit = strings.TrimSpace(hit)
				// Too-short captures are noise; discard them.
				if len(hit) < s.Option.MinKeywordLen {
					return
				}
				k := &model.Keyword{
					Content:       hit,
					SenderID:      ugc.GetSenderID(),
					Area:          ugc.GetArea(),
					OriginContent: ugc.GetContent(),
					CTime:         util.JSONTime(time.Now()),
					RegexpName:    regex.Name,
				}
				// The tag mirrors the operation of the capturing regexp.
				switch regex.Operation {
				case model.OperationLimit:
					k.Tag = model.KeywordTagDefaultLimit
				case model.OperationRestrictLimit:
					k.Tag = model.KeywordTagRestrictLimit
				}
				out <- k
			}
		}(reg)
	}
}
// ignore forwards keyword to out unless it is obviously harmless:
// shorter than MinKeywordLen, made of one repeated character, or matching
// an ignore regexp of its area. Matching runs in token-bounded goroutines.
//
// NOTE(review): when several ignore regexps exist, every non-matching
// goroutine sends the keyword once, so one keyword can be emitted to out
// multiple times — confirm whether downstream dedup is relied upon.
func (s *SvcImpl) ignore(keyword *model.Keyword, out chan<- *model.Keyword) {
	if len(keyword.Content) < s.Option.MinKeywordLen {
		log.Warn("content small than %d, ignore", s.Option.MinKeywordLen)
		return
	}
	if util.SameChar(keyword.Content) {
		log.Warn("content consists of repeated chars(%s), will be ignored", keyword.Content)
		return
	}
	rs := s.GetRegexpsByAreaAndCondFunc(context.TODO(), keyword.Area, ignoreRegexpsCondFn)
	// NOTE: this is extremly important
	// otherwise, will block pipeline forever
	if len(rs) == 0 {
		out <- keyword
	}
	for _, reg := range rs {
		log.Info("inside ignore regexp loop: %s", reg)
		// Acquire a concurrency token; released in the goroutine's defer.
		<-s.tokens
		s.wg.Add(1)
		go func(regex *model.Regexp) {
			defer s.wg.Done()
			defer func() {
				s.tokens <- struct{}{}
			}()
			if hit := regex.FindString(keyword.Content); hit != "" {
				log.Warn("content %s hit ignore regexp %v", keyword.Content, regex)
				return
			}
			out <- keyword
		}(reg)
	}
}
// digest is the terminal pipeline stage: it persists every keyword that
// survives extraction, aggregation, ignore filtering and whitelist
// tagging, and records the sender id for non-whitelisted keywords.
func (s *SvcImpl) digest(ch <-chan UserGeneratedContent) {
	pipeline := s.ExcludeWhitelist(s.Ignore(s.Aggregate(s.ExtractKeyword(ch))))
	for k := range pipeline {
		log.Info("Catch keyword %v", k)
		persisted, err := s.PersistKeyword(context.Background(), k)
		if err != nil {
			log.Error("error Persiskeyword %v, error %v", k, err)
			continue
		}
		if persisted.Tag == model.KeywordTagWhite || k.SenderID <= 0 {
			continue
		}
		if err := s.persistSenderIDs(context.TODO(), persisted.ID, k.SenderID); err != nil {
			log.Error("persistSenderIDs(sender_id: %d) fail, error(%v)", k.SenderID, err)
		}
	}
	log.Info("digest receive cancel signal")
}

View File

@@ -0,0 +1,153 @@
package service
import (
"context"
"regexp"
"go-common/app/service/main/antispam/dao"
"go-common/app/service/main/antispam/model"
"go-common/library/log"
)
var (
	// ignoreRegexpsCondFn selects live regexps whose operation is "ignore".
	ignoreRegexpsCondFn = func(r *model.Regexp) bool {
		return r.State != model.StateDeleted &&
			r.Operation == model.OperationIgnore
	}
	// whiteRegexpsCondFn selects live regexps that whitelist content.
	whiteRegexpsCondFn = func(r *model.Regexp) bool {
		return r.State != model.StateDeleted &&
			r.Operation == model.OperationPutToWhiteList
	}
	// limitRegexpsCondFn selects live regexps that limit or restrict-limit.
	limitRegexpsCondFn = func(r *model.Regexp) bool {
		return r.State != model.StateDeleted &&
			(r.Operation == model.OperationLimit ||
				r.Operation == model.OperationRestrictLimit)
	}
)
var (
	// regexps is the in-memory snapshot of compiled regexp rules, rebuilt
	// by RefreshRegexps and guarded by SvcImpl's embedded RWMutex.
	regexps = []*model.Regexp{}
)
// RefreshRegexps reloads all live regexps from storage, compiles them,
// and swaps the in-memory snapshot used by the matchers.
//
// Fix: the original held the write lock for the whole DB query and the
// compilation loop, blocking every reader for the duration. The query
// and compilation now run before the lock, which only covers the swap.
func (s *SvcImpl) RefreshRegexps(ctx context.Context) {
	dbRs, _, err := s.RegexpDao.GetByCond(ctx, ToDaoCond(&Condition{
		State: model.StateDefault,
	}))
	if err != nil || len(dbRs) == 0 {
		// Keep the previous snapshot on error or empty result.
		return
	}
	rs := ToModelRegexps(dbRs)
	// In-place filter: keep only rows whose pattern compiles.
	compiled := rs[:0]
	for _, r := range rs {
		reg, err := regexp.Compile(r.Content)
		if err != nil {
			log.Warn("%v", err)
			continue
		}
		r.Reg = reg
		compiled = append(compiled, r)
	}
	s.Lock()
	regexps = compiled
	s.Unlock()
}
// GetRegexpByID fetches a single regexp row by its primary key.
func (s *SvcImpl) GetRegexpByID(ctx context.Context, id int64) (*model.Regexp, error) {
	row, err := s.RegexpDao.GetByID(ctx, id)
	if err != nil {
		return nil, err
	}
	return ToModelRegexp(row), nil
}
// GetRegexpsByAreaAndCondFunc returns the cached regexps that belong to
// the given area and satisfy condFunc. Area mapping: mainsite_dm has its
// own regexps, while all other business areas (imessage, reply, live_dm,
// ...etc) share the regexps registered under the reply area.
func (s *SvcImpl) GetRegexpsByAreaAndCondFunc(ctx context.Context, area string, condFunc func(*model.Regexp) bool) (rs []*model.Regexp) {
	if condFunc == nil {
		return
	}
	s.RLock()
	defer s.RUnlock()
	for _, r := range regexps {
		if !condFunc(r) {
			continue
		}
		ownArea := area == model.AreaMainSiteDM && r.Area == area
		sharedReply := area != model.AreaMainSiteDM && r.Area == model.AreaReply
		if ownArea || sharedReply {
			rs = append(rs, r)
		}
	}
	return
}
// GetRegexpByAreaAndContent queries one regexp row by its area and exact
// pattern content.
func (s *SvcImpl) GetRegexpByAreaAndContent(ctx context.Context, area, content string) (*model.Regexp, error) {
	cond := &Condition{Area: area, Contents: []string{content}}
	row, err := s.RegexpDao.GetByAreaAndContent(ctx, ToDaoCond(cond))
	if err != nil {
		return nil, err
	}
	return ToModelRegexp(row), nil
}
// GetRegexpsByCond queries regexps by an arbitrary condition; a missing
// resource is reported as an empty result rather than an error.
func (s *SvcImpl) GetRegexpsByCond(ctx context.Context,
	cond *Condition) (rs []*model.Regexp, total int64, err error) {
	dbRs, total, err := s.RegexpDao.GetByCond(ctx, ToDaoCond(cond))
	switch {
	case err == dao.ErrResourceNotExist:
		return []*model.Regexp{}, 0, nil
	case err != nil:
		return nil, 0, err
	}
	return ToModelRegexps(dbRs), total, nil
}
// UpsertRegexp updates the regexp when it carries an id, otherwise
// inserts it as a new row.
func (s *SvcImpl) UpsertRegexp(ctx context.Context, r *model.Regexp) (*model.Regexp, error) {
	var (
		stored *dao.Regexp
		err    error
	)
	if r.ID > 0 {
		stored, err = s.RegexpDao.Update(ctx, ToDaoRegexp(r))
	} else {
		stored, err = s.RegexpDao.Insert(ctx, ToDaoRegexp(r))
	}
	if err != nil {
		return nil, err
	}
	return ToModelRegexp(stored), nil
}
// DeleteRegexp soft-deletes a regexp by id on behalf of adminID and
// invalidates the regexp cache. Deleting an already-deleted row is a
// no-op that returns the existing record.
func (s *SvcImpl) DeleteRegexp(ctx context.Context, id, adminID int64) (*model.Regexp, error) {
	r, err := s.GetRegexpByID(ctx, id)
	if err != nil {
		return nil, err
	}
	if r.State == model.StateDeleted {
		return r, nil
	}
	r.AdminID = adminID
	r.State = model.StateDeleted
	// Invalidate the cache before persisting the state change.
	if err = s.antiDao.DelRegexpCache(ctx); err != nil {
		log.Error("s.antiDao.DelRegexpCache() error(%v)", err)
		return nil, err
	}
	updated, err := s.RegexpDao.Update(ctx, ToDaoRegexp(r))
	if err != nil {
		return nil, err
	}
	return ToModelRegexp(updated), nil
}

View File

@@ -0,0 +1,146 @@
package service_test
import (
"regexp"
"testing"
"github.com/stretchr/testify/assert"
)
var (
	// Patterns mirroring the production extraction rules under test:
	// taobao share tokens wrapped in ¥...¥, bare URLs, a whitelist of
	// trusted domains / av ids, QQ numbers, and wechat ids.
	regexpTaobao    = regexp.MustCompile(`¥([\w\s]+)¥`)
	regexpURL       = regexp.MustCompile(`(?:http|https|www)(?:[\s\.:\/\/]{1,})([\w%+:\s\/\.?=]{1,})`)
	regexpWhitelist = regexp.MustCompile(`((acg|im9|bili|gov).*(com|html|cn|tv)|(av\d{8,}|AV\d{8,}))`)
	regexpQQ        = regexp.MustCompile(`(?:[加qQ企鹅号码\s]{2,}|[群号]{1,})(?:[\x{4e00}-\x{9eff}]*)(?:[:]?)([\d\s]{6,})`)
	regexpWechat    = regexp.MustCompile(`(?:[加+微++vV威卫星♥❤姓xX信]{2,}|weixin|weix)(?:[,❤️.\s]?)(?:[\x{4e00}-\x{9eff}]?)(?:[:]?)([\w\s]{6,})`)
)
// TestRegexp table-tests each extraction pattern against real-world
// samples, asserting the first capture group.
//
// Fixes: (1) indexing FindStringSubmatch(...)[1] directly panicked with
// an index-out-of-range instead of a useful failure when a pattern did
// not match; (2) testify's assert.Equal takes (expected, actual) — the
// arguments were swapped, producing misleading failure messages.
func TestRegexp(t *testing.T) {
	cases := []struct {
		name   string
		regexp *regexp.Regexp
		inputs []struct {
			content         string
			expectedKeyword string
		}
	}{
		{
			"wechat",
			regexpWechat,
			[]struct {
				content         string
				expectedKeyword string
			}{
				{
					"加微微Leslie9999990",
					"Leslie9999990",
				},
				{
					"开车开车 想看片加微 18250182201",
					"18250182201",
				},
				{
					"要gv的威信 RiverLeee",
					"RiverLeee",
				},
				{
					"未删减版威信luijixiang",
					"luijixiang",
				},
			},
		},
		{
			"url",
			regexpURL,
			[]struct {
				content         string
				expectedKeyword string
			}{
				{
					`http://fuli94.com/portal.php?x=611649loli资源`,
					"fuli94.com/portal.php?x=611649loli",
				},
				{
					`老司机开车了 懂得上车http://zh.cilex.com.cn/http://www.xxoo.jp/?x=156053 萝莉福利http://zh.cilex.com.cn/http://www.xxoo.jp/?x=156053 福利来了请叫我雷峰http://zh.cilex.co`,
					"zh.cilex.com.cn/http://www.xxoo.jp/?x=156053 ",
				},
				{
					`http://flba90.com/forum.php?x=671250 http://flba90.com/forum.php?x=671250`,
					"flba90.com/forum.php?x=671250 http://flba90.com/forum.php?x=671250",
				},
			},
		},
		{
			"taobao",
			regexpTaobao,
			[]struct {
				content         string
				expectedKeyword string
			}{
				{
					`AA网的泥膜专卖店有10元券。65元入手啊。。。 复制这条信息打开淘宝¥NEVzZCbjQze¥`,
					"NEVzZCbjQze",
				},
				{
					`这家店还有佳雪神鲜水,这玩意没假货吧,过段日子就难说啦。 ----------------- 复制这条信息¥jhpeZCWwpvq¥ ,打开【手机淘宝】即可查看`,
					"jhpeZCWwpvq",
				},
				{
					`佳雪神鲜水肌底菁华液神仙水面部精华液补水保湿提亮肤色小样试用【包邮】 【在售价】158.00元 【券后价】153.00元 【下单链接】http://c.b1yt.com/h.jJhBrx?cv=CQfzZCeZauK ----------------- 复制这条信息¥CQfzZCeZauK¥ ,打`,
					"CQfzZCeZauK",
				},
			},
		},
		{
			"qq",
			regexpQQ,
			[]struct {
				content         string
				expectedKeyword string
			}{
				{
					`HMMD各种类型 老司机开车(^・ω・^ ) 加qq2 6 4 8141670`,
					"2 6 4 8141670",
				},
				{
					`欢迎加入新日暮里企鹅群450809463`,
					"450809463",
				},
				{
					`想看完整版加企鹅: 2418046299 高清未删减✔ 各种资源应有尽有。 想看完整版加企鹅: 2418046299 高清未删减✔ 各种资源应有尽有。 想看完整版加企鹅: 2418046299 高清未删减✔ 各种资源应有尽有。`,
					" 2418046299 ",
				},
			},
		},
		{
			"whitelist",
			regexpWhitelist,
			[]struct {
				content         string
				expectedKeyword string
			}{
				{
					`http://big.bilibili.com/site/big.html`,
					`bilibili.com/site/big.html`,
				},
				{
					`live.bilibili.com`,
					`bilibili.com`,
				},
				{
					`http://www.gov.cn/`,
					`gov.cn`,
				},
			},
		},
	}
	for _, c := range cases {
		for _, input := range c.inputs {
			t.Run(c.name, func(t *testing.T) {
				assert := assert.New(t)
				matches := c.regexp.FindStringSubmatch(input.content)
				// Fail with a clear message instead of panicking when the
				// pattern does not match at all.
				if len(matches) < 2 {
					t.Fatalf("regexp %s matched nothing in %q", c.name, input.content)
				}
				assert.Equal(input.expectedKeyword, matches[1], "")
			})
		}
	}
}

View File

@@ -0,0 +1,114 @@
package service
import (
"context"
"go-common/app/service/main/antispam/conf"
"go-common/app/service/main/antispam/dao"
"go-common/app/service/main/antispam/model"
"go-common/library/log"
)
// GetRuleByArea returns every live rule configured for the given area;
// an absent resource yields an empty slice instead of an error.
func (s *SvcImpl) GetRuleByArea(ctx context.Context, area string) ([]*model.Rule, error) {
	rows, err := s.RuleDao.GetByArea(ctx, ToDaoCond(&Condition{
		Area:  area,
		State: model.StateDefault,
	}))
	switch {
	case err == dao.ErrResourceNotExist:
		return []*model.Rule{}, nil
	case err != nil:
		return nil, err
	}
	return ToModelRules(rows), nil
}
// GetRuleByAreaAndLimitTypeAndScope fetches the single rule identified
// by the (area, limit type, limit scope) triple.
func (s *SvcImpl) GetRuleByAreaAndLimitTypeAndScope(ctx context.Context, area, limitType, limitScope string) (*model.Rule, error) {
	row, err := s.RuleDao.GetByAreaAndTypeAndScope(ctx, ToDaoCond(&Condition{
		Area:       area,
		LimitType:  limitType,
		LimitScope: limitScope,
	}))
	if err != nil {
		return nil, err
	}
	return ToModelRule(row), nil
}
var (
	// rules is the in-memory snapshot of live limit rules, rebuilt by
	// RefreshRules and guarded by SvcImpl's embedded RWMutex.
	rules = []*model.Rule{}
)
// RefreshRules reloads all live rules from storage and swaps the
// in-memory snapshot.
//
// Fix: the original took the write lock before the DB query, blocking
// every reader for the full round-trip; the query now runs first and the
// lock only covers the assignment.
func (s *SvcImpl) RefreshRules(ctx context.Context) {
	rs, _, err := s.RuleDao.GetByCond(ctx, ToDaoCond(&Condition{State: model.StateDefault}))
	if err != nil {
		// Keep the previous snapshot on error.
		return
	}
	s.Lock()
	rules = ToModelRules(rs)
	s.Unlock()
}
// GetAggregateRuleByAreaAndLimitType merges the global- and local-scope
// rules of (area, limitType) into one AggregateRule, clamping counts and
// durations to the configured maxima.
func (s *SvcImpl) GetAggregateRuleByAreaAndLimitType(ctx context.Context, area, limitType string) (*model.AggregateRule, error) {
	s.RLock()
	defer s.RUnlock()
	res := &model.AggregateRule{}
	for _, r := range rules {
		if r.Area != area || r.LimitType != limitType {
			continue
		}
		switch r.LimitScope {
		case model.LimitScopeGlobal:
			res.GlobalDurationSec = r.DurationSec
			res.GlobalAllowedCounts = r.AllowedCounts
		case model.LimitScopeLocal:
			res.LocalDurationSec = r.DurationSec
			res.LocalAllowedCounts = r.AllowedCounts
		}
	}
	// Clamp everything to the configured ceilings.
	if res.GlobalAllowedCounts > conf.Conf.MaxAllowedCounts {
		res.GlobalAllowedCounts = conf.Conf.MaxAllowedCounts
	}
	if res.LocalAllowedCounts > conf.Conf.MaxAllowedCounts {
		res.LocalAllowedCounts = conf.Conf.MaxAllowedCounts
	}
	if res.GlobalDurationSec > conf.Conf.MaxDurationSec {
		res.GlobalDurationSec = conf.Conf.MaxDurationSec
	}
	if res.LocalDurationSec > conf.Conf.MaxDurationSec {
		res.LocalDurationSec = conf.Conf.MaxDurationSec
	}
	return res, nil
}
// UpsertRule updates the rule matching (area, type, scope) when one
// already exists, otherwise inserts a new row; either way the cached
// rules for that area/type are invalidated afterwards.
func (s *SvcImpl) UpsertRule(ctx context.Context, r *model.Rule) (*model.Rule, error) {
	var (
		stored *dao.Rule
		err    error
	)
	_, lookupErr := s.RuleDao.GetByAreaAndTypeAndScope(ctx, ToDaoCond(&Condition{
		Area:       r.Area,
		LimitType:  r.LimitType,
		LimitScope: r.LimitScope,
	}))
	if lookupErr == nil {
		stored, err = s.RuleDao.Update(ctx, ToDaoRule(r))
	} else {
		stored, err = s.RuleDao.Insert(ctx, ToDaoRule(r))
	}
	if err != nil {
		return nil, err
	}
	if err := s.antiDao.DelRulesCache(ctx, r.Area, r.LimitType); err != nil {
		log.Error("s.antiDao.DelRulesCache(%s,%s) error(%v)", r.Area, r.LimitType, err)
		return nil, err
	}
	return ToModelRule(stored), nil
}

View File

@@ -0,0 +1,114 @@
package service
import (
"context"
"time"
"go-common/app/service/main/antispam/conf"
"go-common/library/log"
)
// Scheduler groups the long-running background jobs of the service; each
// method blocks in its own periodic loop until the service shuts down.
type Scheduler interface {
	// BuildTrie periodically rebuilds the keyword trie from scratch.
	BuildTrie()
	// RefreshTrie periodically applies keyword changes to the trie.
	RefreshTrie()
	// ExpireKeyword periodically garbage-collects stale keywords.
	ExpireKeyword()
	// RefreshRules periodically reloads limit rules from storage.
	RefreshRules()
	// RefreshRegexps periodically reloads and recompiles regexps.
	RefreshRegexps()
	// RunTimeDebugProb periodically dumps in-memory state to the log.
	RunTimeDebugProb()
}
// SchedulerImpl is the production Scheduler backed by the service and
// the trie manager; interval fields hold fully-resolved durations
// (NewScheduler applies the second/minute multipliers).
type SchedulerImpl struct {
	service                      Service
	trieMgr                      *TrieMgr
	refreshTrieInterval          time.Duration
	refreshRulesInterval         time.Duration
	refreshRegexpsInterval       time.Duration
	buildTrieInterval            time.Duration
	buildTrieMaxRowsPerQuery     int64
	expireKeywordInterval        time.Duration
	expireKeywordMaxRowsPerQuery int64
}
// NewScheduler wires a SchedulerImpl, converting the raw interval counts
// from opt into concrete durations (seconds, or minutes for the build).
func NewScheduler(service Service, trieMgr *TrieMgr, opt *SchedulerOption) Scheduler {
	impl := &SchedulerImpl{
		service: service,
		trieMgr: trieMgr,
	}
	impl.refreshTrieInterval = opt.RefreshTrieIntervalSec * time.Second
	impl.refreshRulesInterval = opt.RefreshRulesIntervalSec * time.Second
	impl.refreshRegexpsInterval = opt.RefreshRegexpsIntervalSec * time.Second
	impl.buildTrieInterval = opt.BuildTrieIntervalMinute * time.Minute
	impl.buildTrieMaxRowsPerQuery = opt.BuildTrieMaxRowsPerQuery
	impl.expireKeywordInterval = opt.GcInterval * time.Second
	impl.expireKeywordMaxRowsPerQuery = opt.GcMaxRowsPerQuery
	return impl
}
// schedule runs op every dur until the package-level done channel is
// closed, logging each tick and the final exit. The interval is measured
// from the end of one op to the start of the next, as before.
//
// Fix: the original called time.After on every iteration, allocating a
// fresh timer per loop; a single reusable Timer with Reset keeps the
// exact same semantics without the per-iteration allocation.
func schedule(name string, dur time.Duration, op func()) {
	timer := time.NewTimer(dur)
	defer timer.Stop()
	for {
		select {
		case <-timer.C:
			log.Info("start %s...", name)
			op()
			timer.Reset(dur)
		case <-done:
			log.Info("%s exit ...", name)
			return
		}
	}
}
// RunTimeDebugProb dumps the cached regexps, rules and the auto-white
// configuration to the info log every 300 seconds, as a lightweight
// probe of the service's in-memory state.
//
// NOTE(review): regexps and rules are read here without the service
// lock — confirm this racy read is acceptable for debug-only output.
func (s *SchedulerImpl) RunTimeDebugProb() {
	schedule("runtime debug prob",
		time.Second*300,
		func() {
			for _, r := range regexps {
				log.Info("regexps:%+v", r)
			}
			for _, r := range rules {
				log.Info("rules:%+v", r)
			}
			log.Info("autowhite config: %+v", conf.Conf.AutoWhite)
		})
}
// RefreshRules loads the rules once immediately, then keeps them fresh
// on the configured interval until shutdown.
func (s *SchedulerImpl) RefreshRules() {
	s.service.RefreshRules(context.TODO())
	schedule("refresh rule",
		s.refreshRulesInterval,
		func() { s.service.RefreshRules(context.TODO()) })
}
// RefreshRegexps loads the regexps once immediately, then keeps them
// fresh on the configured interval until shutdown.
func (s *SchedulerImpl) RefreshRegexps() {
	s.service.RefreshRegexps(context.TODO())
	schedule("refresh regexp",
		s.refreshRegexpsInterval,
		func() { s.service.RefreshRegexps(context.TODO()) })
}
// RefreshTrie periodically applies incremental keyword changes to the
// trie via the trie manager.
func (s *SchedulerImpl) RefreshTrie() {
	schedule("refresh trie",
		s.refreshTrieInterval,
		s.trieMgr.Refresh)
}
// BuildTrie periodically rebuilds the keyword trie, reading at most
// buildTrieMaxRowsPerQuery rows per query.
func (s *SchedulerImpl) BuildTrie() {
	schedule("build trie",
		s.buildTrieInterval,
		func() { s.trieMgr.Build(s.buildTrieMaxRowsPerQuery) })
}
// ExpireKeyword periodically garbage-collects stale keywords, limited to
// expireKeywordMaxRowsPerQuery rows per pass.
func (s *SchedulerImpl) ExpireKeyword() {
	schedule("expire keyword",
		s.expireKeywordInterval,
		func() { s.service.ExpireKeyword(context.TODO(), s.expireKeywordMaxRowsPerQuery) })
}

View File

@@ -0,0 +1,60 @@
package service
import (
"context"
"errors"
"go-common/app/service/main/antispam/model"
)
const (
	// CacheKeyRegexps is the cache key holding the regexp set.
	CacheKeyRegexps = "regexps"
	// CacheKeyLocalCountsFormat takes (resource id, keyword id) and names
	// the per-resource limit counter.
	CacheKeyLocalCountsFormat = "resource_id:%d:keyword_id:%d:local_limit_counts"
	// CacheKeyTotalCountsFormat takes a keyword id and names its total
	// hit counter.
	CacheKeyTotalCountsFormat = "keyword_id:%d:total_counts"
	// CacheKeyGlobalCountsFormat takes a keyword id and names its global
	// limit counter.
	CacheKeyGlobalCountsFormat = "keyword_id:%d:global_limit_counts"
	// CacheKeyKeywordsSenderIDsFormat takes a keyword id and names its
	// sender-id list.
	CacheKeyKeywordsSenderIDsFormat = "keyword_id:%d:sender_ids"
	// CacheKeyRulesWithinAreaAndLimitTypeFormat takes (area, limit type)
	// and names the cached rule set for that pair.
	CacheKeyRulesWithinAreaAndLimitTypeFormat = "rule:area:%s:limit_type:%s"
)
var (
	// ErrOverRateLimit signals that an allowed-counts threshold was hit.
	ErrOverRateLimit = errors.New("over rate limit")
	// ErrIllegalOperation signals an unsupported operation value.
	ErrIllegalOperation = errors.New("illegal operation")
	// ErrResourceNotExist signals a missing entity at the service layer.
	ErrResourceNotExist = errors.New("resource not found")
)
// Service is the full antispam surface: content filtering, keyword
// management, regexp and rule administration, and cache refresh hooks.
type Service interface {
	// Lifecycle and health.
	Close()
	Ping(context.Context) error
	// Filter inspects one piece of UGC and returns its suspicion verdict.
	Filter(context.Context, UserGeneratedContent) (*model.SuspiciousResp, error)
	// Cache refresh hooks, driven by the Scheduler.
	RefreshRules(context.Context)
	RefreshRegexps(context.Context)
	// Keyword management.
	GetKeywordsByCond(context.Context, *Condition) ([]*model.Keyword, int64, error)
	GetKeywordsByOffsetLimit(context.Context, *Condition) ([]*model.Keyword, error)
	GetKeywordByID(ctx context.Context, id int64) (*model.Keyword, error)
	GetSenderIDsByKeywordID(ctx context.Context, id int64) (*model.SenderList, error)
	OpKeyword(ctx context.Context, id int64, newTag string) (*model.Keyword, error)
	DeleteKeywords(ctx context.Context, ids []int64) ([]*model.Keyword, error)
	// Regexp administration.
	GetRegexpByID(context.Context, int64) (*model.Regexp, error)
	GetRegexpsByCond(context.Context, *Condition) ([]*model.Regexp, int64, error)
	GetRegexpByAreaAndContent(ctx context.Context, area, content string) (*model.Regexp, error)
	UpsertRegexp(context.Context, *model.Regexp) (*model.Regexp, error)
	DeleteRegexp(ctx context.Context, id int64, adminID int64) (*model.Regexp, error)
	// Rule administration.
	GetRuleByAreaAndLimitTypeAndScope(ctx context.Context, area, limitType, limitScope string) (*model.Rule, error)
	GetRuleByArea(ctx context.Context, area string) ([]*model.Rule, error)
	UpsertRule(context.Context, *model.Rule) (*model.Rule, error)
	// ExpireKeyword garbage-collects up to the given number of rows.
	ExpireKeyword(context.Context, int64) error
}

View File

@@ -0,0 +1,122 @@
package service
import (
"context"
"errors"
"sync"
"go-common/app/service/main/antispam/conf"
"go-common/app/service/main/antispam/dao"
"go-common/app/service/main/antispam/extern"
"go-common/library/cache"
"go-common/library/log"
)
// done is closed by Close to broadcast shutdown to every background loop.
var done = make(chan struct{})
// Ping delegates the health check to the dao layer.
func (s *SvcImpl) Ping(ctx context.Context) error {
	return s.antiDao.Ping(ctx)
}
// Spawns spawn goroutines with waitGroup, so that Close can wait for
// every background loop to finish before shutting the daos down.
func (s *SvcImpl) Spawns(fns ...func()) {
	for _, fn := range fns {
		fn := fn // capture per-iteration value
		s.wg.Add(1)
		go func() {
			defer s.wg.Done()
			fn()
		}()
	}
}
// Close close service and all the resources it opens: it broadcasts
// shutdown via done, closes the ingest and task channels, waits for all
// background goroutines, then shuts down the dao layer.
//
// NOTE(review): a pushToChan/AddTask send racing with these closes would
// panic on a closed channel — confirm producers stop before Close runs.
func (s *SvcImpl) Close() {
	close(done)
	close(s.UserGeneratedContentChan)
	close(s.AsyncTaskChan)
	s.wg.Wait()
	dao.Close()
}
// AddTask add async task. The enqueue never blocks: when the queue is
// full the task is dropped with a warning.
func (s *SvcImpl) AddTask(fn func()) {
	select {
	case s.AsyncTaskChan <- fn:
	default:
		log.Warn("task chan full, will discard operation")
	}
}
// HandleTask drains the async task queue, executing each non-nil task in
// order; it returns once the channel is closed during shutdown.
func (s *SvcImpl) HandleTask() {
	for task := range s.AsyncTaskChan {
		if task == nil {
			continue
		}
		log.Info("receive task ...")
		task()
	}
	log.Info("async task chan closed ...")
}
// New wires the complete service: it initializes the dao layer, builds
// options from config, constructs the trie manager and scheduler, seeds
// the goroutine-token semaphore, performs one synchronous trie build,
// and finally spawns all background loops. It panics when the dao layer
// cannot be initialized, since the service is unusable without it.
func New(config *conf.Config) *SvcImpl {
	if ok := dao.Init(config); !ok {
		panic(errors.New("init dao fail"))
	}
	s := &SvcImpl{
		wg:         new(sync.WaitGroup),
		Option:     NewOption(config),
		antiDao:    dao.New(config),
		RegexpDao:  dao.NewRegexpDao(),
		KeywordDao: dao.NewKeywordDao(),
		RuleDao:    dao.NewRuleDao(),
	}
	s.TrieMgr = NewTrieMgr(s)
	s.Scheduler = NewScheduler(s, s.TrieMgr, s.Option.Scheduler)
	s.AsyncTaskChan = make(chan func(), s.Option.AsyncTaskChanSize)
	// Fill the token semaphore: matching goroutines take a token and
	// return it when done, bounding concurrency to MaxSpawnGoroutines.
	s.tokens = make(chan struct{}, s.Option.MaxSpawnGoroutines)
	for i := 0; i < cap(s.tokens); i++ {
		s.tokens <- struct{}{}
	}
	s.UserGeneratedContentChan = make(chan UserGeneratedContent, s.Option.DefaultChanSize)
	// Keyword garbage collection is opt-in via config.
	if config.ServiceOption.GcOpt.Open {
		s.Spawns(s.Scheduler.ExpireKeyword)
	}
	// One synchronous build so the trie is warm before serving.
	s.TrieMgr.Build(s.Option.Scheduler.BuildTrieMaxRowsPerQuery)
	s.Spawns(
		s.Digest,
		s.HandleTask,
		s.Scheduler.BuildTrie,
		s.Scheduler.RefreshTrie,
		s.Scheduler.RefreshRules,
		s.Scheduler.RefreshRegexps,
		s.Scheduler.RunTimeDebugProb,
	)
	return s
}
// SvcImpl is the concrete Service implementation, bundling the dao
// layer, pipeline channels, background schedulers and tunables. The
// embedded RWMutex guards the package-level regexps/rules snapshots.
type SvcImpl struct {
	// tokens is a semaphore bounding concurrent matching goroutines.
	tokens chan struct{}
	sync.RWMutex
	// wg tracks every background goroutine so Close can wait for them.
	wg     *sync.WaitGroup
	Option *Option
	// AsyncTaskChan queues fire-and-forget tasks run by HandleTask.
	AsyncTaskChan chan func()
	TrieMgr       *TrieMgr
	Cache         cache.Cache
	ExternHandler extern.Handler
	Scheduler     Scheduler
	// Data-access objects.
	antiDao    *dao.Dao
	RuleDao    dao.RuleDao
	RegexpDao  dao.RegexpDao
	KeywordDao dao.KeywordDao
	// UserGeneratedContentChan feeds the keyword-extraction pipeline.
	UserGeneratedContentChan chan UserGeneratedContent
}

View File

@@ -0,0 +1,11 @@
package service
import (
"testing"
"go-common/app/service/main/antispam/conf"
)
// TestNewOption checks that NewOption tolerates a minimally-populated
// config (zero values everywhere) without panicking.
func TestNewOption(t *testing.T) {
	NewOption(&conf.Config{ServiceOption: &conf.ServiceOption{GcOpt: &conf.GcOpt{}}})
}

View File

@@ -0,0 +1,379 @@
package service
import (
"fmt"
"time"
"go-common/app/service/main/antispam/conf"
"go-common/app/service/main/antispam/dao"
"go-common/app/service/main/antispam/model"
"go-common/app/service/main/antispam/util"
"go-common/library/log"
)
// ToDaoArea maps a business-area name to its dao code, defaulting to the
// reply area when the name is unknown.
func ToDaoArea(area string) int {
	code, ok := conf.Areas[area]
	if !ok {
		return int(dao.AreaReply)
	}
	return code
}
// ToModelArea maps a dao area code back to its name; unknown codes map
// to the empty string.
func ToModelArea(area int) string {
	for name, code := range conf.Areas {
		if code == area {
			return name
		}
	}
	return ""
}
// ToDaoState maps a model state string to its dao code; anything that is
// not explicitly "deleted" collapses to the default state (matching the
// original switch, whose default branch also returned StateDefault).
func ToDaoState(state string) int {
	if state == model.StateDeleted {
		return dao.StateDeleted
	}
	return dao.StateDefault
}
// ToModelState maps a dao state code to its model string; unknown codes
// map to the empty string.
func ToModelState(state int) string {
	switch state {
	case dao.StateDeleted:
		return model.StateDeleted
	case dao.StateDefault:
		return model.StateDefault
	}
	return ""
}
// ToModelOperation maps a dao operation code to its model string;
// unknown codes map to the empty string.
func ToModelOperation(op int) string {
	switch op {
	case dao.OperationIgnore:
		return model.OperationIgnore
	case dao.OperationPutToWhiteList:
		return model.OperationPutToWhiteList
	case dao.OperationRestrictLimit:
		return model.OperationRestrictLimit
	case dao.OperationLimit:
		return model.OperationLimit
	}
	return ""
}
// ToDaoOperation maps a model operation string to its dao code; unknown
// values (and the plain limit itself) fall back to the limit code.
func ToDaoOperation(op string) int {
	switch op {
	case model.OperationIgnore:
		return dao.OperationIgnore
	case model.OperationPutToWhiteList:
		return dao.OperationPutToWhiteList
	case model.OperationRestrictLimit:
		return dao.OperationRestrictLimit
	default:
		return dao.OperationLimit
	}
}
// ToModelRules converts a slice of dao rules element-wise, preserving
// positions (nil element in, nil element out); a nil slice stays nil.
func ToModelRules(rules []*dao.Rule) []*model.Rule {
	if rules == nil {
		return nil
	}
	out := make([]*model.Rule, len(rules))
	for i := range rules {
		// ToModelRule already maps nil to nil.
		out[i] = ToModelRule(rules[i])
	}
	return out
}
// ToModelRule converts one dao rule into its model form; nil maps to
// nil, and unrecognized type/scope codes leave the fields empty.
func ToModelRule(d *dao.Rule) *model.Rule {
	if d == nil {
		return nil
	}
	r := &model.Rule{
		ID:            d.ID,
		Area:          ToModelArea(d.Area),
		AllowedCounts: d.AllowedCounts,
		DurationSec:   d.DurationSec,
	}
	// Limit type.
	if d.LimitType == dao.LimitTypeDefaultLimit {
		r.LimitType = model.LimitTypeDefault
	} else if d.LimitType == dao.LimitTypeRestrictLimit {
		r.LimitType = model.LimitTypeRestrict
	}
	// Limit scope.
	if d.LimitScope == dao.LimitScopeGlobal {
		r.LimitScope = model.LimitScopeGlobal
	} else if d.LimitScope == dao.LimitScopeLocal {
		r.LimitScope = model.LimitScopeLocal
	}
	return r
}
// ToDaoRule converts one model rule into its dao form; nil maps to nil,
// and unrecognized type/scope strings leave the fields at zero.
func ToDaoRule(m *model.Rule) *dao.Rule {
	if m == nil {
		return nil
	}
	d := &dao.Rule{
		ID:            m.ID,
		AllowedCounts: m.AllowedCounts,
		DurationSec:   m.DurationSec,
		Area:          ToDaoArea(m.Area),
	}
	// Limit type.
	if m.LimitType == model.LimitTypeDefault {
		d.LimitType = dao.LimitTypeDefaultLimit
	} else if m.LimitType == model.LimitTypeRestrict {
		d.LimitType = dao.LimitTypeRestrictLimit
	}
	// Limit scope.
	if m.LimitScope == model.LimitScopeGlobal {
		d.LimitScope = dao.LimitScopeGlobal
	} else if m.LimitScope == model.LimitScopeLocal {
		d.LimitScope = dao.LimitScopeLocal
	}
	return d
}
// ToModelKeywords converts a slice of dao keywords element-wise,
// preserving positions; a nil slice stays nil.
func ToModelKeywords(keywords []*dao.Keyword) []*model.Keyword {
	if keywords == nil {
		return nil
	}
	out := make([]*model.Keyword, len(keywords))
	for i := range keywords {
		// ToModelKeyword already maps nil to nil.
		out[i] = ToModelKeyword(keywords[i])
	}
	return out
}
// ToModelKeyword converts one dao keyword into its model form. nil maps
// to nil, and a row with an unrecognized tag yields nil after logging,
// matching the original defensive behavior.
func ToModelKeyword(d *dao.Keyword) *model.Keyword {
	if d == nil {
		return nil
	}
	var tag string
	switch d.Tag {
	case dao.KeywordTagDefaultLimit:
		tag = model.KeywordTagDefaultLimit
	case dao.KeywordTagRestrictLimit:
		tag = model.KeywordTagRestrictLimit
	case dao.KeywordTagWhite:
		tag = model.KeywordTagWhite
	case dao.KeywordTagBlack:
		tag = model.KeywordTagBlack
	default:
		log.Error("unknown keyword tag %q", d.Tag)
		return nil
	}
	return &model.Keyword{
		ID:            d.ID,
		Content:       d.Content,
		RegexpName:    d.RegexpName,
		HitCounts:     d.HitCounts,
		OriginContent: d.OriginContent,
		State:         ToModelState(d.State),
		CTime:         util.JSONTime(d.CTime),
		MTime:         util.JSONTime(d.MTime),
		Area:          ToModelArea(d.Area),
		Tag:           tag,
	}
}
// ToDaoKeywords converts a slice of model keywords element-wise; a nil
// slice stays nil.
//
// Fix: the result slice is now pre-sized with capacity len(ks) instead
// of growing through repeated append reallocations.
func ToDaoKeywords(ks []*model.Keyword) []*dao.Keyword {
	if ks == nil {
		return nil
	}
	result := make([]*dao.Keyword, 0, len(ks))
	for _, k := range ks {
		result = append(result, ToDaoKeyword(k))
	}
	return result
}
// ToDaoKeyword converts one model keyword into its dao form. nil maps to
// nil, and an unrecognized tag yields nil after logging, matching the
// original defensive behavior.
func ToDaoKeyword(k *model.Keyword) *dao.Keyword {
	if k == nil {
		return nil
	}
	var tag int
	switch k.Tag {
	case model.KeywordTagDefaultLimit:
		tag = dao.KeywordTagDefaultLimit
	case model.KeywordTagRestrictLimit:
		tag = dao.KeywordTagRestrictLimit
	case model.KeywordTagWhite:
		tag = dao.KeywordTagWhite
	case model.KeywordTagBlack:
		tag = dao.KeywordTagBlack
	default:
		log.Error("Unknown tag %q", k.Tag)
		return nil
	}
	return &dao.Keyword{
		ID:            k.ID,
		Content:       k.Content,
		RegexpName:    k.RegexpName,
		Area:          ToDaoArea(k.Area),
		State:         ToDaoState(k.State),
		OriginContent: k.OriginContent,
		CTime:         time.Time(k.CTime),
		HitCounts:     k.HitCounts,
		Tag:           tag,
	}
}
// ToModelRegexps converts a slice of dao regexps element-wise, preserving
// positions; a nil slice stays nil.
func ToModelRegexps(rs []*dao.Regexp) []*model.Regexp {
	if rs == nil {
		return nil
	}
	out := make([]*model.Regexp, len(rs))
	for i := range rs {
		// ToModelRegexp already maps nil to nil.
		out[i] = ToModelRegexp(rs[i])
	}
	return out
}
// ToModelRegexp converts one dao regexp row into its model form; nil
// maps to nil. The compiled Reg field is left unset here.
func ToModelRegexp(d *dao.Regexp) *model.Regexp {
	if d == nil {
		return nil
	}
	m := &model.Regexp{
		ID:      d.ID,
		AdminID: d.AdminID,
		Name:    d.Name,
		Content: d.Content,
	}
	m.Area = ToModelArea(d.Area)
	m.State = ToModelState(d.State)
	m.Operation = ToModelOperation(d.Operation)
	m.CTime = util.JSONTime(d.CTime)
	m.MTime = util.JSONTime(d.MTime)
	return m
}
// ToDaoRegexps converts a slice of model regexps element-wise; a nil
// slice stays nil.
//
// Fix: the result slice is now pre-sized with capacity len(regs) instead
// of growing through repeated append reallocations.
func ToDaoRegexps(regs []*model.Regexp) []*dao.Regexp {
	if regs == nil {
		return nil
	}
	result := make([]*dao.Regexp, 0, len(regs))
	for _, reg := range regs {
		result = append(result, ToDaoRegexp(reg))
	}
	return result
}
// ToDaoRegexp converts one model regexp into its dao row form; nil maps
// to nil.
func ToDaoRegexp(m *model.Regexp) *dao.Regexp {
	if m == nil {
		return nil
	}
	d := &dao.Regexp{
		ID:      m.ID,
		Name:    m.Name,
		AdminID: m.AdminID,
		Content: m.Content,
	}
	d.Area = ToDaoArea(m.Area)
	d.State = ToDaoState(m.State)
	d.Operation = ToDaoOperation(m.Operation)
	d.CTime = time.Time(m.CTime)
	d.MTime = time.Time(m.MTime)
	return d
}
// ToDaoCond converts a service-layer query Condition into its dao
// counterpart: pass-through fields are copied, enumerated fields (area,
// state, tags, limit type/scope) are mapped to their numeric dao codes
// rendered as strings, and time fields are formatted via util.TimeFormat.
func ToDaoCond(cond *Condition) *dao.Condition {
	if cond == nil {
		return nil
	}
	res := &dao.Condition{
		Pagination: cond.Pagination,
		HitCounts:  cond.HitCounts,
		Search:     cond.Search,
		Offset:     cond.Offset,
		Limit:      cond.Limit,
		Order:      cond.Order,
		OrderBy:    cond.OrderBy,
		Contents:   cond.Contents,
	}
	// Enumerations are serialized as their numeric codes.
	if len(cond.Area) > 0 {
		res.Area = fmt.Sprintf("%d", ToDaoArea(cond.Area))
	}
	if len(cond.State) > 0 {
		res.State = fmt.Sprintf("%d", ToDaoState(cond.State))
	}
	// Stable default ordering.
	if res.OrderBy == "" {
		res.OrderBy = "id"
	}
	if cond.StartTime != nil {
		res.StartTime = cond.StartTime.Format(util.TimeFormat)
	}
	if cond.EndTime != nil {
		res.EndTime = cond.EndTime.Format(util.TimeFormat)
	}
	if cond.LastModifiedTime != nil {
		res.LastModifiedTime = cond.LastModifiedTime.Format(util.TimeFormat)
	}
	// Unknown tag values are silently dropped.
	for _, tag := range cond.Tags {
		switch tag {
		case model.KeywordTagBlack:
			res.Tags = append(res.Tags, fmt.Sprintf("%d", dao.KeywordTagBlack))
		case model.KeywordTagWhite:
			res.Tags = append(res.Tags, fmt.Sprintf("%d", dao.KeywordTagWhite))
		case model.KeywordTagDefaultLimit:
			res.Tags = append(res.Tags, fmt.Sprintf("%d", dao.KeywordTagDefaultLimit))
		case model.KeywordTagRestrictLimit:
			res.Tags = append(res.Tags, fmt.Sprintf("%d", dao.KeywordTagRestrictLimit))
		}
	}
	// Unknown limit type/scope leave the dao fields empty.
	switch cond.LimitType {
	case model.LimitTypeBlack:
		res.LimitType = fmt.Sprintf("%d", dao.LimitTypeBlack)
	case model.LimitTypeWhite:
		res.LimitType = fmt.Sprintf("%d", dao.LimitTypeWhite)
	case model.LimitTypeDefault:
		res.LimitType = fmt.Sprintf("%d", dao.LimitTypeDefaultLimit)
	case model.LimitTypeRestrict:
		res.LimitType = fmt.Sprintf("%d", dao.LimitTypeRestrictLimit)
	}
	switch cond.LimitScope {
	case model.LimitScopeGlobal:
		res.LimitScope = fmt.Sprintf("%d", dao.LimitScopeGlobal)
	case model.LimitScopeLocal:
		res.LimitScope = fmt.Sprintf("%d", dao.LimitScopeLocal)
	}
	return res
}

View File

@@ -0,0 +1,19 @@
package service
import "testing"
// TestToDaoArea smoke-tests the area-name -> dao-id conversion; it only
// checks that the call does not panic and makes no assertions.
func TestToDaoArea(t *testing.T) {
	ToDaoArea("reply")
}
// TestToModelArea smoke-tests the dao-id -> area-name conversion; no
// assertions, panic-free execution only.
func TestToModelArea(t *testing.T) {
	ToModelArea(1)
}
// TestToDaoState smoke-tests the state-name -> dao-id conversion; no
// assertions, panic-free execution only.
func TestToDaoState(t *testing.T) {
	ToDaoState("default")
}
// TestToModelState smoke-tests the dao-id -> state-name conversion; no
// assertions, panic-free execution only.
func TestToModelState(t *testing.T) {
	ToModelState(1)
}

View File

@@ -0,0 +1,208 @@
package service
import (
"context"
"errors"
"fmt"
"sync"
"sync/atomic"
"time"
"go-common/app/service/main/antispam/model"
"go-common/app/service/main/antispam/util/trie"
"go-common/library/log"
)
var (
	// ErrTrieNotFound is returned by TrieMgr.Get when no keyword in the
	// requested area matches the content.
	// NOTE(review): the message typo "so such" is kept as-is since
	// callers or logs may already match on the exact string.
	ErrTrieNotFound = errors.New("so such key in trie tree")
)
// Put inserts or overwrites key's value in the underlying trie while
// holding the write lock.
func (ctrie *ConcurrentTrie) Put(key string, val interface{}) {
	ctrie.Lock()
	defer ctrie.Unlock()
	ctrie.trier.Put(key, val)
}
// Delete removes key's value from the underlying trie by overwriting it
// with nil (the Trier interface exposes no hard delete).
//
// The presence check and the clearing write are done under a single
// write lock. The previous implementation checked under RLock, released
// it, and re-acquired the write lock, leaving a window in which another
// goroutine's Put for the same key could be clobbered.
func (ctrie *ConcurrentTrie) Delete(key string) {
	ctrie.Lock()
	defer ctrie.Unlock()
	if ctrie.trier.Get(key) == nil {
		return
	}
	ctrie.trier.Put(key, nil)
}
// KeywordLimitInfo is the payload stored at each trie node: the DB id
// of the keyword and the limit tag it was indexed under.
type KeywordLimitInfo struct {
	KeywordID int64
	LimitType string // one of the model.KeywordTag* values used as trie-map keys
}
// find looks content up in the underlying trie under a read lock.
// It returns the matched key and its *KeywordLimitInfo, or ("", nil)
// when nothing matches or the stored value has an unexpected type
// (e.g. a nil placeholder left by Delete).
func (ctrie *ConcurrentTrie) find(content string) (string, *KeywordLimitInfo) {
	ctrie.RLock()
	defer ctrie.RUnlock()
	key, val := ctrie.trier.Find(content, "")
	if val == nil {
		return "", nil
	}
	if limitInfo, ok := val.(*KeywordLimitInfo); ok {
		return key, limitInfo
	}
	return "", nil
}
// NewConcurrentTrie returns a ConcurrentTrie whose underlying trie is
// already initialized, so Put/Delete/find are usable immediately.
// The previous version left trier as a nil interface, which panicked if
// Refresh ran before the first Build populated it.
func NewConcurrentTrie() *ConcurrentTrie {
	return &ConcurrentTrie{trier: trie.New()}
}
// ConcurrentTrie wraps a trie.Trier with an RWMutex so lookups and
// updates may run concurrently from multiple goroutines.
type ConcurrentTrie struct {
	sync.RWMutex
	trier trie.Trier
}
// LastModifiedTime returns the timestamp recorded by the last
// Build/Refresh, or nil when none has been recorded yet.
func (mgr *TrieMgr) LastModifiedTime() *time.Time {
	if t, ok := mgr.lastModifiedTime.Load().(time.Time); ok {
		return &t
	}
	return nil
}
// UpdateLastModifiedTime records "now" as the incremental-refresh
// watermark used by Refresh's LastModifiedTime query filter.
func (mgr *TrieMgr) UpdateLastModifiedTime() {
	mgr.lastModifiedTime.Store(time.Now())
}
// Refresh incrementally syncs the tries with keywords modified since
// the last recorded refresh time. Deleted or whitelisted keywords are
// removed from every tag trie of their area; every other keyword is
// installed in the trie matching its current tag and removed from the
// rest.
//
// NOTE(review): the watermark is advanced after the query returns, so
// rows committed while the query ran may be picked up again next
// round — harmless, as the update is idempotent.
func (mgr *TrieMgr) Refresh() {
	ks, _, err := mgr.service.GetKeywordsByCond(context.TODO(),
		&Condition{LastModifiedTime: mgr.LastModifiedTime()})
	if err != nil {
		// Keep serving the stale tries, but leave a trace instead of
		// swallowing the error silently as before.
		log.Error("refresh keywords error(%v)", err)
		return
	}
	mgr.UpdateLastModifiedTime()
	for _, k := range ks {
		if k.State == model.StateDeleted ||
			k.Tag == model.KeywordTagWhite {
			for _, ctrie := range mgr.tries[k.Area] {
				ctrie.Delete(k.Content)
			}
			continue
		}
		for tag, ctrie := range mgr.tries[k.Area] {
			// NOTICE: must exec `put` before exec `delete`
			if tag == k.Tag {
				ctrie.Put(k.Content, &KeywordLimitInfo{
					KeywordID: k.ID,
					LimitType: tag,
				})
			} else {
				ctrie.Delete(k.Content)
			}
		}
	}
	log.Info("refresh finished.")
}
// build rebuilds this trie from scratch for one (area, tag) pair,
// paging through the keyword table by ascending id, and swaps the
// finished tree in under the write lock so readers never observe a
// partially built trie.
func (ctrie *ConcurrentTrie) build(area string, tag string, s Service, defaultDBLimit int64) {
	newTrier := trie.New()
	var lastID int64
	for {
		ks, err := s.GetKeywordsByOffsetLimit(context.TODO(),
			&Condition{
				Area:   area,
				Offset: fmt.Sprintf("%d", lastID),
				Limit:  fmt.Sprintf("%d", defaultDBLimit),
				Tags:   []string{tag}})
		// NOTE(review): a DB error aborts the loop and installs whatever
		// was fetched so far; the error itself is dropped here.
		if err != nil || len(ks) == 0 {
			break
		}
		for _, k := range ks {
			// Track the largest id seen; it becomes the next page's offset.
			if k.ID > lastID {
				lastID = k.ID
			}
			newTrier.Put(k.Content, &KeywordLimitInfo{
				KeywordID: k.ID,
				LimitType: tag,
			})
		}
		// A short page means the table is exhausted.
		if len(ks) < int(defaultDBLimit) {
			break
		}
	}
	ctrie.Lock()
	defer ctrie.Unlock()
	ctrie.trier = newTrier
}
// Build performs a full rebuild of every (area, tag) trie, paging the
// DB with dBLimit rows per query, and records the rebuild time as the
// new incremental-refresh watermark.
func (mgr *TrieMgr) Build(dBLimit int64) {
	mgr.UpdateLastModifiedTime()
	for area, subMap := range mgr.tries {
		for tag, ctrie := range subMap {
			ctrie.build(area, tag, mgr.service, dBLimit)
		}
	}
	log.Info("build finished.")
}
// Get looks content up in the given area's tries and returns the
// matched keyword plus its limit info. Tags are probed from most to
// least severe (black, restrict, default) so the harshest rule wins.
// ErrTrieNotFound is returned when the area is unknown or nothing
// matches.
func (mgr *TrieMgr) Get(area string, content string) (string, *KeywordLimitInfo, error) {
	tagTries, ok := mgr.tries[area]
	if !ok {
		return "", nil, ErrTrieNotFound
	}
	for _, severity := range []string{
		model.KeywordTagBlack,
		model.KeywordTagRestrictLimit,
		model.KeywordTagDefaultLimit,
	} {
		ctrie, ok := tagTries[severity]
		if !ok {
			continue
		}
		if key, info := ctrie.find(content); info != nil {
			return key, info, nil
		}
	}
	return "", nil, ErrTrieNotFound
}
// NewTagConcurrentTrieMap returns one empty ConcurrentTrie per limit
// tag (default, restrict, black); white-listed keywords are never
// indexed and therefore get no trie.
func NewTagConcurrentTrieMap() map[string]*ConcurrentTrie {
	return map[string]*ConcurrentTrie{
		model.KeywordTagDefaultLimit:  NewConcurrentTrie(),
		model.KeywordTagRestrictLimit: NewConcurrentTrie(),
		model.KeywordTagBlack:         NewConcurrentTrie(),
	}
}
// NewTrieMgr creates a TrieMgr backed by service, with an empty
// tag-trie map pre-created for every supported area. Call Build to
// populate the tries before serving lookups.
func NewTrieMgr(service Service) *TrieMgr {
	return &TrieMgr{
		service: service,
		tries: map[string]map[string]*ConcurrentTrie{
			model.AreaReply:      NewTagConcurrentTrieMap(),
			model.AreaIMessage:   NewTagConcurrentTrieMap(),
			model.AreaLiveDM:     NewTagConcurrentTrieMap(),
			model.AreaMainSiteDM: NewTagConcurrentTrieMap(),
		},
	}
}
// TrieMgr owns the in-memory keyword tries and keeps them in sync with
// the keyword store via Build (full) and Refresh (incremental).
type TrieMgr struct {
	service          Service
	tries            map[string]map[string]*ConcurrentTrie // area:tag:trie, TODO: pick a better representation
	lastModifiedTime atomic.Value                          // holds a time.Time watermark for Refresh
}

View File

@@ -0,0 +1,41 @@
package service
import (
"math/rand"
"testing"
"time"
"go-common/app/service/main/antispam/util"
"go-common/app/service/main/antispam/util/trie"
)
// TestConcurrentTrieFind verifies that a value put into a
// ConcurrentTrie can be found again; the trier must be assigned
// manually because the zero ConcurrentTrie has none.
func TestConcurrentTrieFind(t *testing.T) {
	conTrie := NewConcurrentTrie()
	conTrie.trier = trie.New()
	conTrie.Put("jjjj", &KeywordLimitInfo{KeywordID: 111})
	k, v := conTrie.find("jjjj")
	t.Logf("k: %v, v: %v", k, v)
	if v == nil {
		t.FailNow()
	}
}
// BenchmarkConcurrentTrieFind measures find while concurrent Puts run.
// NOTE(review): the per-iteration goroutines are never waited for, so
// they may outlive the benchmark; and the two literal Puts store raw
// ints rather than *KeywordLimitInfo, which find's type assertion will
// reject — both look unintentional but are left as-is.
func BenchmarkConcurrentTrieFind(b *testing.B) {
	rand.Seed(time.Now().UnixNano())
	tr := NewConcurrentTrie()
	tr.trier = trie.New()
	for i := 0; i < b.N; i++ {
		tr.Put(util.RandStr(20), &KeywordLimitInfo{})
	}
	tr.Put("地方考虑saDFFDSALK", 8888)
	tr.Put("都说了开发贷款", 888)
	for i := 0; i < b.N; i++ {
		go func() {
			tr.Put(util.RandStr(20), &KeywordLimitInfo{})
		}()
		tr.find("dfa啥都发生地方的施工费按发的噶是打发士大夫撒旦噶尔尕热狗怕的是结果来看砥节奉公来人速度感而过;sfdsfas fsadf asd fsad f asd都说了sdfs gfdgd jimmy开发速度来发噶都说了开发贷款时间范德萨了空间发的是 jimmy按时到路口发生撒地方考虑saDFFDSALKFDFASDFASDFSDFSADFRGEWTRETGERG")
	}
}

View File

@@ -0,0 +1,51 @@
# Auto-managed Bazel build file for the antispam util package.
package(default_visibility = ["//visibility:public"])

load(
    "@io_bazel_rules_go//go:def.bzl",
    "go_test",
    "go_library",
)

# Unit tests for the util helpers; testify is the only external dep.
go_test(
    name = "go_default_test",
    srcs = [
        "deviations_test.go",
        "pagination_test.go",
        "string_test.go",
    ],
    embed = [":go_default_library"],
    rundir = ".",
    tags = ["automanaged"],
    deps = ["//vendor/github.com/stretchr/testify/assert:go_default_library"],
)

# The util library itself (math, pagination, rand, string, time helpers).
go_library(
    name = "go_default_library",
    srcs = [
        "deviations.go",
        "pagination.go",
        "rand.go",
        "string.go",
        "time.go",
    ],
    importpath = "go-common/app/service/main/antispam/util",
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

# Source globs consumed by the repo-wide all-srcs aggregation.
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)

filegroup(
    name = "all-srcs",
    srcs = [
        ":package-srcs",
        "//app/service/main/antispam/util/trie:all-srcs",
    ],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,64 @@
package util
import "math"
// Max .
func Max(vars []int64) (maxVar int64) {
for _, i := range vars {
if i > maxVar {
maxVar = i
}
}
return
}
// Expectation .
func Expectation(randomVars []float64) float64 {
if len(randomVars) == 0 {
return 0
}
var sum float64
for _, rv := range randomVars {
sum += rv
}
return sum / float64(len(randomVars))
}
// StdDeviation .
func StdDeviation(randomVars []float64) float64 {
if len(randomVars) == 0 {
return 0
}
return math.Sqrt(Deviation(randomVars))
}
// Deviation .
func Deviation(randomVars []float64) float64 {
if len(randomVars) == 0 {
return 0
}
var total float64
expec := Expectation(randomVars)
for _, rv := range randomVars {
total += math.Pow(rv-expec, 2.0)
}
return total / float64(len(randomVars))
}
// Normallization .
func Normallization(randomVars []int64) []float64 {
if len(randomVars) == 0 {
return nil
}
maxVal := Max(randomVars)
if maxVal == 0 || maxVal == 1 {
return nil
}
res := make([]float64, 0, len(randomVars))
for _, rv := range randomVars {
res = append(res, math.Log10(float64(rv))/math.Log10(float64(maxVal)))
}
return res
}

View File

@@ -0,0 +1,255 @@
package util
import "testing"
// TestMax exercises Max on empty, single-element, and mixed inputs.
// NOTE(review): this is a log-only smoke test — it asserts nothing
// about the returned value.
func TestMax(t *testing.T) {
	cases := []struct {
		input []int64
	}{
		{
			input: []int64{},
		},
		{
			input: []int64{
				268826797,
			},
		},
		{
			input: []int64{
				1,
				2,
				3,
				4,
				5,
				272668542,
				87759075,
				272670403,
				192148035,
				189361023,
				88269430,
				89400131,
				272690653,
				269278391,
				268823477,
				268826797,
			},
		},
	}
	for _, c := range cases {
		t.Run("", func(t *testing.T) {
			rs := Max(c.input)
			t.Logf("input %v \noutputs:%v\n", c.input, rs)
		})
	}
}
// TestStdDeviation exercises StdDeviation on empty, single-element, and
// mixed inputs. NOTE(review): log-only smoke test, no assertions.
func TestStdDeviation(t *testing.T) {
	cases := []struct {
		input []float64
	}{
		{
			input: []float64{},
		},
		{
			input: []float64{
				268826797,
			},
		},
		{
			input: []float64{
				1,
				2,
				3,
				4,
				5,
				272668542,
				87759075,
				272670403,
				192148035,
				189361023,
				88269430,
				89400131,
				272690653,
				269278391,
				268823477,
				268826797,
			},
		},
	}
	for _, c := range cases {
		t.Run("", func(t *testing.T) {
			rs := StdDeviation(c.input)
			t.Logf("input %v \noutputs:%v\n", c.input, rs)
		})
	}
}
// TestDeviation exercises Deviation on one mixed input.
// NOTE(review): log-only smoke test, no assertions.
func TestDeviation(t *testing.T) {
	cases := []struct {
		input []float64
	}{
		{
			input: []float64{
				1,
				2,
				3,
				4,
				5,
				272668542,
				87759075,
				272670403,
				192148035,
				189361023,
				88269430,
				89400131,
				272690653,
				269278391,
				268823477,
				268826797,
			},
		},
	}
	for _, c := range cases {
		t.Run("", func(t *testing.T) {
			rs := Deviation(c.input)
			t.Logf("input %v \noutputs:%v\n", c.input, rs)
		})
	}
}
// TestNormallization exercises Normallization on one mixed input.
// NOTE(review): log-only smoke test, no assertions.
func TestNormallization(t *testing.T) {
	cases := []struct {
		input []int64
	}{
		{
			input: []int64{
				1,
				2,
				3,
				4,
				5,
				272668542,
				87759075,
				272670403,
				192148035,
				189361023,
				88269430,
				89400131,
				272690653,
				269278391,
				268823477,
				268826797,
			},
		},
	}
	for _, c := range cases {
		t.Run("", func(t *testing.T) {
			rs := Normallization(c.input)
			t.Logf("input %v\n, outputs:%v\n", c.input, rs)
		})
	}
}
// TestExpectation exercises Expectation on two large count samples.
// NOTE(review): log-only smoke test, no assertions.
func TestExpectation(t *testing.T) {
	cases := []struct {
		input []float64
	}{
		{
			input: []float64{
				272668542,
				87759075,
				272670403,
				192148035,
				189361023,
				88269430,
				89400131,
				272690653,
				269278391,
				268823477,
				268826797,
				107338074,
				272635619,
				272655050,
				272280850,
				274403561,
				274404369,
				274408851,
				33561986,
				274440108,
				274440766,
				274440926,
				273730691,
				274443040,
				273744469,
				274443230,
				273749519,
				274837710,
				273748847,
				274837920,
				273814911,
				274838433,
				269278030,
				273815488,
				273819536,
				274838710,
				274838821,
				274875985,
				274876083,
			},
		},
		{
			input: []float64{
				23062012,
				32199188,
				114238752,
				35134612,
				18289010,
				29669239,
				25702393,
				19201815,
				38191035,
				24237961,
				10155123,
				36970516,
				14015382,
				89085165,
				28324458,
				38405237,
				20190219,
				175006499,
				178984876,
				34990873,
				158026517,
				111053972,
				25519948,
				39061494,
				155286833,
				87469728,
				62849283,
				210174070,
				13841839,
				65905090,
				8219588,
				37192235,
				274356431,
				44363565,
				22899631,
				43582749,
				101217680,
				25011431,
				33447081,
				222278335,
				20785287,
				38448378,
				40153047,
			},
		},
	}
	for _, c := range cases {
		t.Run("", func(t *testing.T) {
			output := Expectation(c.input)
			t.Logf("input %v, output:%v", c.input, output)
		})
	}
}

View File

@@ -0,0 +1,64 @@
package util
const (
	// DefaultPerPage is the page size used when PerPage is unset.
	DefaultPerPage = 20
)

// SimplePage calculates the 1-based [from, to] range without knowing
// the total count, normalizing the receiver first.
// NOTE: when EITHER field is zero, BOTH are reset to (1, DefaultPerPage);
// callers rely on this (curPage=0, perPage=1 yields 1..20).
func (p *Pagination) SimplePage() (from int64, to int64) {
	if p.CurPage == 0 || p.PerPage == 0 {
		p.CurPage, p.PerPage = 1, DefaultPerPage
	}
	from = (p.CurPage-1)*p.PerPage + 1
	to = from + p.PerPage - 1
	return
}

// Page calculates the 1-based [from, to] range clamped to total items.
// Pages entirely past the data return (0, 0). The receiver's zero
// fields are normalized independently.
func (p *Pagination) Page(total int64) (from int64, to int64) {
	if p.CurPage == 0 {
		p.CurPage = 1
	}
	if p.PerPage == 0 {
		p.PerPage = DefaultPerPage
	}
	// `<=` (not `<`) so a total that exactly fills the preceding pages
	// (e.g. total=20, perPage=20, curPage=2) is reported as empty; the
	// old `<` comparison returned the bogus range (1, 20) there. This
	// also covers total == 0 on page 1.
	if total <= p.PerPage*(p.CurPage-1) {
		return 0, 0
	}
	if total <= p.PerPage {
		return 1, total
	}
	from = (p.CurPage-1)*p.PerPage + 1
	if (total - from + 1) < p.PerPage {
		return from, total
	}
	return from, from + p.PerPage - 1
}

// VagueOffsetLimit converts SimplePage's range into SQL-style
// (offset, limit) without a total count.
func (p *Pagination) VagueOffsetLimit() (offset int64, limit int64) {
	from, to := p.SimplePage()
	if to == 0 || from == 0 {
		return 0, 0
	}
	return from - 1, to - from + 1
}

// OffsetLimit converts Page's clamped range into SQL-style
// (offset, limit); out-of-range pages yield (0, 0).
func (p *Pagination) OffsetLimit(total int64) (offset int64, limit int64) {
	from, to := p.Page(total)
	if to == 0 || from == 0 {
		return 0, 0
	}
	return from - 1, to - from + 1
}

// Pagination performs the page-range arithmetic; its methods normalize
// zero fields in place.
type Pagination struct {
	CurPage int64
	PerPage int64
}

View File

@@ -0,0 +1,118 @@
package util
import (
"fmt"
"math/rand"
"testing"
"time"
)
// TestSimplePage pins SimplePage's range math, including the rule that
// a zero in either field resets both to (1, DefaultPerPage).
func TestSimplePage(t *testing.T) {
	cases := []struct {
		perPage      int64
		curPage      int64
		expectedFrom int64
		expectedTo   int64
	}{
		{20, 9, 161, 180},
		{0, 1, 1, 20},
		{0, 0, 1, 20},
		{1, 0, 1, 20},
	}
	for _, c := range cases {
		t.Run(fmt.Sprintf("curPage(%d) perPage(%d)", c.curPage, c.perPage), func(t *testing.T) {
			p := &Pagination{
				CurPage: c.curPage,
				PerPage: c.perPage,
			}
			from, to := p.SimplePage()
			if from != c.expectedFrom || to != c.expectedTo {
				t.Errorf("cond.SimplePage() = from: %d, to: %d, want: %d, %d", from, to, c.expectedFrom, c.expectedTo)
			}
		})
	}
}
// TestPage pins Page's clamped range math: partial last pages,
// out-of-range pages -> (0, 0), and zero-field normalization.
func TestPage(t *testing.T) {
	cases := []struct {
		total        int64
		perPage      int64
		curPage      int64
		expectedFrom int64
		expectedTo   int64
	}{
		{66269, 20, 3314, 66261, 66269},
		{66269, 20, 3313, 66241, 66260},
		{81, 20, 9, 0, 0},
		{100, 0, 1, 1, 20},
		{1, 20, 1, 1, 1},
		{0, 20, 1, 0, 0},
		{5, 20, 1, 1, 5},
		{211, 20, 3, 41, 60},
		{100, 100, 1, 1, 100},
		{101, 20, 6, 101, 101},
		{211, 20, 2, 21, 40},
		{211, 20, 1, 1, 20},
	}
	for _, c := range cases {
		t.Run(fmt.Sprintf("total(%d) curPage(%d) perPage(%d)", c.total, c.curPage, c.perPage), func(t *testing.T) {
			p := &Pagination{
				CurPage: c.curPage,
				PerPage: c.perPage,
			}
			from, to := p.Page(c.total)
			if from != c.expectedFrom || to != c.expectedTo {
				t.Errorf("cond.Page(%d) = from: %d, to: %d, want: %d, %d", c.total, from, to, c.expectedFrom, c.expectedTo)
			}
		})
	}
}
// TestOffsetLimit pins the SQL-style (offset, limit) derived from
// Page's clamped range.
func TestOffsetLimit(t *testing.T) {
	cases := []struct {
		total          int64
		perpage        int64
		curpage        int64
		expectedoffset int64
		expectedlimit  int64
	}{
		{66269, 20, 3314, 66260, 9},
		{66269, 20, 3313, 66240, 20},
		{100, 0, 1, 0, 20},
		{1, 20, 1, 0, 1},
		{0, 20, 1, 0, 0},
		{5, 20, 1, 0, 5},
		{211, 20, 3, 40, 20},
		{100, 100, 1, 0, 100},
		{101, 20, 6, 100, 1},
		{211, 20, 2, 20, 20},
		{211, 20, 1, 0, 20},
	}
	for _, c := range cases {
		t.Run(fmt.Sprintf("total(%d) curpage(%d) perpage(%d)", c.total, c.curpage, c.perpage), func(t *testing.T) {
			p := &Pagination{
				CurPage: c.curpage,
				PerPage: c.perpage,
			}
			offset, limit := p.OffsetLimit(c.total)
			if offset != c.expectedoffset || limit != c.expectedlimit {
				t.Errorf("cond.offsetlimit(%d) = offset: %d, limit: %d, want %d, %d", c.total, offset, limit, c.expectedoffset, c.expectedlimit)
			}
		})
	}
}
// TestBulkPage fuzzes Page with random totals/pages and only checks the
// invariant that neither bound goes negative.
func TestBulkPage(t *testing.T) {
	p := &Pagination{}
	rand.Seed(time.Now().Unix())
	for i := 0; i < 9999; i++ {
		p.CurPage = rand.Int63n(10000)
		p.PerPage = rand.Int63n(300)
		total := rand.Int63n(20000)
		from, to := p.Page(total)
		if from < 0 || to < 0 {
			t.Fatalf(`Bulk test page fail, got negative result,
total: %d, curPage: %d, perPage: %d, from: %d, to: %d`, total, p.CurPage, p.PerPage, from, to)
		}
	}
}

View File

@@ -0,0 +1,19 @@
package util
import "math/rand"
// letterRunes is the alphabet RandStr draws from: the upper- and
// lower-case ASCII letters.
var letterRunes = []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ")

// RandID returns a pseudo-random non-negative int64 from the shared
// math/rand source.
func RandID() int64 {
	return rand.Int63()
}

// RandStr returns a pseudo-random string of exactly size letters drawn
// uniformly from letterRunes.
func RandStr(size int) string {
	runes := make([]rune, size)
	for i := range runes {
		runes[i] = letterRunes[rand.Intn(len(letterRunes))]
	}
	return string(runes)
}

View File

@@ -0,0 +1,65 @@
package util
import (
"strconv"
"strings"
)
// StrSliToSQLVarchars converts a string slice to a comma-separated list
// of single-quoted SQL varchars,
// eg: ["default", "deleted", "modified"] -> "'default','deleted','modified'"
// so that we can use it in 'SELECT * ... WHERE xxx IN (...)'.
// Embedded single quotes are doubled per SQL escaping rules so a value
// cannot break out of its quotes and inject SQL.
func StrSliToSQLVarchars(s []string) string {
	sli := make([]string, len(s))
	for i, ss := range s {
		sli[i] = "'" + strings.Replace(ss, "'", "''", -1) + "'"
	}
	return strings.Join(sli, ",")
}
// StrToIntSli parses a delimited string of integers into an int64
// slice, eg: StrToIntSli("1,2,3", ",") -> [1,2,3]. It fails fast,
// returning a nil slice and the strconv error for the first piece that
// does not parse.
func StrToIntSli(s string, delimiter string) ([]int64, error) {
	var result []int64
	for _, piece := range strings.Split(s, delimiter) {
		n, err := strconv.ParseInt(piece, 10, 64)
		if err != nil {
			return nil, err
		}
		result = append(result, n)
	}
	return result, nil
}
// IntSliToSQLVarchars renders an int64 slice as a comma-separated
// string, eg: [1,2,3] -> "1,2,3".
func IntSliToSQLVarchars(ints []int64) string {
	return intSliToStr(ints, ",")
}

// intSliToStr joins the decimal representations of ints with delimiter.
func intSliToStr(ints []int64, delimiter string) string {
	var b strings.Builder
	for i, n := range ints {
		if i > 0 {
			b.WriteString(delimiter)
		}
		b.WriteString(strconv.FormatInt(n, 10))
	}
	return b.String()
}
// SameChar reports whether content consists of a single repeated
// character, ignoring case (eg: "XXXxxx" -> true). The comparison is
// rune-based, so multi-byte characters such as "哈哈哈" are handled
// correctly; the old version compared every rune against the FIRST BYTE
// of the string, which broke non-ASCII input and panicked on the empty
// string. An empty string now returns false instead of panicking.
func SameChar(content string) bool {
	content = strings.ToLower(content)
	var first rune
	for i, r := range content {
		if i == 0 {
			first = r
		} else if r != first {
			return false
		}
	}
	return len(content) > 0
}
// StripPrefix strips a leading "prefix ... suffix" header from s: when
// s starts with prefix, everything up to and including the first byte
// of the first occurrence of suffix is removed; otherwise s is returned
// unchanged. If the prefix matches but suffix is absent, Index yields
// -1 and the whole string is returned (s[0:]).
func StripPrefix(s string, prefix string, suffix string) string {
	if !strings.HasPrefix(s, prefix) {
		return s
	}
	return s[strings.Index(s, suffix)+1:]
}

View File

@@ -0,0 +1,135 @@
package util
import (
"fmt"
"reflect"
"testing"
"github.com/stretchr/testify/assert"
)
// ExampleIntSliToSQLVarchars documents the comma-joined output format.
func ExampleIntSliToSQLVarchars() {
	fmt.Println(IntSliToSQLVarchars([]int64{1, 2, 3}))
	// Output: 1,2,3
}
// ExampleStrToIntSli documents the parsed-slice/error return pair.
func ExampleStrToIntSli() {
	fmt.Println(StrToIntSli("1,2,3", ","))
	// Output: [1 2 3] <nil>
}
// ExampleStrSliToSQLVarchars documents the quoted SQL IN-list format.
func ExampleStrSliToSQLVarchars() {
	fmt.Println(StrSliToSQLVarchars([]string{"default", "deleted", "modified"}))
	// Output: 'default','deleted','modified'
}
// TestStrSliToSQLVarchars pins the quoted, comma-joined output.
func TestStrSliToSQLVarchars(t *testing.T) {
	cases := []struct {
		s        []string
		expected string
	}{
		{[]string{"foo", "bar"}, "'foo','bar'"},
	}
	for _, c := range cases {
		t.Run(fmt.Sprintf("inputStr(%v)", c.s), func(t *testing.T) {
			got := StrSliToSQLVarchars(c.s)
			if got != c.expected {
				t.Errorf("StrSliToSQLVarchars(%v) = %s, want: %s", c.s, got, c.expected)
			}
		})
	}
}
// TestStrToIntSli checks parsing with several delimiters; only happy
// paths are covered (no malformed-input case).
func TestStrToIntSli(t *testing.T) {
	cases := []struct {
		s           string
		delimiter   string
		expectedSli []int64
		expectedErr error
	}{
		{"1,2,3", ",", []int64{1, 2, 3}, nil},
		{"1 2 3", " ", []int64{1, 2, 3}, nil},
		{"1|2|3", "|", []int64{1, 2, 3}, nil},
	}
	for _, c := range cases {
		assert := assert.New(t)
		t.Run(fmt.Sprintf("inputString(%v) delimiter(%s)", c.s, c.delimiter), func(t *testing.T) {
			got, err := StrToIntSli(c.s, c.delimiter)
			assert.Equal(c.expectedSli, got, "")
			assert.Equal(c.expectedErr, err, "")
		})
	}
}
// TestIntSliToStr checks the unexported joiner with several delimiters.
func TestIntSliToStr(t *testing.T) {
	cases := []struct {
		s         []int64
		delimiter string
		expected  string
	}{
		{[]int64{1, 2, 3}, ",", "1,2,3"},
		{[]int64{1, 2, 3}, " ", "1 2 3"},
		{[]int64{1, 2, 3}, "|", "1|2|3"},
	}
	for _, c := range cases {
		t.Run(fmt.Sprintf("inputSli(%v) delimiter(%s)", c.s, c.delimiter), func(t *testing.T) {
			got := intSliToStr(c.s, c.delimiter)
			// DeepEqual on two strings is equivalent to ==; kept as-is.
			if !reflect.DeepEqual(got, c.expected) {
				t.Errorf("IntSliToStr(%v, %s) = %s, want %s", c.s, c.delimiter, got, c.expected)
			}
		})
	}
}
// TestStripPrefix checks removal of the reply header "回复 @name :"
// including the empty-body and no-prefix cases.
func TestStripPrefix(t *testing.T) {
	cases := []struct {
		name           string
		content        string
		expectedOutput string
	}{
		{
			"need strip prefix",
			"回复 @画鸾凰 :我知道 但我的不是正版的 上不了工坊 只能要模型软件 格式是LPK的模型软件 你找一下看看 模型列表下面应该有在哪个文件夹里面 找到可以发我QQ1918882322 如果找不到就算吧",
			"我知道 但我的不是正版的 上不了工坊 只能要模型软件 格式是LPK的模型软件 你找一下看看 模型列表下面应该有在哪个文件夹里面 找到可以发我QQ1918882322 如果找不到就算吧",
		},
		{
			"empty reply body",
			"回复 @画鸾凰 :",
			"",
		},
		{
			"not need strip",
			"我知道 但我的不是正版的 上不了工坊 只能要模型软件 格式是LPK的模型软件 你找一下看看 模型列表下面应该有在哪个文件夹里面 找到可以发我QQ1918882322 如果找不到就算吧",
			"我知道 但我的不是正版的 上不了工坊 只能要模型软件 格式是LPK的模型软件 你找一下看看 模型列表下面应该有在哪个文件夹里面 找到可以发我QQ1918882322 如果找不到就算吧",
		},
	}
	for _, c := range cases {
		t.Run(c.name, func(t *testing.T) {
			actual := StripPrefix(c.content, "回复 @", ":")
			if actual != c.expectedOutput {
				t.Fatalf("Strip Prefix failed, expected %q \t\n got %q", c.expectedOutput, actual)
			}
		})
	}
}
// TestSameChar checks repeated-character detection, including the
// case-insensitive "XXXxxx" case; all inputs are ASCII.
func TestSameChar(t *testing.T) {
	cases := []struct {
		content        string
		expectedResult bool
	}{
		{"~~~~~~~", true},
		{"666666666", true},
		{"666666666~~~", false},
		{"WWWWWWW", true},
		{"XXXxxx", true},
	}
	for _, c := range cases {
		t.Run(fmt.Sprintf("content(%s)", c.content), func(t *testing.T) {
			if rs := SameChar(c.content); rs != c.expectedResult {
				t.Errorf("SameChar(%s) = %v, want %v", c.content, rs, c.expectedResult)
			}
		})
	}
}

View File

@@ -0,0 +1,25 @@
package util
import (
"fmt"
"time"
)
const (
// TimeFormat .
TimeFormat = "2006-01-02 15:04:05"
)
// JSONTime .
type JSONTime time.Time
// MarshalJSON .
func (jt JSONTime) MarshalJSON() ([]byte, error) {
stamp := fmt.Sprintf("%q", time.Time(jt).Format(TimeFormat))
return []byte(stamp), nil
}
// Before .
func (jt JSONTime) Before(t time.Time) bool {
return time.Time(jt).Before(t)
}

View File

@@ -0,0 +1,41 @@
# Auto-managed Bazel build file for the util/trie package.
package(default_visibility = ["//visibility:public"])

load(
    "@io_bazel_rules_go//go:def.bzl",
    "go_test",
    "go_library",
)

# Trie unit tests; depend on util for random string generation.
go_test(
    name = "go_default_test",
    srcs = ["rune_trie_test.go"],
    embed = [":go_default_library"],
    rundir = ".",
    tags = ["automanaged"],
    deps = ["//app/service/main/antispam/util:go_default_library"],
)

# The rune-trie library (RuneTrie implementation + Trier interface).
go_library(
    name = "go_default_library",
    srcs = [
        "rune_trie.go",
        "trie.go",
    ],
    importpath = "go-common/app/service/main/antispam/util/trie",
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

# Source globs consumed by the repo-wide all-srcs aggregation.
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)

filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,120 @@
package trie
// find walks the trie consuming contents rune by rune. It returns the
// key (accu plus the consumed runes) and value of the SHORTEST stored
// key that prefixes contents, or ("", nil) when no stored key does.
func (trie *RuneTrie) find(contents []rune, accu []rune) (string, interface{}) {
	// A non-nil value terminates the walk immediately, which is why the
	// shortest stored key wins (e.g. "mm" beats "mmp").
	if trie.value != nil {
		return string(accu), trie.value
	}
	if len(contents) == 0 {
		return "", nil
	}
	k := contents[0]
	child, ok := trie.children[k]
	if !ok {
		return "", nil
	}
	return child.find(contents[1:], append(accu, k))
}

// Find scans content for the leftmost (then shortest) stored key that
// occurs as a substring, returning that key and its value, or
// ("", nil) when nothing matches. accu is prefixed to the returned key.
//
// The scan iterates over suffixes directly; the previous recursive
// version re-converted the remaining string to runes at every step,
// allocating O(n^2) runes per lookup.
func (trie *RuneTrie) Find(content string, accu string) (string, interface{}) {
	runes := []rune(content)
	prefix := []rune(accu)
	for i := range runes {
		if key, val := trie.find(runes[i:], prefix); val != nil {
			return key, val
		}
	}
	return "", nil
}

/*
 * The code below is adapted from https://github.com/dghubble/trie
 */

// RuneTrie is a trie keyed by runes; a node with a non-nil value marks
// the end of a stored key. It is not safe for concurrent use.
type RuneTrie struct {
	value    interface{}
	children map[rune]*RuneTrie
}

// NewRuneTrie returns an empty trie ready for use.
func NewRuneTrie() *RuneTrie {
	return &RuneTrie{
		children: make(map[rune]*RuneTrie),
	}
}

// Get returns the value stored at exactly key, or nil when absent.
func (trie *RuneTrie) Get(key string) interface{} {
	node := trie
	for _, r := range key {
		node = node.children[r]
		if node == nil {
			return nil
		}
	}
	return node.value
}

// Put stores value at key. It reports whether the node previously held
// no value (true for an insert, false for an overwrite).
func (trie *RuneTrie) Put(key string, value interface{}) bool {
	node := trie
	for _, r := range key {
		child := node.children[r]
		if child == nil {
			child = NewRuneTrie()
			node.children[r] = child
		}
		node = child
	}
	isNewVal := node.value == nil
	node.value = value
	return isNewVal
}

// Delete removes the value stored at key and prunes any nodes left
// childless and valueless. It reports false when key was not present.
//
// The backtracking path is indexed per RUNE: the old version sized and
// indexed it by byte offset while ranging over the string, leaving nil
// holes that made pruning multi-byte keys dereference a nil node.
func (trie *RuneTrie) Delete(key string) bool {
	runes := []rune(key)
	path := make([]nodeRune, len(runes))
	node := trie
	for i, r := range runes {
		path[i] = nodeRune{r: r, node: node}
		node = node.children[r]
		if node == nil {
			return false
		}
	}
	node.value = nil
	// Walk back up, removing nodes that carry neither value nor children.
	if node.isLeaf() {
		for i := len(runes) - 1; i >= 0; i-- {
			parent := path[i].node
			r := path[i].r
			delete(parent.children, r)
			if parent.value != nil || !parent.isLeaf() {
				break
			}
		}
	}
	return true
}

// Walk visits every stored key/value pair in unspecified order,
// stopping at and returning the first error produced by walker.
func (trie *RuneTrie) Walk(walker WalkFunc) error {
	return trie.walk("", walker)
}

// nodeRune records one step of a Delete descent for backtracking.
type nodeRune struct {
	node *RuneTrie
	r    rune
}

// walk recursively visits value-bearing nodes under key. Unlike the
// previous version, the walker's error at a value node is propagated
// rather than silently discarded.
func (trie *RuneTrie) walk(key string, walker WalkFunc) error {
	if trie.value != nil {
		if err := walker(key, trie.value); err != nil {
			return err
		}
	}
	for r, child := range trie.children {
		if err := child.walk(key+string(r), walker); err != nil {
			return err
		}
	}
	return nil
}

// isLeaf reports whether the node has no children.
func (trie *RuneTrie) isLeaf() bool {
	return len(trie.children) == 0
}

// WalkFunc is invoked by Walk for each stored key/value pair; a
// non-nil error aborts the walk.
type WalkFunc func(key string, value interface{}) error

View File

@@ -0,0 +1,108 @@
package trie
import (
"errors"
"math/rand"
"testing"
"time"
"go-common/app/service/main/antispam/util"
)
// TestRuneTrieAdd verifies exact-key Put/Get round trips and that a
// non-stored key ("jimxx") is not found by Get.
func TestRuneTrieAdd(t *testing.T) {
	tr := NewRuneTrie()
	tr.Put("jimmy", 1)
	tr.Put("anny", 87)
	tr.Put("xxxx", 23)
	tr.Put("jim", 2)
	if v := tr.Get("jimxx"); v != nil {
		t.Errorf("expected nil, got %v", v)
	}
	if v := tr.Get("jimmy"); v.(int) != 1 {
		t.Errorf("expected val, got %v", v)
	}
	if v := tr.Get("anny"); v.(int) != 87 {
		t.Errorf("expected val, got %v", v)
	}
	if v := tr.Get("xxxx"); v.(int) != 23 {
		t.Errorf("expected val, got %v", v)
	}
	if v := tr.Get("jim"); v.(int) != 2 {
		t.Errorf("expected val, got %v", v)
	}
}
// BenchmarkRuneTriePut measures inserting random 10-letter keys.
func BenchmarkRuneTriePut(b *testing.B) {
	rand.Seed(time.Now().UnixNano())
	tr := NewRuneTrie()
	for i := 0; i < b.N; i++ {
		tr.Put(util.RandStr(10), 845)
	}
}
// TestRuneTrieFind pins substring lookup semantics: the leftmost match
// wins, a shorter stored key shadows a longer one with the same prefix
// ("mm" beats "mmp"), and a second Put overwrites ("我才是大佬" -> 88).
func TestRuneTrieFind(t *testing.T) {
	tr := NewRuneTrie()
	tr.Put("我才是大佬", 2)
	tr.Put("我才是大佬", 88)
	tr.Put("mm", 1)
	tr.Put("mmp", 2)
	tr.Put("my name is jimmymmp", 2)
	tr.Put("xxx", 88)
	tr.Put("jimmy xxx, hhjhmmp", 2)
	cases := []struct {
		content     string
		expectKey   string
		expectValue int
	}{
		{
			content:     "mm",
			expectKey:   "mm",
			expectValue: 1,
		},
		{
			content:     "m都xx发生地方范德萨发爱迪生刚发的否多少发生的否阿萨德否收到符文大师否xxxmy name is jimy, hhjhmp",
			expectKey:   "xxx",
			expectValue: 88,
		},
		{
			content:     "m都mxx发生地方范德萨发爱迪生刚发的否多少发生的否阿萨德否收到符文大师否xxxmy name is jimy, hhjhmp",
			expectKey:   "xxx",
			expectValue: 88,
		},
		{
			content:     "我才是大佬",
			expectKey:   "我才是大佬",
			expectValue: 88,
		},
	}
	for _, c := range cases {
		t.Run("", func(t *testing.T) {
			k, v := tr.Find(c.content, "")
			if v == nil {
				t.Fatal(errors.New("val is nil"))
			}
			if k != c.expectKey || v.(int) != c.expectValue {
				t.Errorf("want key: %s, val:%v, got key:%s, val:%v", c.expectKey, c.expectValue, k, v)
			}
		})
	}
}
// Historical result kept for comparison:
//BenchmarkTrieListFind-4 50000 25347 ns/op

// BenchmarkRuneTrieFind measures substring lookup against a trie of
// b.N random keys plus two fixed multi-byte keys that the probe string
// contains.
func BenchmarkRuneTrieFind(b *testing.B) {
	rand.Seed(time.Now().UnixNano())
	tr := NewRuneTrie()
	for i := 0; i < b.N; i++ {
		tr.Put(util.RandStr(10), i)
	}
	tr.Put("地方考虑saDFFDSALK", 8888)
	tr.Put("都说了开发贷款", 7512)
	for i := 0; i < b.N; i++ {
		tr.Find("dfa啥都发生地方的施工费按发的噶是打发士大夫撒旦噶尔尕热狗怕的是结果来看砥节奉公来人速度感而过;sfdsfas fsadf asd fsad f asd都说了sdfs gfdgd jimmy开发速度来发噶都说了开发贷款时间范德萨了空间发的是 jimmy按时到路口发生撒地方考虑saDFFDSALKFDFASDFASDFSDFSADFRGEWTRETGERG", "")
	}
}

View File

@@ -0,0 +1,11 @@
package trie
// Trier is the trie abstraction used by the antispam service: exact-key
// Put/Get plus substring Find (acc is prefixed to the returned key).
type Trier interface {
	Put(key string, value interface{}) bool
	Find(in string, acc string) (key string, value interface{})
	Get(key string) interface{}
}

// New returns the default Trier implementation, a RuneTrie.
func New() Trier {
	return NewRuneTrie()
}